zenml-nightly 0.68.0.dev20241027__py3-none-any.whl → 0.68.1.dev20241101__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +17 -11
- RELEASE_NOTES.md +9 -0
- zenml/VERSION +1 -1
- zenml/__init__.py +1 -1
- zenml/analytics/context.py +16 -1
- zenml/analytics/utils.py +18 -7
- zenml/artifacts/utils.py +40 -216
- zenml/cli/__init__.py +63 -90
- zenml/cli/base.py +3 -3
- zenml/cli/login.py +951 -0
- zenml/cli/server.py +462 -353
- zenml/cli/service_accounts.py +4 -4
- zenml/cli/stack.py +77 -2
- zenml/cli/stack_components.py +5 -16
- zenml/cli/user_management.py +0 -12
- zenml/cli/utils.py +24 -77
- zenml/client.py +46 -14
- zenml/config/compiler.py +1 -0
- zenml/config/global_config.py +9 -0
- zenml/config/pipeline_configurations.py +2 -1
- zenml/config/pipeline_run_configuration.py +2 -1
- zenml/constants.py +3 -9
- zenml/enums.py +1 -1
- zenml/exceptions.py +11 -0
- zenml/integrations/github/code_repositories/github_code_repository.py +1 -1
- zenml/login/__init__.py +16 -0
- zenml/login/credentials.py +346 -0
- zenml/login/credentials_store.py +603 -0
- zenml/login/pro/__init__.py +16 -0
- zenml/login/pro/client.py +496 -0
- zenml/login/pro/constants.py +34 -0
- zenml/login/pro/models.py +25 -0
- zenml/login/pro/organization/__init__.py +14 -0
- zenml/login/pro/organization/client.py +79 -0
- zenml/login/pro/organization/models.py +32 -0
- zenml/login/pro/tenant/__init__.py +14 -0
- zenml/login/pro/tenant/client.py +92 -0
- zenml/login/pro/tenant/models.py +174 -0
- zenml/login/pro/utils.py +121 -0
- zenml/{cli → login}/web_login.py +64 -28
- zenml/materializers/base_materializer.py +43 -9
- zenml/materializers/built_in_materializer.py +1 -1
- zenml/metadata/metadata_types.py +49 -0
- zenml/model/model.py +0 -38
- zenml/models/__init__.py +3 -0
- zenml/models/v2/base/base.py +12 -8
- zenml/models/v2/base/filter.py +9 -0
- zenml/models/v2/core/artifact_version.py +49 -10
- zenml/models/v2/core/component.py +54 -19
- zenml/models/v2/core/flavor.py +13 -13
- zenml/models/v2/core/model.py +3 -1
- zenml/models/v2/core/model_version.py +3 -5
- zenml/models/v2/core/model_version_artifact.py +3 -1
- zenml/models/v2/core/model_version_pipeline_run.py +3 -1
- zenml/models/v2/core/pipeline.py +3 -1
- zenml/models/v2/core/pipeline_run.py +23 -1
- zenml/models/v2/core/run_template.py +3 -1
- zenml/models/v2/core/stack.py +7 -3
- zenml/models/v2/core/step_run.py +43 -2
- zenml/models/v2/misc/auth_models.py +11 -2
- zenml/models/v2/misc/server_models.py +2 -0
- zenml/orchestrators/base_orchestrator.py +8 -4
- zenml/orchestrators/step_launcher.py +1 -0
- zenml/orchestrators/step_run_utils.py +10 -2
- zenml/orchestrators/step_runner.py +67 -55
- zenml/orchestrators/utils.py +45 -22
- zenml/pipelines/pipeline_decorator.py +5 -0
- zenml/pipelines/pipeline_definition.py +206 -160
- zenml/pipelines/run_utils.py +11 -10
- zenml/services/local/local_daemon_entrypoint.py +4 -4
- zenml/services/service.py +2 -2
- zenml/stack/stack.py +2 -6
- zenml/stack/stack_component.py +2 -7
- zenml/stack/utils.py +26 -14
- zenml/steps/base_step.py +8 -2
- zenml/steps/step_context.py +0 -3
- zenml/steps/step_invocation.py +14 -5
- zenml/steps/utils.py +1 -0
- zenml/utils/materializer_utils.py +1 -1
- zenml/utils/requirements_utils.py +71 -0
- zenml/utils/singleton.py +15 -3
- zenml/utils/source_utils.py +39 -2
- zenml/utils/visualization_utils.py +1 -1
- zenml/zen_server/auth.py +44 -39
- zenml/zen_server/deploy/__init__.py +7 -7
- zenml/zen_server/deploy/base_provider.py +46 -73
- zenml/zen_server/deploy/{local → daemon}/__init__.py +3 -3
- zenml/zen_server/deploy/{local/local_provider.py → daemon/daemon_provider.py} +44 -63
- zenml/zen_server/deploy/{local/local_zen_server.py → daemon/daemon_zen_server.py} +50 -22
- zenml/zen_server/deploy/deployer.py +90 -171
- zenml/zen_server/deploy/deployment.py +20 -12
- zenml/zen_server/deploy/docker/docker_provider.py +9 -28
- zenml/zen_server/deploy/docker/docker_zen_server.py +19 -3
- zenml/zen_server/deploy/helm/Chart.yaml +1 -1
- zenml/zen_server/deploy/helm/README.md +2 -2
- zenml/zen_server/exceptions.py +11 -0
- zenml/zen_server/jwt.py +9 -9
- zenml/zen_server/routers/auth_endpoints.py +30 -8
- zenml/zen_server/routers/stack_components_endpoints.py +1 -1
- zenml/zen_server/routers/workspaces_endpoints.py +1 -1
- zenml/zen_server/template_execution/runner_entrypoint_configuration.py +7 -4
- zenml/zen_server/template_execution/utils.py +6 -61
- zenml/zen_server/utils.py +64 -36
- zenml/zen_stores/base_zen_store.py +4 -49
- zenml/zen_stores/migrations/versions/0.68.1_release.py +23 -0
- zenml/zen_stores/migrations/versions/c22561cbb3a9_add_artifact_unique_constraints.py +86 -0
- zenml/zen_stores/rest_zen_store.py +325 -147
- zenml/zen_stores/schemas/api_key_schemas.py +9 -4
- zenml/zen_stores/schemas/artifact_schemas.py +21 -2
- zenml/zen_stores/schemas/artifact_visualization_schemas.py +1 -1
- zenml/zen_stores/schemas/component_schemas.py +49 -6
- zenml/zen_stores/schemas/device_schemas.py +9 -4
- zenml/zen_stores/schemas/flavor_schemas.py +1 -1
- zenml/zen_stores/schemas/model_schemas.py +1 -1
- zenml/zen_stores/schemas/service_schemas.py +1 -1
- zenml/zen_stores/schemas/step_run_schemas.py +1 -1
- zenml/zen_stores/schemas/trigger_schemas.py +1 -1
- zenml/zen_stores/sql_zen_store.py +393 -140
- zenml/zen_stores/template_utils.py +3 -1
- {zenml_nightly-0.68.0.dev20241027.dist-info → zenml_nightly-0.68.1.dev20241101.dist-info}/METADATA +18 -12
- {zenml_nightly-0.68.0.dev20241027.dist-info → zenml_nightly-0.68.1.dev20241101.dist-info}/RECORD +124 -107
- zenml/api.py +0 -60
- {zenml_nightly-0.68.0.dev20241027.dist-info → zenml_nightly-0.68.1.dev20241101.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.68.0.dev20241027.dist-info → zenml_nightly-0.68.1.dev20241101.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.68.0.dev20241027.dist-info → zenml_nightly-0.68.1.dev20241101.dist-info}/entry_points.txt +0 -0
zenml/pipelines/pipeline_definition.py CHANGED

```diff
@@ -37,6 +37,7 @@ from uuid import UUID
 
 import yaml
 from pydantic import ConfigDict, ValidationError
+from typing_extensions import Self
 
 from zenml import constants
 from zenml.analytics.enums import AnalyticsEvent
@@ -65,6 +66,7 @@ from zenml.models import (
     PipelineRequest,
     PipelineResponse,
     PipelineRunResponse,
+    RunTemplateResponse,
     ScheduleRequest,
 )
 from zenml.pipelines import build_utils
@@ -99,6 +101,7 @@ if TYPE_CHECKING:
     from zenml.config.source import Source
     from zenml.model.lazy_load import ModelVersionDataLazyLoader
     from zenml.model.model import Model
+    from zenml.models import ArtifactVersionResponse
     from zenml.types import HookSpecification
 
     StepConfigurationUpdateOrDict = Union[
@@ -107,7 +110,6 @@ if TYPE_CHECKING:
 
 logger = get_logger(__name__)
 
-T = TypeVar("T", bound="Pipeline")
 F = TypeVar("F", bound=Callable[..., None])
 
 
@@ -128,6 +130,7 @@ class Pipeline:
         enable_artifact_visualization: Optional[bool] = None,
         enable_step_logs: Optional[bool] = None,
         settings: Optional[Mapping[str, "SettingsOrDict"]] = None,
+        tags: Optional[List[str]] = None,
         extra: Optional[Dict[str, Any]] = None,
         on_failure: Optional["HookSpecification"] = None,
         on_success: Optional["HookSpecification"] = None,
@@ -144,7 +147,8 @@ class Pipeline:
             enable_artifact_visualization: If artifact visualization should be
                 enabled for this pipeline.
             enable_step_logs: If step logs should be enabled for this pipeline.
-            settings:
+            settings: Settings for this pipeline.
+            tags: Tags to apply to runs of this pipeline.
             extra: Extra configurations for this pipeline.
             on_failure: Callback function in event of failure of the step. Can
                 be a function with a single argument of type `BaseException`, or
@@ -168,6 +172,7 @@ class Pipeline:
             enable_artifact_visualization=enable_artifact_visualization,
             enable_step_logs=enable_step_logs,
             settings=settings,
+            tags=tags,
             extra=extra,
             on_failure=on_failure,
             on_success=on_success,
@@ -279,19 +284,20 @@ class Pipeline:
         self.__suppress_warnings_flag__ = False
 
     def configure(
-        self
+        self,
         enable_cache: Optional[bool] = None,
         enable_artifact_metadata: Optional[bool] = None,
         enable_artifact_visualization: Optional[bool] = None,
         enable_step_logs: Optional[bool] = None,
         settings: Optional[Mapping[str, "SettingsOrDict"]] = None,
+        tags: Optional[List[str]] = None,
         extra: Optional[Dict[str, Any]] = None,
         on_failure: Optional["HookSpecification"] = None,
         on_success: Optional["HookSpecification"] = None,
         model: Optional["Model"] = None,
         parameters: Optional[Dict[str, Any]] = None,
         merge: bool = True,
-    ) ->
+    ) -> Self:
         """Configures the pipeline.
 
         Configuration merging example:
@@ -312,6 +318,7 @@ class Pipeline:
                 enabled for this pipeline.
             enable_step_logs: If step logs should be enabled for this pipeline.
             settings: settings for this pipeline.
+            tags: Tags to apply to runs of this pipeline.
             extra: Extra configurations for this pipeline.
             on_failure: Callback function in event of failure of the step. Can
                 be a function with a single argument of type `BaseException`, or
@@ -340,6 +347,11 @@ class Pipeline:
         # string of on_success hook function to be used for this pipeline
         success_hook_source = resolve_and_validate_hook(on_success)
 
+        if merge and tags and self._configuration.tags:
+            # Merge tags explicitly here as the recursive update later only
+            # merges dicts
+            tags = self._configuration.tags + tags
+
         values = dict_utils.remove_none_values(
             {
                 "enable_cache": enable_cache,
@@ -347,6 +359,7 @@ class Pipeline:
                 "enable_artifact_visualization": enable_artifact_visualization,
                 "enable_step_logs": enable_step_logs,
                 "settings": settings,
+                "tags": tags,
                 "extra": extra,
                 "failure_hook_source": failure_hook_source,
                 "success_hook_source": success_hook_source,
```
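The hunks above add a `tags` option to `Pipeline.__init__` and `Pipeline.configure`, with tags from repeated configuration calls merged rather than overwritten when `merge=True`. A minimal sketch of how this might surface to users (the pipeline, step, and tag names are illustrative, not taken from the diff):

```python
from zenml import pipeline, step


@step
def train() -> None:
    ...


# `tags` is the new pipeline-level option; runs of this pipeline get tagged.
@pipeline(tags=["nightly", "training"])
def training_pipeline() -> None:
    train()


# With `merge=True` (the default), tags passed later are appended to the
# existing ones instead of replacing them.
tagged = training_pipeline.with_options(tags=["gpu"])
```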
```diff
@@ -539,7 +552,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
             code_repository=code_repository,
         )
 
-    def
+    def _create_deployment(
         self,
         *,
         run_name: Optional[str] = None,
@@ -557,8 +570,8 @@ To avoid this consider setting pipeline parameters only in one place (config or
         config_path: Optional[str] = None,
         unlisted: bool = False,
         prevent_build_reuse: bool = False,
-    ) ->
-        """
+    ) -> PipelineDeploymentResponse:
+        """Create a pipeline deployment.
 
         Args:
             run_name: Name of the pipeline run.
@@ -585,175 +598,182 @@ To avoid this consider setting pipeline parameters only in one place (config or
                 `DockerSettings.prevent_build_reuse` instead.
 
         Returns:
-            running with a schedule.
+            The pipeline deployment.
 
         Raises:
-            ValueError:
+            ValueError: If the orchestrator doesn't support scheduling, but a
+                schedule was given
         """
+        deployment, schedule, build = self._compile(
+            config_path=config_path,
+            run_name=run_name,
+            enable_cache=enable_cache,
+            enable_artifact_metadata=enable_artifact_metadata,
+            enable_artifact_visualization=enable_artifact_visualization,
+            enable_step_logs=enable_step_logs,
+            steps=step_configurations,
+            settings=settings,
+            schedule=schedule,
+            build=build,
+            extra=extra,
+        )
 
+        skip_pipeline_registration = constants.handle_bool_env_var(
+            constants.ENV_ZENML_SKIP_PIPELINE_REGISTRATION,
+            default=False,
+        )
 
-        deployment, schedule, build = self._compile(
-            config_path=config_path,
-            run_name=run_name,
-            enable_cache=enable_cache,
-            enable_artifact_metadata=enable_artifact_metadata,
-            enable_artifact_visualization=enable_artifact_visualization,
-            enable_step_logs=enable_step_logs,
-            steps=step_configurations,
-            settings=settings,
-            schedule=schedule,
-            build=build,
-            extra=extra,
-        )
+        register_pipeline = not (skip_pipeline_registration or unlisted)
 
-        )
+        pipeline_id = None
+        if register_pipeline:
+            pipeline_id = self._register().id
 
+        else:
+            logger.debug(f"Pipeline {self.name} is unlisted.")
+
+        stack = Client().active_stack
+        stack.validate()
+
+        schedule_id = None
+        if schedule:
+            if not stack.orchestrator.config.is_schedulable:
+                raise ValueError(
+                    f"Stack {stack.name} does not support scheduling. "
+                    "Not all orchestrator types support scheduling, "
+                    "kindly consult with "
+                    "https://docs.zenml.io/how-to/build-pipelines/schedule-a-pipeline "
+                    "for details."
+                )
+            if schedule.name:
+                schedule_name = schedule.name
+            else:
+                schedule_name = format_name_template(
+                    deployment.run_name_template
+                )
+            components = Client().active_stack_model.components
+            orchestrator = components[StackComponentType.ORCHESTRATOR][0]
+            schedule_model = ScheduleRequest(
+                workspace=Client().active_workspace.id,
+                user=Client().active_user.id,
+                pipeline_id=pipeline_id,
+                orchestrator_id=orchestrator.id,
+                name=schedule_name,
+                active=True,
+                cron_expression=schedule.cron_expression,
+                start_time=schedule.start_time,
+                end_time=schedule.end_time,
+                interval_second=schedule.interval_second,
+                catchup=schedule.catchup,
+                run_once_start_time=schedule.run_once_start_time,
+            )
+            schedule_id = Client().zen_store.create_schedule(schedule_model).id
+            logger.info(
+                f"Created schedule `{schedule_name}` for pipeline "
+                f"`{deployment.pipeline_configuration.name}`."
+            )
 
+        stack = Client().active_stack
+        stack.validate()
+        upload_notebook_cell_code_if_necessary(
+            deployment=deployment, stack=stack
+        )
 
+        local_repo_context = (
+            code_repository_utils.find_active_code_repository()
+        )
+        code_repository = build_utils.verify_local_repository_context(
+            deployment=deployment, local_repo_context=local_repo_context
+        )
 
-                raise ValueError(
-                    f"Stack {stack.name} does not support scheduling. "
-                    "Not all orchestrator types support scheduling, "
-                    "kindly consult with "
-                    "https://docs.zenml.io/how-to/build-pipelines/schedule-a-pipeline "
-                    "for details."
-                )
-            if schedule.name:
-                schedule_name = schedule.name
-            else:
-                schedule_name = format_name_template(
-                    deployment.run_name_template
-                )
-            components = Client().active_stack_model.components
-            orchestrator = components[StackComponentType.ORCHESTRATOR][0]
-            schedule_model = ScheduleRequest(
-                workspace=Client().active_workspace.id,
-                user=Client().active_user.id,
-                pipeline_id=pipeline_id,
-                orchestrator_id=orchestrator.id,
-                name=schedule_name,
-                active=True,
-                cron_expression=schedule.cron_expression,
-                start_time=schedule.start_time,
-                end_time=schedule.end_time,
-                interval_second=schedule.interval_second,
-                catchup=schedule.catchup,
-                run_once_start_time=schedule.run_once_start_time,
-            )
-            schedule_id = (
-                Client().zen_store.create_schedule(schedule_model).id
-            )
-            logger.info(
-                f"Created schedule `{schedule_name}` for pipeline "
-                f"`{deployment.pipeline_configuration.name}`."
-            )
+        if prevent_build_reuse:
+            logger.warning(
+                "Passing `prevent_build_reuse=True` to "
+                "`pipeline.with_opitions(...)` is deprecated. Use "
+                "`DockerSettings.prevent_build_reuse` instead."
+            )
 
+        build_model = build_utils.reuse_or_create_pipeline_build(
+            deployment=deployment,
+            pipeline_id=pipeline_id,
+            allow_build_reuse=not prevent_build_reuse,
+            build=build,
+            code_repository=code_repository,
+        )
+        build_id = build_model.id if build_model else None
+
+        code_reference = None
+        if local_repo_context and not local_repo_context.is_dirty:
+            source_root = source_utils.get_source_root()
+            subdirectory = (
+                Path(source_root)
+                .resolve()
+                .relative_to(local_repo_context.root)
             )
 
+            code_reference = CodeReferenceRequest(
+                commit=local_repo_context.current_commit,
+                subdirectory=subdirectory.as_posix(),
+                code_repository=local_repo_context.code_repository_id,
             )
+
+        code_path = None
+        if build_utils.should_upload_code(
+            deployment=deployment,
+            build=build_model,
+            code_reference=code_reference,
+        ):
+            code_archive = code_utils.CodeArchive(
+                root=source_utils.get_source_root()
             )
+            logger.info("Archiving pipeline code...")
+            code_path = code_utils.upload_code_if_necessary(code_archive)
+
+        request = PipelineDeploymentRequest(
+            user=Client().active_user.id,
+            workspace=Client().active_workspace.id,
+            stack=stack.id,
+            pipeline=pipeline_id,
+            build=build_id,
+            schedule=schedule_id,
+            code_reference=code_reference,
+            code_path=code_path,
+            **deployment.model_dump(),
+        )
+        return Client().zen_store.create_deployment(deployment=request)
 
-                "`DockerSettings.prevent_build_reuse` instead."
-            )
+    def _run(
+        self,
+    ) -> Optional[PipelineRunResponse]:
+        """Runs the pipeline on the active stack.
 
+        Returns:
+            The pipeline run or `None` if running with a schedule.
+        """
+        if constants.SHOULD_PREVENT_PIPELINE_EXECUTION:
+            # An environment variable was set to stop the execution of
+            # pipelines. This is done to prevent execution of module-level
+            # pipeline.run() calls when importing modules needed to run a step.
+            logger.info(
+                "Preventing execution of pipeline '%s'. If this is not "
+                "intended behavior, make sure to unset the environment "
+                "variable '%s'.",
+                self.name,
+                constants.ENV_ZENML_PREVENT_PIPELINE_EXECUTION,
             )
-        code_reference = None
-        if local_repo_context and not local_repo_context.is_dirty:
-            source_root = source_utils.get_source_root()
-            subdirectory = (
-                Path(source_root)
-                .resolve()
-                .relative_to(local_repo_context.root)
-            )
-            code_reference = CodeReferenceRequest(
-                commit=local_repo_context.current_commit,
-                subdirectory=subdirectory.as_posix(),
-                code_repository=local_repo_context.code_repository_id,
-            )
+            return None
 
-        if build_utils.should_upload_code(
-            deployment=deployment,
-            build=build_model,
-            code_reference=code_reference,
-        ):
-            code_archive = code_utils.CodeArchive(
-                root=source_utils.get_source_root()
-            )
-            logger.info("Archiving pipeline code...")
-            code_path = code_utils.upload_code_if_necessary(code_archive)
+        logger.info(f"Initiating a new run for the pipeline: `{self.name}`.")
 
-            stack=stack.id,
-            pipeline=pipeline_id,
-            build=build_id,
-            schedule=schedule_id,
-            code_reference=code_reference,
-            code_path=code_path,
-            **deployment.model_dump(),
-        )
-        deployment_model = Client().zen_store.create_deployment(
-            deployment=deployment_request
-        )
+        with track_handler(AnalyticsEvent.RUN_PIPELINE) as analytics_handler:
+            stack = Client().active_stack
+            deployment = self._create_deployment(**self._run_args)
 
-            self.log_pipeline_deployment_metadata(
-            run = create_placeholder_run(deployment=
+            self.log_pipeline_deployment_metadata(deployment)
+            run = create_placeholder_run(deployment=deployment)
 
             analytics_handler.metadata = self._get_pipeline_analytics_metadata(
-                deployment=
+                deployment=deployment,
                 stack=stack,
                 run_id=run.id if run else None,
             )
```
```diff
@@ -766,11 +786,11 @@ To avoid this consider setting pipeline parameters only in one place (config or
             logger.info(
                 "You can visualize your pipeline runs in the `ZenML "
                 "Dashboard`. In order to try it locally, please run "
-                "`zenml
+                "`zenml login --local`."
             )
 
             deploy_pipeline(
-                deployment=
+                deployment=deployment, stack=stack, placeholder_run=run
             )
             if run:
                 return Client().get_pipeline_run(run.id)
@@ -1080,7 +1100,9 @@ To avoid this consider setting pipeline parameters only in one place (config or
         self,
         step: "BaseStep",
         input_artifacts: Dict[str, StepArtifact],
-        external_artifacts: Dict[
+        external_artifacts: Dict[
+            str, Union["ExternalArtifact", "ArtifactVersionResponse"]
+        ],
         model_artifacts_or_metadata: Dict[str, "ModelVersionDataLazyLoader"],
         client_lazy_loaders: Dict[str, "ClientLazyLoader"],
         parameters: Dict[str, Any],
```
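The widened `external_artifacts` typing above allows a stored artifact version to be passed directly as a step input, not only an `ExternalArtifact` wrapper. A minimal sketch under that assumption (the artifact name and the `train_data` parameter are illustrative):

```python
from zenml.client import Client

# Look up an existing artifact version by name (illustrative name).
dataset = Client().get_artifact_version("my_dataset")

# Pass the ArtifactVersionResponse straight into a pipeline entrypoint that
# accepts a `train_data` parameter; previously this required wrapping the
# value in an `ExternalArtifact`.
training_pipeline(train_data=dataset)
```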
```diff
@@ -1181,7 +1203,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
 
         raise RuntimeError("Unable to find step ID")
 
-    def __enter__(self
+    def __enter__(self) -> Self:
         """Activate the pipeline context.
 
         Raises:
@@ -1316,7 +1338,9 @@ To avoid this consider setting pipeline parameters only in one place (config or
         """
         return copy.deepcopy(self)
 
-    def __call__(
+    def __call__(
+        self, *args: Any, **kwargs: Any
+    ) -> Optional[PipelineRunResponse]:
         """Handle a call of the pipeline.
 
         This method does one of two things:
@@ -1331,7 +1355,9 @@ To avoid this consider setting pipeline parameters only in one place (config or
             **kwargs: Entrypoint function keyword arguments.
 
         Returns:
-
+            If called within another pipeline, returns the outputs of the
+            `entrypoint` method. Otherwise, returns the pipeline run or `None`
+            if running with a schedule.
         """
         if Pipeline.ACTIVE_PIPELINE:
             # Calling a pipeline inside a pipeline, we return the potential
@@ -1343,7 +1369,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
             return self.entrypoint(*args, **kwargs)
 
         self.prepare(*args, **kwargs)
-        return self._run(
+        return self._run()
 
     def _call_entrypoint(self, *args: Any, **kwargs: Any) -> None:
         """Calls the pipeline entrypoint function with the given arguments.
@@ -1391,6 +1417,26 @@ To avoid this consider setting pipeline parameters only in one place (config or
         else:
             self.prepare()
 
+    def create_run_template(
+        self, name: str, **kwargs: Any
+    ) -> RunTemplateResponse:
+        """Create a run template for the pipeline.
+
+        Args:
+            name: The name of the run template.
+            **kwargs: Keyword arguments for the client method to create a run
+                template.
+
+        Returns:
+            The created run template.
+        """
+        self._prepare_if_possible()
+        deployment = self._create_deployment(**self._run_args)
+
+        return Client().create_run_template(
+            name=name, deployment_id=deployment.id, **kwargs
+        )
+
     def _reconfigure_from_file_with_overrides(
         self,
         config_path: Optional[str] = None,
```
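The new `create_run_template` method registers a reusable run template from a configured pipeline instead of (or in addition to) running it; extra keyword arguments are forwarded to `Client().create_run_template`. A minimal sketch, assuming a `training_pipeline` like the one above and a server that supports run templates (the template name is illustrative):

```python
# Build the pipeline deployment and register it as a run template.
template = training_pipeline.create_run_template(name="nightly-training")

# The returned RunTemplateResponse can later be used to trigger runs, for
# example from the dashboard or via the client.
print(template.id)
```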
zenml/pipelines/run_utils.py CHANGED

```diff
@@ -22,6 +22,7 @@ from zenml.models import (
     PipelineRunResponse,
     StackResponse,
 )
+from zenml.orchestrators.publish_utils import publish_failed_pipeline_run
 from zenml.orchestrators.utils import get_run_name
 from zenml.stack import Flavor, Stack
 from zenml.utils import code_utils, notebook_utils, source_utils
@@ -81,6 +82,7 @@ def create_placeholder_run(
         deployment=deployment.id,
         pipeline=deployment.pipeline.id if deployment.pipeline else None,
         status=ExecutionStatus.INITIALIZING,
+        tags=deployment.pipeline_configuration.tags,
     )
     return Client().zen_store.create_run(run_request)
 
@@ -124,20 +126,18 @@ def deploy_pipeline(
     Args:
         deployment: The deployment to run.
         stack: The stack on which to run the deployment.
-        placeholder_run: An optional placeholder run for the deployment.
-            will be deleted in case the pipeline deployment failed.
+        placeholder_run: An optional placeholder run for the deployment.
 
     Raises:
         Exception: Any exception that happened while deploying or running
             (in case it happens synchronously) the pipeline.
     """
-    stack.prepare_pipeline_deployment(deployment=deployment)
-
     # Prevent execution of nested pipelines which might lead to
     # unexpected behavior
     previous_value = constants.SHOULD_PREVENT_PIPELINE_EXECUTION
     constants.SHOULD_PREVENT_PIPELINE_EXECUTION = True
     try:
+        stack.prepare_pipeline_deployment(deployment=deployment)
         stack.deploy_pipeline(
             deployment=deployment,
             placeholder_run=placeholder_run,
@@ -145,13 +145,14 @@ def deploy_pipeline(
     except Exception as e:
         if (
             placeholder_run
-            and Client()
+            and Client()
+            .get_pipeline_run(placeholder_run.id, hydrate=False)
+            .status
             == ExecutionStatus.INITIALIZING
         ):
-            # The run
-            #
-            Client().delete_pipeline_run(placeholder_run.id)
+            # The run failed during the initialization phase -> We change it's
+            # status to `Failed`
+            publish_failed_pipeline_run(placeholder_run.id)
 
         raise e
     finally:
@@ -203,7 +204,7 @@ def validate_stack_is_runnable_from_server(
         assert len(component_list) == 1
         component = component_list[0]
         flavors = zen_store.list_flavors(
-            FlavorFilter(name=component.
+            FlavorFilter(name=component.flavor_name, type=component.type)
         )
         assert len(flavors) == 1
         flavor_model = flavors[0]
```
zenml/services/local/local_daemon_entrypoint.py CHANGED

```diff
@@ -24,13 +24,13 @@ import click
 
 from zenml.utils.daemon import daemonize
 
-# Try to import the
+# Try to import the DaemonZenServer here because it needs to be registered in the
 # service registry early on in order to be available for use in other modules.
-# If the
+# If the DaemonZenServer dependencies aren't installed, there is no need to register
 # it anywhere so we simply pass.
 try:
-    from zenml.zen_server.deploy.
+    from zenml.zen_server.deploy.daemon.daemon_zen_server import ( # noqa
+        DaemonZenServer,
     )
 except ImportError:
     pass
```
zenml/services/service.py CHANGED

```diff
@@ -78,9 +78,9 @@ def update_service_status(
         if pre_status:
             self.status.update_state(pre_status, "")
         try:
-            logger.
+            logger.debug(f"Calling {func.__name__} method...")
             result = func(self, *args, **kwargs)
-            logger.
+            logger.debug(f"{func.__name__} method executed successfully.")
             if post_status:
                 self.status.update_state(post_status, "")
             return result
```
zenml/stack/stack.py CHANGED

```diff
@@ -810,18 +810,14 @@ class Stack:
         self,
         deployment: "PipelineDeploymentResponse",
         placeholder_run: Optional["PipelineRunResponse"] = None,
-    ) ->
+    ) -> None:
         """Deploys a pipeline on this stack.
 
         Args:
             deployment: The pipeline deployment.
            placeholder_run: An optional placeholder run for the deployment.
-                This will be deleted in case the pipeline deployment failed.
-
-        Returns:
-            The return value of the call to `orchestrator.run_pipeline(...)`.
         """
+        self.orchestrator.run(
            deployment=deployment, stack=self, placeholder_run=placeholder_run
         )
 
```