zenml-nightly 0.80.2.dev20250415__py3-none-any.whl → 0.80.2.dev20250416__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zenml/VERSION +1 -1
- zenml/cli/utils.py +13 -11
- zenml/config/compiler.py +1 -0
- zenml/config/global_config.py +1 -1
- zenml/config/pipeline_configurations.py +1 -0
- zenml/config/pipeline_run_configuration.py +1 -0
- zenml/constants.py +4 -1
- zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +47 -5
- zenml/integrations/gcp/vertex_custom_job_parameters.py +15 -1
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +0 -1
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +0 -1
- zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py +0 -3
- zenml/logging/step_logging.py +41 -21
- zenml/login/credentials_store.py +31 -0
- zenml/models/v2/base/base.py +8 -4
- zenml/models/v2/base/filter.py +1 -1
- zenml/models/v2/core/pipeline_run.py +19 -0
- zenml/orchestrators/step_launcher.py +2 -3
- zenml/orchestrators/step_runner.py +2 -2
- zenml/orchestrators/utils.py +2 -5
- zenml/pipelines/pipeline_context.py +1 -0
- zenml/pipelines/pipeline_decorator.py +4 -0
- zenml/pipelines/pipeline_definition.py +83 -22
- zenml/pipelines/run_utils.py +4 -0
- zenml/steps/utils.py +1 -1
- zenml/zen_server/auth.py +44 -64
- zenml/zen_server/download_utils.py +26 -29
- zenml/zen_server/jwt.py +0 -14
- zenml/zen_server/routers/auth_endpoints.py +5 -36
- zenml/zen_server/routers/pipeline_deployments_endpoints.py +63 -26
- zenml/zen_server/routers/runs_endpoints.py +57 -0
- zenml/zen_server/template_execution/utils.py +1 -1
- zenml/zen_stores/rest_zen_store.py +16 -13
- zenml/zen_stores/schemas/pipeline_run_schemas.py +1 -0
- zenml/zen_stores/sql_zen_store.py +18 -0
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/METADATA +2 -1
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/RECORD +40 -40
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/entry_points.txt +0 -0
zenml/orchestrators/utils.py
CHANGED
@@ -105,7 +105,6 @@ def is_setting_enabled(
 def get_config_environment_vars(
     schedule_id: Optional[UUID] = None,
     pipeline_run_id: Optional[UUID] = None,
-    step_run_id: Optional[UUID] = None,
 ) -> Dict[str, str]:
     """Gets environment variables to set for mirroring the active config.

@@ -119,7 +118,6 @@ def get_config_environment_vars(
         schedule_id: Optional schedule ID to use to generate a new API token.
         pipeline_run_id: Optional pipeline run ID to use to generate a new API
             token.
-        step_run_id: Optional step run ID to use to generate a new API token.

     Returns:
         Environment variable dict.
@@ -138,7 +136,7 @@ def get_config_environment_vars(
     credentials_store = get_credentials_store()
     url = global_config.store_configuration.url
     api_token = credentials_store.get_token(url, allow_expired=False)
-    if schedule_id or pipeline_run_id
+    if schedule_id or pipeline_run_id:
         assert isinstance(global_config.zen_store, RestZenStore)

         # The user has the option to manually set an expiration for the API
@@ -173,7 +171,7 @@ def get_config_environment_vars(
             # If only a schedule is given, the pipeline run credentials will
             # be valid for the entire duration of the schedule.
             api_key = credentials_store.get_api_key(url)
-            if not api_key and not pipeline_run_id
+            if not api_key and not pipeline_run_id:
                 logger.warning(
                     "An API token without an expiration time will be generated "
                     "and used to run this pipeline on a schedule. This is very "
@@ -194,7 +192,6 @@ def get_config_environment_vars(
                 token_type=APITokenType.WORKLOAD,
                 schedule_id=schedule_id,
                 pipeline_run_id=pipeline_run_id,
-                step_run_id=step_run_id,
             )

         environment_vars[ENV_ZENML_STORE_PREFIX + "API_TOKEN"] = (
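Usage note: with this change, workload API tokens produced by `get_config_environment_vars` can only be scoped to a schedule or a pipeline run; the `step_run_id` scope is gone. A minimal sketch of the new call shape (hypothetical run ID; assumes a configured ZenML client and stack):

```python
from uuid import uuid4

from zenml.orchestrators.utils import get_config_environment_vars

# Hypothetical pipeline run ID; step_run_id is no longer accepted.
env_vars = get_config_environment_vars(pipeline_run_id=uuid4())
print(sorted(env_vars))
```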
zenml/pipelines/pipeline_context.py
CHANGED
@@ -109,6 +109,7 @@ class PipelineContext:
             pipeline_configuration.enable_artifact_visualization
         )
         self.enable_step_logs = pipeline_configuration.enable_step_logs
+        self.enable_pipeline_logs = pipeline_configuration.enable_pipeline_logs
         self.settings = pipeline_configuration.settings
         self.extra = pipeline_configuration.extra
         self.model = pipeline_configuration.model
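For context, a sketch of how the new attribute could be read while a pipeline is being composed (assumes `get_pipeline_context` is called inside a pipeline function body, where it is valid):

```python
from zenml import get_pipeline_context, pipeline

@pipeline
def my_pipeline() -> None:
    # Only valid while the pipeline is being composed.
    ctx = get_pipeline_context()
    print("pipeline logs enabled:", ctx.enable_pipeline_logs)
```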
zenml/pipelines/pipeline_decorator.py
CHANGED
@@ -50,6 +50,7 @@ def pipeline(
     enable_cache: Optional[bool] = None,
     enable_artifact_metadata: Optional[bool] = None,
     enable_step_logs: Optional[bool] = None,
+    enable_pipeline_logs: Optional[bool] = None,
     settings: Optional[Dict[str, "SettingsOrDict"]] = None,
     tags: Optional[List[Union[str, "Tag"]]] = None,
     extra: Optional[Dict[str, Any]] = None,
@@ -67,6 +68,7 @@ def pipeline(
     enable_cache: Optional[bool] = None,
     enable_artifact_metadata: Optional[bool] = None,
     enable_step_logs: Optional[bool] = None,
+    enable_pipeline_logs: Optional[bool] = None,
     settings: Optional[Dict[str, "SettingsOrDict"]] = None,
     tags: Optional[List[Union[str, "Tag"]]] = None,
     extra: Optional[Dict[str, Any]] = None,
@@ -84,6 +86,7 @@ def pipeline(
         enable_cache: Whether to use caching or not.
         enable_artifact_metadata: Whether to enable artifact metadata or not.
         enable_step_logs: If step logs should be enabled for this pipeline.
+        enable_pipeline_logs: If pipeline logs should be enabled for this pipeline.
         settings: Settings for this pipeline.
         tags: Tags to apply to runs of the pipeline.
         extra: Extra configurations for this pipeline.
@@ -108,6 +111,7 @@ def pipeline(
             enable_cache=enable_cache,
             enable_artifact_metadata=enable_artifact_metadata,
             enable_step_logs=enable_step_logs,
+            enable_pipeline_logs=enable_pipeline_logs,
             settings=settings,
             tags=tags,
             extra=extra,
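A minimal sketch of the new decorator flag (pipeline and step names are illustrative):

```python
from zenml import pipeline, step

@step
def trainer() -> int:
    return 42

# Mirrors enable_step_logs, but toggles storage of the client-side
# (pipeline-level) logs for runs of this pipeline.
@pipeline(enable_pipeline_logs=False)
def training_pipeline() -> None:
    trainer()
```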
zenml/pipelines/pipeline_definition.py
CHANGED
@@ -16,7 +16,7 @@
 import copy
 import hashlib
 import inspect
-from contextlib import contextmanager
+from contextlib import contextmanager, nullcontext
 from pathlib import Path
 from typing import (
     TYPE_CHECKING,
@@ -56,8 +56,13 @@ from zenml.enums import StackComponentType
 from zenml.exceptions import EntityExistsError
 from zenml.hooks.hook_validators import resolve_and_validate_hook
 from zenml.logger import get_logger
+from zenml.logging.step_logging import (
+    PipelineLogsStorageContext,
+    prepare_logs_uri,
+)
 from zenml.models import (
     CodeReferenceRequest,
+    LogsRequest,
     PipelineBuildBase,
     PipelineBuildResponse,
     PipelineDeploymentBase,
@@ -130,6 +135,7 @@ class Pipeline:
         enable_artifact_metadata: Optional[bool] = None,
         enable_artifact_visualization: Optional[bool] = None,
         enable_step_logs: Optional[bool] = None,
+        enable_pipeline_logs: Optional[bool] = None,
         settings: Optional[Mapping[str, "SettingsOrDict"]] = None,
         tags: Optional[List[Union[str, "Tag"]]] = None,
         extra: Optional[Dict[str, Any]] = None,
@@ -149,6 +155,7 @@ class Pipeline:
            enable_artifact_visualization: If artifact visualization should be
                enabled for this pipeline.
            enable_step_logs: If step logs should be enabled for this pipeline.
+           enable_pipeline_logs: If pipeline logs should be enabled for this pipeline.
            settings: Settings for this pipeline.
            tags: Tags to apply to runs of this pipeline.
            extra: Extra configurations for this pipeline.
@@ -174,6 +181,7 @@ class Pipeline:
            enable_artifact_metadata=enable_artifact_metadata,
            enable_artifact_visualization=enable_artifact_visualization,
            enable_step_logs=enable_step_logs,
+           enable_pipeline_logs=enable_pipeline_logs,
            settings=settings,
            tags=tags,
            extra=extra,
@@ -293,6 +301,7 @@ class Pipeline:
         enable_artifact_metadata: Optional[bool] = None,
         enable_artifact_visualization: Optional[bool] = None,
         enable_step_logs: Optional[bool] = None,
+        enable_pipeline_logs: Optional[bool] = None,
         settings: Optional[Mapping[str, "SettingsOrDict"]] = None,
         tags: Optional[List[Union[str, "Tag"]]] = None,
         extra: Optional[Dict[str, Any]] = None,
@@ -322,6 +331,7 @@ class Pipeline:
            enable_artifact_visualization: If artifact visualization should be
                enabled for this pipeline.
            enable_step_logs: If step logs should be enabled for this pipeline.
+           enable_pipeline_logs: If pipeline logs should be enabled for this pipeline.
            settings: settings for this pipeline.
            tags: Tags to apply to runs of this pipeline.
            extra: Extra configurations for this pipeline.
@@ -364,6 +374,7 @@ class Pipeline:
            "enable_artifact_metadata": enable_artifact_metadata,
            "enable_artifact_visualization": enable_artifact_visualization,
            "enable_step_logs": enable_step_logs,
+           "enable_pipeline_logs": enable_pipeline_logs,
            "settings": settings,
            "tags": tags,
            "extra": extra,
@@ -588,6 +599,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
         enable_artifact_metadata: Optional[bool] = None,
         enable_artifact_visualization: Optional[bool] = None,
         enable_step_logs: Optional[bool] = None,
+        enable_pipeline_logs: Optional[bool] = None,
         schedule: Optional[Schedule] = None,
         build: Union[str, "UUID", "PipelineBuildBase", None] = None,
         settings: Optional[Mapping[str, "SettingsOrDict"]] = None,
@@ -610,6 +622,8 @@ To avoid this consider setting pipeline parameters only in one place (config or
            enable_artifact_visualization: If artifact visualization should be
                enabled for this pipeline run.
            enable_step_logs: If step logs should be enabled for this pipeline.
+           enable_pipeline_logs: If pipeline logs should be enabled for this
+               pipeline run.
            schedule: Optional schedule to use for the run.
            build: Optional build to use for the run.
            settings: Settings for this pipeline run.
@@ -641,6 +655,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
            enable_artifact_metadata=enable_artifact_metadata,
            enable_artifact_visualization=enable_artifact_visualization,
            enable_step_logs=enable_step_logs,
+           enable_pipeline_logs=enable_pipeline_logs,
            steps=step_configurations,
            settings=settings,
            schedule=schedule,
@@ -723,7 +738,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
         if prevent_build_reuse:
             logger.warning(
                 "Passing `prevent_build_reuse=True` to "
-                "`pipeline.
+                "`pipeline.with_options(...)` is deprecated. Use "
                 "`DockerSettings.prevent_build_reuse` instead."
             )

@@ -806,31 +821,77 @@ To avoid this consider setting pipeline parameters only in one place (config or

         with track_handler(AnalyticsEvent.RUN_PIPELINE) as analytics_handler:
             stack = Client().active_stack
-            deployment = self._create_deployment(**self._run_args)

+            # Enable or disable pipeline run logs storage
+            if self._run_args.get("schedule"):
+                # Pipeline runs scheduled to run in the future are not logged
+                # via the client.
+                logging_enabled = False
+            elif constants.handle_bool_env_var(
+                constants.ENV_ZENML_DISABLE_PIPELINE_LOGS_STORAGE, False
+            ):
+                logging_enabled = False
+            else:
+                logging_enabled = self._run_args.get(
+                    "enable_pipeline_logs",
+                    self.configuration.enable_pipeline_logs
+                    if self.configuration.enable_pipeline_logs is not None
+                    else True,
+                )

-                    stack=stack,
-                    run_id=run.id if run else None,
-                )
+            logs_context = nullcontext()
+            logs_model = None

+            if logging_enabled:
+                # Configure the logs
+                logs_uri = prepare_logs_uri(
+                    stack.artifact_store,
+                )
+
+                logs_context = PipelineLogsStorageContext(
+                    logs_uri=logs_uri,
+                    artifact_store=stack.artifact_store,
+                    prepend_step_name=False,
+                )  # type: ignore[assignment]
+
+                logs_model = LogsRequest(
+                    uri=logs_uri,
+                    artifact_store_id=stack.artifact_store.id,
+                )
+
+            with logs_context:
+                deployment = self._create_deployment(**self._run_args)
+
+                self.log_pipeline_deployment_metadata(deployment)
+                run = create_placeholder_run(
+                    deployment=deployment, logs=logs_model
+                )
+
+                analytics_handler.metadata = (
+                    self._get_pipeline_analytics_metadata(
+                        deployment=deployment,
+                        stack=stack,
+                        run_id=run.id if run else None,
                     )
+                )
+
+                if run:
+                    run_url = dashboard_utils.get_run_url(run)
+                    if run_url:
+                        logger.info(
+                            f"Dashboard URL for Pipeline Run: {run_url}"
+                        )
+                    else:
+                        logger.info(
+                            "You can visualize your pipeline runs in the `ZenML "
+                            "Dashboard`. In order to try it locally, please run "
+                            "`zenml login --local`."
+                        )
+
+                deploy_pipeline(
+                    deployment=deployment, stack=stack, placeholder_run=run
+                )

-            deploy_pipeline(
-                deployment=deployment, stack=stack, placeholder_run=run
-            )
         if run:
             return Client().get_pipeline_run(run.id)
         return None
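The hunk above implies the following precedence for client-side pipeline log storage: scheduled runs are never logged, then a kill-switch environment variable is consulted, then the run-level argument, then the pipeline configuration (defaulting to enabled). A standalone sketch of that decision, assuming the `ENV_ZENML_DISABLE_PIPELINE_LOGS_STORAGE` constant resolves to the variable name used below:

```python
import os
from typing import Optional

def pipeline_logging_enabled(
    is_scheduled: bool,
    run_arg: Optional[bool],
    configured: Optional[bool],
) -> bool:
    """Sketch of the decision logic above, not the real implementation."""
    if is_scheduled:
        # Runs scheduled for the future are not logged via the client.
        return False
    # Assumption: the constant maps to this environment variable name.
    if os.environ.get("ZENML_DISABLE_PIPELINE_LOGS_STORAGE", "").lower() in ("1", "true"):
        return False
    if run_arg is not None:
        return run_arg
    return configured if configured is not None else True

print(pipeline_logging_enabled(False, None, None))  # True
```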
zenml/pipelines/run_utils.py
CHANGED
@@ -15,6 +15,7 @@ from zenml.enums import ExecutionStatus
 from zenml.logger import get_logger
 from zenml.models import (
     FlavorFilter,
+    LogsRequest,
     PipelineDeploymentBase,
     PipelineDeploymentResponse,
     PipelineRunRequest,
@@ -49,6 +50,7 @@ def get_default_run_name(pipeline_name: str) -> str:

 def create_placeholder_run(
     deployment: "PipelineDeploymentResponse",
+    logs: Optional["LogsRequest"] = None,
 ) -> Optional["PipelineRunResponse"]:
     """Create a placeholder run for the deployment.

@@ -57,6 +59,7 @@ def create_placeholder_run(

     Args:
         deployment: The deployment for which to create the placeholder run.
+        logs: The logs for the run.

     Returns:
         The placeholder run or `None` if no run was created.
@@ -86,6 +89,7 @@ def create_placeholder_run(
         pipeline=deployment.pipeline.id if deployment.pipeline else None,
         status=ExecutionStatus.INITIALIZING,
         tags=deployment.pipeline_configuration.tags,
+        logs=logs,
     )
     run, _ = Client().zen_store.get_or_create_run(run_request)
     return run
zenml/steps/utils.py
CHANGED
@@ -553,7 +553,7 @@ def run_as_single_step_pipeline(
     orchestrator = Client().active_stack.orchestrator

     pipeline_settings: Any = {}
-    if "synchronous" in orchestrator.config.model_fields:
+    if "synchronous" in type(orchestrator.config).model_fields:
         # Make sure the orchestrator runs sync so we stream the logs
         key = settings_utils.get_stack_component_setting_key(orchestrator)
         pipeline_settings[key] = BaseSettings(synchronous=True)
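The one-line change above is a Pydantic compatibility fix: recent Pydantic 2.x releases deprecate reading `model_fields` from a model *instance*, so the lookup now goes through the class. A self-contained illustration:

```python
from pydantic import BaseModel

class OrchestratorConfig(BaseModel):
    synchronous: bool = True

config = OrchestratorConfig()

# Deprecated on newer Pydantic 2.x: "synchronous" in config.model_fields
# Preferred, and what the code above now does:
print("synchronous" in type(config).model_fields)  # True
```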
zenml/zen_server/auth.py
CHANGED
@@ -15,7 +15,7 @@

 from contextvars import ContextVar
 from datetime import datetime, timedelta
-from typing import Callable, Optional, Union
+from typing import Callable, Optional, Tuple, Union
 from urllib.parse import urlencode, urlparse
 from uuid import UUID, uuid4

@@ -33,9 +33,12 @@ from zenml.analytics.context import AnalyticsContext
 from zenml.constants import (
     API,
     DEFAULT_USERNAME,
+    DEFAULT_ZENML_SERVER_GENERIC_API_TOKEN_LIFETIME,
+    ENV_ZENML_WORKLOAD_TOKEN_EXPIRATION_LEEWAY,
     EXTERNAL_AUTHENTICATOR_TIMEOUT,
     LOGIN,
     VERSION_1,
+    handle_int_env_var,
 )
 from zenml.enums import (
     AuthScheme,
@@ -420,28 +423,32 @@ def authenticate_credentials(
         @cache_result(expiry=30)
         def get_pipeline_run_status(
             pipeline_run_id: UUID,
-        ) -> Optional[ExecutionStatus]:
+        ) -> Tuple[Optional[ExecutionStatus], Optional[datetime]]:
             """Get the status of a pipeline run.

             Args:
                 pipeline_run_id: The pipeline run ID.

             Returns:
-                The pipeline run status or None if the pipeline
-                exist.
+                The pipeline run status and end time or None if the pipeline
+                run does not exist.
             """
             try:
                 pipeline_run = zen_store().get_run(
-                    pipeline_run_id, hydrate=
+                    pipeline_run_id, hydrate=True
                 )
             except KeyError:
-                return None
+                return None, None

-            return
+            return (
+                pipeline_run.status,
+                pipeline_run.end_time,
+            )

+        (
+            pipeline_run_status,
+            pipeline_run_end_time,
+        ) = get_pipeline_run_status(decoded_token.pipeline_run_id)
         if pipeline_run_status is None:
             error = (
                 f"Authentication error: error retrieving token pipeline run "
@@ -450,59 +457,35 @@ def authenticate_credentials(
             logger.error(error)
             raise CredentialsNotValid(error)

+        leeway = handle_int_env_var(
+            ENV_ZENML_WORKLOAD_TOKEN_EXPIRATION_LEEWAY,
+            DEFAULT_ZENML_SERVER_GENERIC_API_TOKEN_LIFETIME,
+        )
         if pipeline_run_status.is_finished:
-                step_run_id: The step run ID.
-
-            Returns:
-                The step run status or None if the step run does not exist.
-            """
-            try:
-                step_run = zen_store().get_run_step(
-                    step_run_id, hydrate=False
+            if leeway < 0:
+                # The token should never expire, we don't need to check
+                # the end time.
+                pass
+            elif (
+                # We don't know the end time. This should never happen, but
+                # just in case we always expire the token.
+                pipeline_run_end_time is None
+                # Calculate whether the token has expired.
+                or utc_now(tz_aware=pipeline_run_end_time)
+                > pipeline_run_end_time + timedelta(seconds=leeway)
+            ):
+                error = (
+                    f"The pipeline run {decoded_token.pipeline_run_id} has "
+                    "finished and API tokens scoped to it are no longer "
+                    "valid. If you want to increase the expiration time "
+                    "of the token to allow steps to continue for longer "
+                    "after other steps have failed, you can do so by "
+                    "configuring the "
+                    f"`{ENV_ZENML_WORKLOAD_TOKEN_EXPIRATION_LEEWAY}` "
+                    "ZenML server environment variable."
                 )
-
-            return step_run.status
-
-        step_run_status = get_step_run_status(decoded_token.step_run_id)
-        if step_run_status is None:
-            error = (
-                f"Authentication error: error retrieving token step run "
-                f"{decoded_token.step_run_id}"
-            )
-            logger.error(error)
-            raise CredentialsNotValid(error)
-
-        if step_run_status.is_finished:
-            error = (
-                f"The execution of step run "
-                f"{decoded_token.step_run_id} has already concluded and "
-                "API tokens scoped to it are no longer valid."
-            )
-            logger.error(error)
-            raise CredentialsNotValid(error)
+                logger.error(error)
+                raise CredentialsNotValid(error)

         auth_context = AuthContext(
             user=user_model,
@@ -861,7 +844,6 @@ def generate_access_token(
     expires_in: Optional[int] = None,
     schedule_id: Optional[UUID] = None,
     pipeline_run_id: Optional[UUID] = None,
-    step_run_id: Optional[UUID] = None,
 ) -> OAuthTokenResponse:
     """Generates an access token for the given user.

@@ -880,7 +862,6 @@ def generate_access_token(
            expire.
        schedule_id: The ID of the schedule to scope the token to.
        pipeline_run_id: The ID of the pipeline run to scope the token to.
-       step_run_id: The ID of the step run to scope the token to.

    Returns:
        An authentication response with an access token.
@@ -956,7 +937,6 @@ def generate_access_token(
        api_key_id=api_key.id if api_key else None,
        schedule_id=schedule_id,
        pipeline_run_id=pipeline_run_id,
-       step_run_id=step_run_id,
        # Set the session ID if this is a cross-site request
        session_id=session_id,
    ).encode(expires=expires)
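In isolation, the expiration check added above behaves roughly as follows (sketch only; the real code reads the leeway via `handle_int_env_var(ENV_ZENML_WORKLOAD_TOKEN_EXPIRATION_LEEWAY, ...)` and uses ZenML's `utc_now` helper for timezone handling):

```python
from datetime import datetime, timedelta, timezone
from typing import Optional

def workload_token_expired(
    run_end_time: Optional[datetime], leeway_seconds: int
) -> bool:
    if leeway_seconds < 0:
        # Negative leeway: the run-scoped token never expires.
        return False
    if run_end_time is None:
        # Unknown end time: expire the token to be safe.
        return True
    return datetime.now(timezone.utc) > run_end_time + timedelta(
        seconds=leeway_seconds
    )

finished_two_hours_ago = datetime.now(timezone.utc) - timedelta(hours=2)
print(workload_token_expired(finished_two_hours_ago, 3600))  # True
```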
zenml/zen_server/download_utils.py
CHANGED
@@ -18,7 +18,6 @@ import tarfile
 import tempfile
 from typing import (
     TYPE_CHECKING,
-    Optional,
 )

 from zenml.artifacts.utils import _load_artifact_store
@@ -78,20 +77,15 @@ def verify_artifact_is_downloadable(

 def create_artifact_archive(
     artifact: "ArtifactVersionResponse",
-    archive_path: Optional[str] = None,
 ) -> str:
     """Create an archive of the given artifact.

     Args:
         artifact: The artifact to archive.
-        archive_path: The path to which to save the archive.

     Returns:
         The path to the created archive.
     """
-    if archive_path is None:
-        archive_path = tempfile.mktemp()
-
     artifact_store = verify_artifact_is_downloadable(artifact)

     def _prepare_tarinfo(path: str) -> tarfile.TarInfo:
@@ -101,26 +95,29 @@ def create_artifact_archive(
         tarinfo.size = size
         return tarinfo

+    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+        with tarfile.open(fileobj=temp_file, mode="w:gz") as tar:
+            if artifact_store.isdir(artifact.uri):
+                for dir, _, files in artifact_store.walk(artifact.uri):
+                    dir = dir.decode() if isinstance(dir, bytes) else dir
+                    dir_info = tarfile.TarInfo(
+                        name=os.path.relpath(dir, artifact.uri)
+                    )
+                    dir_info.type = tarfile.DIRTYPE
+                    dir_info.mode = 0o755
+                    tar.addfile(dir_info)
+
+                    for file in files:
+                        file = (
+                            file.decode() if isinstance(file, bytes) else file
+                        )
+                        path = os.path.join(dir, file)
+                        tarinfo = _prepare_tarinfo(path)
+                        with artifact_store.open(path, "rb") as f:
+                            tar.addfile(tarinfo, fileobj=f)
+            else:
+                tarinfo = _prepare_tarinfo(artifact.uri)
+                with artifact_store.open(artifact.uri, "rb") as f:
+                    tar.addfile(tarinfo, fileobj=f)
+
+        return temp_file.name
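The rewrite above drops the `archive_path` parameter and replaces the `tempfile.mktemp()` pattern with `NamedTemporaryFile(delete=False)`, which actually creates the file and avoids the race inherent to `mktemp`. A minimal standalone version of the same archiving pattern, using a local file instead of an artifact store:

```python
import os
import tarfile
import tempfile

def archive_file(path: str) -> str:
    """Pack a single local file into a .tar.gz and return the archive path."""
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        with tarfile.open(fileobj=temp_file, mode="w:gz") as tar:
            tar.add(path, arcname=os.path.basename(path))
        return temp_file.name

# The caller is responsible for deleting the returned file when done.
```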
zenml/zen_server/jwt.py
CHANGED
@@ -54,7 +54,6 @@ class JWTToken(BaseModel):
     api_key_id: Optional[UUID] = None
     schedule_id: Optional[UUID] = None
     pipeline_run_id: Optional[UUID] = None
-    step_run_id: Optional[UUID] = None
     session_id: Optional[UUID] = None
     claims: Dict[str, Any] = {}

@@ -148,16 +147,6 @@ class JWTToken(BaseModel):
                     "UUID"
                 )

-        step_run_id: Optional[UUID] = None
-        if "step_run_id" in claims:
-            try:
-                step_run_id = UUID(claims.pop("step_run_id"))
-            except ValueError:
-                raise CredentialsNotValid(
-                    "Invalid JWT token: the step_run_id claim is not a valid "
-                    "UUID"
-                )
-
         session_id: Optional[UUID] = None
         if "session_id" in claims:
             try:
@@ -174,7 +163,6 @@ class JWTToken(BaseModel):
             api_key_id=api_key_id,
             schedule_id=schedule_id,
             pipeline_run_id=pipeline_run_id,
-            step_run_id=step_run_id,
             session_id=session_id,
             claims=claims,
         )
@@ -212,8 +200,6 @@ class JWTToken(BaseModel):
             claims["schedule_id"] = str(self.schedule_id)
         if self.pipeline_run_id:
             claims["pipeline_run_id"] = str(self.pipeline_run_id)
-        if self.step_run_id:
-            claims["step_run_id"] = str(self.step_run_id)
         if self.session_id:
             claims["session_id"] = str(self.session_id)
