zenml-nightly 0.80.2.dev20250415__py3-none-any.whl → 0.80.2.dev20250416__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zenml/VERSION +1 -1
- zenml/cli/utils.py +13 -11
- zenml/config/compiler.py +1 -0
- zenml/config/global_config.py +1 -1
- zenml/config/pipeline_configurations.py +1 -0
- zenml/config/pipeline_run_configuration.py +1 -0
- zenml/constants.py +4 -1
- zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +47 -5
- zenml/integrations/gcp/vertex_custom_job_parameters.py +15 -1
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +0 -1
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +0 -1
- zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py +0 -3
- zenml/logging/step_logging.py +41 -21
- zenml/login/credentials_store.py +31 -0
- zenml/models/v2/base/base.py +8 -4
- zenml/models/v2/base/filter.py +1 -1
- zenml/models/v2/core/pipeline_run.py +19 -0
- zenml/orchestrators/step_launcher.py +2 -3
- zenml/orchestrators/step_runner.py +2 -2
- zenml/orchestrators/utils.py +2 -5
- zenml/pipelines/pipeline_context.py +1 -0
- zenml/pipelines/pipeline_decorator.py +4 -0
- zenml/pipelines/pipeline_definition.py +83 -22
- zenml/pipelines/run_utils.py +4 -0
- zenml/steps/utils.py +1 -1
- zenml/zen_server/auth.py +44 -64
- zenml/zen_server/download_utils.py +26 -29
- zenml/zen_server/jwt.py +0 -14
- zenml/zen_server/routers/auth_endpoints.py +5 -36
- zenml/zen_server/routers/pipeline_deployments_endpoints.py +63 -26
- zenml/zen_server/routers/runs_endpoints.py +57 -0
- zenml/zen_server/template_execution/utils.py +1 -1
- zenml/zen_stores/rest_zen_store.py +16 -13
- zenml/zen_stores/schemas/pipeline_run_schemas.py +1 -0
- zenml/zen_stores/sql_zen_store.py +18 -0
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/METADATA +2 -1
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/RECORD +40 -40
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.80.2.dev20250415.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/entry_points.txt +0 -0
zenml/VERSION
CHANGED
@@ -1 +1 @@
-0.80.2.dev20250415
+0.80.2.dev20250416
zenml/cli/utils.py
CHANGED
@@ -308,13 +308,13 @@ def print_pydantic_models(
         if isinstance(model, BaseIdentifiedResponse):
             include_columns = ["id"]

-            if "name" in model.model_fields:
+            if "name" in type(model).model_fields:
                 include_columns.append("name")

             include_columns.extend(
                 [
                     k
-                    for k in model.get_body().model_fields.keys()
+                    for k in type(model.get_body()).model_fields.keys()
                     if k not in exclude_columns
                 ]
             )
@@ -323,7 +323,9 @@ def print_pydantic_models(
             include_columns.extend(
                 [
                     k
-                    for k in model.get_metadata().model_fields.keys()
+                    for k in type(
+                        model.get_metadata()
+                    ).model_fields.keys()
                     if k not in exclude_columns
                 ]
             )
@@ -347,7 +349,7 @@ def print_pydantic_models(
             # we want to attempt to represent them by name, if they contain
             # such a field, else the id is used
             if isinstance(value, BaseIdentifiedResponse):
-                if "name" in value.model_fields:
+                if "name" in type(value).model_fields:
                     items[k] = str(getattr(value, "name"))
                 else:
                     items[k] = str(value.id)
@@ -357,7 +359,7 @@ def print_pydantic_models(
             elif isinstance(value, list):
                 for v in value:
                     if isinstance(v, BaseIdentifiedResponse):
-                        if "name" in v.model_fields:
+                        if "name" in type(v).model_fields:
                             items.setdefault(k, []).append(
                                 str(getattr(v, "name"))
                             )
@@ -448,13 +450,13 @@ def print_pydantic_model(
     if isinstance(model, BaseIdentifiedResponse):
         include_columns = ["id"]

-        if "name" in model.model_fields:
+        if "name" in type(model).model_fields:
            include_columns.append("name")

        include_columns.extend(
            [
                k
-                for k in model.get_body().model_fields.keys()
+                for k in type(model.get_body()).model_fields.keys()
                if k not in exclude_columns
            ]
        )
@@ -463,7 +465,7 @@ def print_pydantic_model(
        include_columns.extend(
            [
                k
-                for k in model.get_metadata().model_fields.keys()
+                for k in type(model.get_metadata()).model_fields.keys()
                if k not in exclude_columns
            ]
        )
@@ -482,7 +484,7 @@ def print_pydantic_model(
    for k in include_columns:
        value = getattr(model, k)
        if isinstance(value, BaseIdentifiedResponse):
-            if "name" in value.model_fields:
+            if "name" in type(value).model_fields:
                items[k] = str(getattr(value, "name"))
            else:
                items[k] = str(value.id)
@@ -492,7 +494,7 @@ def print_pydantic_model(
        elif isinstance(value, list):
            for v in value:
                if isinstance(v, BaseIdentifiedResponse):
-                    if "name" in v.model_fields:
+                    if "name" in type(v).model_fields:
                        items.setdefault(k, []).append(str(getattr(v, "name")))
                    else:
                        items.setdefault(k, []).append(str(v.id))
@@ -2138,7 +2140,7 @@ def _scrub_secret(config: StackComponentConfig) -> Dict[str, Any]:
        A configuration with secret values removed.
    """
    config_dict = {}
-   config_fields = config.model_fields
+   config_fields = type(config).model_fields
    for key, value in config_fields.items():
        if getattr(config, key):
            if secret_utils.is_secret_field(value):
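
Note: the changes above (and the matching ones in global_config.py, base.py, and filter.py below) all move `model_fields` access from the instance to the class. A minimal sketch of the motivation, assuming Pydantic v2 (which deprecates instance access to `model_fields` as of 2.11):

    from pydantic import BaseModel


    class User(BaseModel):
        id: int
        name: str


    user = User(id=1, name="Ada")

    # Deprecated on instances in recent Pydantic 2.x releases:
    # "name" in user.model_fields  # emits a DeprecationWarning

    # Preferred: read the field definitions off the class.
    print("name" in type(user).model_fields)  # True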
zenml/config/compiler.py
CHANGED
@@ -210,6 +210,7 @@ class Compiler:
             enable_artifact_metadata=config.enable_artifact_metadata,
             enable_artifact_visualization=config.enable_artifact_visualization,
             enable_step_logs=config.enable_step_logs,
+            enable_pipeline_logs=config.enable_pipeline_logs,
             settings=config.settings,
             tags=config.tags,
             extra=config.extra,
zenml/config/global_config.py
CHANGED
@@ -447,7 +447,7 @@ class GlobalConfiguration(BaseModel, metaclass=GlobalConfigMetaClass):
         """
         environment_vars = {}

-        for key in self.model_fields.keys():
+        for key in type(self).model_fields.keys():
             if key == "store":
                 # The store configuration uses its own environment variable
                 # naming scheme

zenml/config/pipeline_configurations.py
CHANGED
@@ -41,6 +41,7 @@ class PipelineConfigurationUpdate(StrictBaseModel):
     enable_artifact_metadata: Optional[bool] = None
     enable_artifact_visualization: Optional[bool] = None
     enable_step_logs: Optional[bool] = None
+    enable_pipeline_logs: Optional[bool] = None
     settings: Dict[str, SerializeAsAny[BaseSettings]] = {}
     tags: Optional[List[Union[str, "Tag"]]] = None
     extra: Dict[str, Any] = {}

zenml/config/pipeline_run_configuration.py
CHANGED
@@ -40,6 +40,7 @@ class PipelineRunConfiguration(
     enable_artifact_metadata: Optional[bool] = None
     enable_artifact_visualization: Optional[bool] = None
     enable_step_logs: Optional[bool] = None
+    enable_pipeline_logs: Optional[bool] = None
     schedule: Optional[Schedule] = None
     build: Union[PipelineBuildBase, UUID, None] = Field(
         default=None, union_mode="left_to_right"
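
Note: the new `enable_pipeline_logs` flag now flows from `PipelineRunConfiguration` through the compiler into the compiled pipeline configuration. A hedged sketch of how it might be toggled, assuming it is accepted wherever the existing `enable_step_logs` flag is (the decorator and `with_options` usage below mirrors current ZenML conventions and is an assumption, not confirmed by this diff):

    from zenml import pipeline, step


    @step
    def trainer() -> None:
        print("training...")


    # Assumption: enable_pipeline_logs is accepted by the @pipeline
    # decorator just like the existing enable_step_logs flag.
    @pipeline(enable_pipeline_logs=False)
    def training_pipeline() -> None:
        trainer()


    # Per-run override, mirroring PipelineRunConfiguration above.
    training_pipeline.with_options(enable_pipeline_logs=True)()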
zenml/constants.py
CHANGED
@@ -168,6 +168,7 @@ ENV_ZENML_SKIP_STACK_VALIDATION = "ZENML_SKIP_STACK_VALIDATION"
 ENV_ZENML_SERVER = "ZENML_SERVER"
 ENV_ZENML_ENFORCE_TYPE_ANNOTATIONS = "ZENML_ENFORCE_TYPE_ANNOTATIONS"
 ENV_ZENML_ENABLE_IMPLICIT_AUTH_METHODS = "ZENML_ENABLE_IMPLICIT_AUTH_METHODS"
+ENV_ZENML_DISABLE_PIPELINE_LOGS_STORAGE = "ZENML_DISABLE_PIPELINE_LOGS_STORAGE"
 ENV_ZENML_DISABLE_STEP_LOGS_STORAGE = "ZENML_DISABLE_STEP_LOGS_STORAGE"
 ENV_ZENML_DISABLE_STEP_NAMES_IN_LOGS = "ZENML_DISABLE_STEP_NAMES_IN_LOGS"
 ENV_ZENML_IGNORE_FAILURE_HOOK = "ZENML_IGNORE_FAILURE_HOOK"
@@ -199,7 +200,9 @@ ENV_ZENML_RUN_SINGLE_STEPS_WITHOUT_STACK = (
 ENV_ZENML_PREVENT_CLIENT_SIDE_CACHING = "ZENML_PREVENT_CLIENT_SIDE_CACHING"
 ENV_ZENML_DISABLE_CREDENTIALS_DISK_CACHING = "DISABLE_CREDENTIALS_DISK_CACHING"
 ENV_ZENML_RUNNER_IMAGE_DISABLE_UV = "ZENML_RUNNER_IMAGE_DISABLE_UV"
-
+ENV_ZENML_WORKLOAD_TOKEN_EXPIRATION_LEEWAY = (
+    "ZENML_WORKLOAD_TOKEN_EXPIRATION_LEEWAY"
+)
 # Logging variables
 IS_DEBUG_ENV: bool = handle_bool_env_var(ENV_ZENML_DEBUG, default=False)

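
Note: `ZENML_DISABLE_PIPELINE_LOGS_STORAGE` sits alongside the existing step-level switch. A small sketch of how such a flag is typically consumed, assuming `handle_bool_env_var` (used in this module above) is importable from `zenml.constants` and behaves like a standard boolean env-var parser:

    import os

    # Opt out of storing pipeline-run logs, mirroring the existing
    # ZENML_DISABLE_STEP_LOGS_STORAGE switch (assumption: same semantics).
    os.environ["ZENML_DISABLE_PIPELINE_LOGS_STORAGE"] = "true"

    from zenml.constants import handle_bool_env_var

    disabled = handle_bool_env_var(
        "ZENML_DISABLE_PIPELINE_LOGS_STORAGE", default=False
    )
    print(disabled)  # True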
zenml/integrations/gcp/orchestrators/vertex_orchestrator.py
CHANGED
@@ -341,13 +341,55 @@ class VertexOrchestrator(ContainerizedOrchestrator, GoogleCredentialsMixin):
             self.config.workload_service_account
         )

+        # Create a dictionary of explicit parameters
+        params = custom_job_parameters.model_dump(
+            exclude_none=True, exclude={"additional_training_job_args"}
+        )
+
+        # Remove None values to let defaults be set by the function
+        params = {k: v for k, v in params.items() if v is not None}
+
+        # Add environment variables
+        params["env"] = [
+            {"name": key, "value": value} for key, value in environment.items()
+        ]
+
+        # Check if any advanced parameters will override explicit parameters
+        if custom_job_parameters.additional_training_job_args:
+            overridden_params = set(params.keys()) & set(
+                custom_job_parameters.additional_training_job_args.keys()
+            )
+            if overridden_params:
+                logger.warning(
+                    f"The following explicit parameters are being overridden by values in "
+                    f"additional_training_job_args: {', '.join(overridden_params)}. "
+                    f"This may lead to unexpected behavior. Consider using either explicit "
+                    f"parameters or additional_training_job_args, but not both for the same parameters."
+                )
+
+        # Add any advanced parameters - these will override explicit parameters if provided
+        params.update(custom_job_parameters.additional_training_job_args)
+
+        # Add other parameters from orchestrator config if not already in params
+        if self.config.network and "network" not in params:
+            params["network"] = self.config.network
+
+        if (
+            self.config.encryption_spec_key_name
+            and "encryption_spec_key_name" not in params
+        ):
+            params["encryption_spec_key_name"] = (
+                self.config.encryption_spec_key_name
+            )
+        if (
+            self.config.workload_service_account
+            and "service_account" not in params
+        ):
+            params["service_account"] = self.config.workload_service_account
+
         custom_job_component = create_custom_training_job_from_component(
             component_spec=component,
-            env=[
-                {"name": key, "value": value}
-                for key, value in environment.items()
-            ],
-            **custom_job_parameters.model_dump(),
+            **params,
         )

         return custom_job_component
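
Note: the warning above is driven by plain set intersection over dictionary keys. A standalone sketch of the same precedence pattern, with hypothetical parameter values:

    # Hypothetical explicit parameters and escape-hatch overrides,
    # mirroring the precedence logic in the orchestrator above.
    params = {"machine_type": "n1-standard-4", "boot_disk_size_gb": 100}
    additional_args = {"machine_type": "n1-highmem-8", "timeout": "3600s"}

    # Keys present in both dicts are about to be overridden.
    overridden = set(params) & set(additional_args)
    if overridden:
        print(f"Overriding explicit parameters: {', '.join(sorted(overridden))}")

    params.update(additional_args)
    print(params["machine_type"])  # n1-highmem-8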
zenml/integrations/gcp/vertex_custom_job_parameters.py
CHANGED
@@ -13,7 +13,7 @@
 # permissions and limitations under the License.
 """Vertex custom job parameter model."""

-from typing import Optional
+from typing import Any, Dict, Optional

 from pydantic import BaseModel

@@ -37,8 +37,21 @@ class VertexCustomJobParameters(BaseModel):
     boot_disk_type: Type of the boot disk. (Default: pd-ssd)
         https://cloud.google.com/vertex-ai/docs/training/configure-compute#boot_disk_options
     persistent_resource_id: The ID of the persistent resource to use for the job.
+        If empty (default), the job will not use a persistent resource.
+        When using a persistent resource, you must also specify a service_account.
+        Conversely, when explicitly setting this to an empty string, you
+        should not specify a service_account (ZenML will handle this automatically).
         https://cloud.google.com/vertex-ai/docs/training/persistent-resource-overview
     service_account: Specifies the service account to be used.
+        This is required when using a persistent_resource_id, and
+        should not be set when persistent_resource_id="".
+    additional_training_job_args: Additional arguments to pass to the create_custom_training_job_from_component
+        function. This allows passing any additional parameters supported by the Google
+        Cloud Pipeline Components library without requiring ZenML to update its API.
+        Note: If you specify parameters in this dictionary that are also defined as explicit
+        attributes (like machine_type or boot_disk_size_gb), the values in this dictionary
+        will override the explicit values.
+        See: https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-2.19.0/api/v1/custom_job.html
     """

     accelerator_type: Optional[str] = None
@@ -48,3 +61,4 @@ class VertexCustomJobParameters(BaseModel):
     boot_disk_type: str = "pd-ssd"
     persistent_resource_id: Optional[str] = None
     service_account: Optional[str] = None
+    additional_training_job_args: Dict[str, Any] = {}
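
Note: a hedged usage sketch for the new field. `VertexCustomJobParameters` comes from this diff; the surrounding settings wiring (`VertexOrchestratorSettings`, the `orchestrator.vertex` settings key) follows the usual ZenML Vertex setup and should be verified against the docs:

    from zenml import step
    from zenml.integrations.gcp.flavors.vertex_orchestrator_flavor import (
        VertexOrchestratorSettings,
    )
    from zenml.integrations.gcp.vertex_custom_job_parameters import (
        VertexCustomJobParameters,
    )

    vertex_settings = VertexOrchestratorSettings(
        custom_job_parameters=VertexCustomJobParameters(
            machine_type="n1-standard-8",
            boot_disk_size_gb=200,
            # Anything else accepted by create_custom_training_job_from_component;
            # on key conflicts these values win, as documented above.
            additional_training_job_args={"timeout": "86400s"},
        ),
    )


    @step(settings={"orchestrator.vertex": vertex_settings})
    def train() -> None:
        ...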
zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py
CHANGED
@@ -543,7 +543,6 @@ class KubernetesOrchestrator(ContainerizedOrchestrator):
             mount_local_stores=self.config.is_local,
         )

-        logger.info("Waiting for Kubernetes orchestrator pod to start...")
         kube_utils.create_and_wait_for_pod_to_start(
             core_api=self._k8s_core_api,
             pod_display_name="Kubernetes orchestrator pod",

zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py
CHANGED
@@ -173,7 +173,6 @@ def main() -> None:
         mount_local_stores=mount_local_stores,
     )

-    logger.info(f"Waiting for pod of step `{step_name}` to start...")
     kube_utils.create_and_wait_for_pod_to_start(
         core_api=core_api,
         pod_display_name=f"pod for step `{step_name}`",

zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py
CHANGED
@@ -218,9 +218,6 @@ class KubernetesStepOperator(BaseStepOperator):
             mount_local_stores=False,
         )

-        logger.info(
-            "Waiting for pod of step `%s` to start...", info.pipeline_step_name
-        )
         kube_utils.create_and_wait_for_pod_to_start(
             core_api=self._k8s_core_api,
             pod_display_name=f"pod of step `{info.pipeline_step_name}`",
zenml/logging/step_logging.py
CHANGED
@@ -13,6 +13,7 @@
 # permissions and limitations under the License.
 """ZenML logging handler."""

+import logging
 import os
 import re
 import sys
@@ -48,6 +49,7 @@ logger = get_logger(__name__)
 redirected: ContextVar[bool] = ContextVar("redirected", default=False)

 LOGS_EXTENSION = ".log"
+PIPELINE_RUN_LOGS_FOLDER = "pipeline_runs"


 def remove_ansi_escape_codes(text: str) -> str:
@@ -65,14 +67,14 @@ def remove_ansi_escape_codes(text: str) -> str:

 def prepare_logs_uri(
     artifact_store: "BaseArtifactStore",
-    step_name: str,
+    step_name: Optional[str] = None,
     log_key: Optional[str] = None,
 ) -> str:
     """Generates and prepares a URI for the log file or folder for a step.

     Args:
         artifact_store: The artifact store on which the artifact will be stored.
-        step_name: Name of the step.
+        step_name: Name of the step. Skipped for global pipeline run logs.
         log_key: The unique identification key of the log file.

     Returns:
@@ -81,11 +83,8 @@ def prepare_logs_uri(
     if log_key is None:
         log_key = str(uuid4())

-    logs_base_uri = os.path.join(
-        artifact_store.path,
-        step_name,
-        "logs",
-    )
+    subfolder = step_name or PIPELINE_RUN_LOGS_FOLDER
+    logs_base_uri = os.path.join(artifact_store.path, subfolder, "logs")

     # Create the dir
     if not artifact_store.exists(logs_base_uri):
@@ -210,7 +209,7 @@ def fetch_logs(
         artifact_store.cleanup()


-class StepLogsStorage:
+class PipelineLogsStorage:
     """Helper class which buffers and stores logs to a given URI."""

     def __init__(
@@ -324,6 +323,18 @@ class StepLogsStorage:
             self.disabled = True

         try:
+            # The configured logging handler uses a lock to ensure that
+            # logs generated by different threads are not interleaved.
+            # Given that most artifact stores are based on fsspec, which
+            # use a separate thread for async operations, it may happen that
+            # the fsspec library itself will log something, which will end
+            # up in a deadlock.
+            # To avoid this, we temporarily disable the lock in the logging
+            # handler while writing to the file.
+            logging_handler = logging.getLogger().handlers[0]
+            logging_lock = logging_handler.lock
+            logging_handler.lock = None
+
             if self.buffer:
                 if self.artifact_store.config.IS_IMMUTABLE_FILESYSTEM:
                     _logs_uri = self._get_timestamped_filename()
@@ -353,6 +364,9 @@ class StepLogsStorage:
             # I/O errors.
             logger.error(f"Error while trying to write logs: {e}")
         finally:
+            # Restore the original logging handler lock
+            logging_handler.lock = logging_lock
+
             self.buffer = []
             self.last_save_time = time.time()

@@ -418,27 +432,32 @@ class StepLogsStorage:
         )


-class StepLogsStorageContext:
-    """Context manager which patches stdout and stderr during step execution."""
+class PipelineLogsStorageContext:
+    """Context manager which patches stdout and stderr during pipeline run execution."""

     def __init__(
-        self, logs_uri: str, artifact_store: "BaseArtifactStore"
+        self,
+        logs_uri: str,
+        artifact_store: "BaseArtifactStore",
+        prepend_step_name: bool = True,
     ) -> None:
         """Initializes and prepares a storage object.

         Args:
             logs_uri: the URI of the logs file.
-            artifact_store: Artifact Store from the current step context.
+            artifact_store: Artifact Store from the current pipeline run context.
+            prepend_step_name: Whether to prepend the step name to the logs.
         """
-        self.storage = StepLogsStorage(
+        self.storage = PipelineLogsStorage(
             logs_uri=logs_uri, artifact_store=artifact_store
         )
+        self.prepend_step_name = prepend_step_name

-    def __enter__(self) -> "StepLogsStorageContext":
+    def __enter__(self) -> "PipelineLogsStorageContext":
         """Enter condition of the context manager.

         Wraps the `write` method of both stderr and stdout, so each incoming
-        message gets stored in the step logs storage.
+        message gets stored in the pipeline logs storage.

         Returns:
             self
@@ -499,14 +518,17 @@ class StepLogsStorageContext:
         """

        def wrapped_write(*args: Any, **kwargs: Any) -> Any:
-            step_names_disabled = handle_bool_env_var(
-                ENV_ZENML_DISABLE_STEP_NAMES_IN_LOGS,
-                default=False,
+            step_names_disabled = (
+                handle_bool_env_var(
+                    ENV_ZENML_DISABLE_STEP_NAMES_IN_LOGS, default=False
+                )
+                or not self.prepend_step_name
            )

            if step_names_disabled:
                output = method(*args, **kwargs)
            else:
+                message = args[0]
                # Try to get step context if not available yet
                step_context = None
                try:
@@ -515,9 +537,7 @@ class StepLogsStorageContext:
                    pass

                if step_context and args[0] != "\n":
-                    message = f"[{step_context.step_name}] " + args[0]
-                else:
-                    message = args[0]
+                    message = f"[{step_context.step_name}] " + message

                output = method(message, *args[1:], **kwargs)

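
Note: the try/finally pair above implements a save/restore around the handler lock. A standalone sketch of that pattern (simplified; production code should handle the case of no configured handlers):

    import logging

    handler = logging.getLogger().handlers[0]  # assumes at least one handler

    saved_lock = handler.lock
    # With lock set to None, Handler.acquire()/release() become no-ops,
    # so a log record emitted from an fsspec worker thread cannot deadlock
    # against the write below.
    handler.lock = None
    try:
        flush_buffered_logs()  # hypothetical stand-in for the artifact-store write
    finally:
        handler.lock = saved_lock  # always restore, even on failure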
zenml/login/credentials_store.py
CHANGED
@@ -25,6 +25,7 @@ from zenml.constants import (
 from zenml.io import fileio
 from zenml.logger import get_logger
 from zenml.login.credentials import APIToken, ServerCredentials, ServerType
+from zenml.login.pro.constants import ZENML_PRO_API_URL
 from zenml.login.pro.workspace.models import WorkspaceRead
 from zenml.models import OAuthTokenResponse, ServerModel
 from zenml.utils import yaml_utils
@@ -396,6 +397,36 @@ class CredentialsStore(metaclass=SingletonMetaClass):
         """
         return self.get_pro_token(pro_api_url) is not None

+    def can_login(self, server_url: str) -> bool:
+        """Check if credentials to login to the given server exist.
+
+        Args:
+            server_url: The server URL for which to check the authentication.
+
+        Returns:
+            True if the credentials store contains credentials that can be used
+            to login to the given server URL, False otherwise.
+        """
+        self.check_and_reload_from_file()
+        credentials = self.get_credentials(server_url)
+        if not credentials:
+            return False
+
+        if credentials.api_key is not None:
+            return True
+        elif (
+            credentials.username is not None
+            and credentials.password is not None
+        ):
+            return True
+        elif credentials.type == ServerType.PRO:
+            pro_api_url = credentials.pro_api_url or ZENML_PRO_API_URL
+            pro_token = self.get_pro_token(pro_api_url, allow_expired=False)
+            if pro_token:
+                return True
+
+        return False
+
     def set_api_key(
         self,
         server_url: str,
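
Note: a small usage sketch for the new helper, assuming `get_credentials_store()` is the module's accessor for the singleton; the server URL is illustrative:

    from zenml.login.credentials_store import get_credentials_store

    credentials_store = get_credentials_store()

    # True only if a usable API key, username/password pair, or unexpired
    # ZenML Pro token is on record for this server URL.
    if credentials_store.can_login("https://zenml.example.com"):
        print("Stored credentials can be used to log in.")
    else:
        print("A fresh `zenml login` is required.")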
zenml/models/v2/base/base.py
CHANGED
@@ -134,7 +134,7 @@ class BaseResponse(BaseZenModel, Generic[AnyBody, AnyMetadata, AnyResources]):
         )

         # Check if the name has changed
-        if "name" in self.model_fields:
+        if "name" in type(self).model_fields:
             original_name = getattr(self, "name")
             hydrated_name = getattr(hydrated_model, "name")

@@ -172,7 +172,7 @@ class BaseResponse(BaseZenModel, Generic[AnyBody, AnyMetadata, AnyResources]):
         )

         # Check all the fields in the body
-        for field in self.get_body().model_fields:
+        for field in type(self.get_body()).model_fields:
             original_value = getattr(self.get_body(), field)
             hydrated_value = getattr(hydrated_model.get_body(), field)

@@ -255,7 +255,9 @@ class BaseResponse(BaseZenModel, Generic[AnyBody, AnyMetadata, AnyResources]):
         """
         if self.metadata is None:
             # If the metadata is not there, check the class first.
-            metadata_annotation = self.model_fields["metadata"].annotation
+            metadata_annotation = (
+                type(self).model_fields["metadata"].annotation
+            )
             assert metadata_annotation is not None, (
                 "For each response model, an annotated metadata"
                 "field should exist."
@@ -293,7 +295,9 @@ class BaseResponse(BaseZenModel, Generic[AnyBody, AnyMetadata, AnyResources]):
         """
         if self.resources is None:
             # If the resources are not there, check the class first.
-            resources_annotation = self.model_fields["resources"].annotation
+            resources_annotation = (
+                type(self).model_fields["resources"].annotation
+            )
             assert resources_annotation is not None, (
                 "For each response model, an annotated resources"
                 "field should exist."
zenml/models/v2/base/filter.py
CHANGED
@@ -665,7 +665,7 @@ class BaseFilter(BaseModel):
             A list of Filter models.
         """
         return self._generate_filter_list(
-            {key: getattr(self, key) for key in self.model_fields}
+            {key: getattr(self, key) for key in type(self).model_fields}
         )

     @property
zenml/models/v2/core/pipeline_run.py
CHANGED
@@ -45,6 +45,7 @@ from zenml.models.v2.base.scoped import (
     RunMetadataFilterMixin,
     TaggableFilter,
 )
+from zenml.models.v2.core.logs import LogsRequest
 from zenml.models.v2.core.model_version import ModelVersionResponse
 from zenml.models.v2.core.tag import TagResponse
 from zenml.utils.tag_utils import Tag
@@ -55,6 +56,7 @@ if TYPE_CHECKING:
     from zenml.models import TriggerExecutionResponse
     from zenml.models.v2.core.artifact_version import ArtifactVersionResponse
     from zenml.models.v2.core.code_reference import CodeReferenceResponse
+    from zenml.models.v2.core.logs import LogsResponse
     from zenml.models.v2.core.pipeline import PipelineResponse
     from zenml.models.v2.core.pipeline_build import (
         PipelineBuildResponse,
@@ -124,6 +126,10 @@ class PipelineRunRequest(ProjectScopedRequest):
         default=None,
         title="Tags of the pipeline run.",
     )
+    logs: Optional[LogsRequest] = Field(
+        default=None,
+        title="Logs of the pipeline run.",
+    )

     model_config = ConfigDict(protected_namespaces=())

@@ -252,6 +258,10 @@ class PipelineRunResponseResources(ProjectScopedResponseResources):
     tags: List[TagResponse] = Field(
         title="Tags associated with the pipeline run.",
     )
+    logs: Optional["LogsResponse"] = Field(
+        title="Logs associated with this pipeline run.",
+        default=None,
+    )

     # TODO: In Pydantic v2, the `model_` is a protected namespaces for all
     # fields defined under base models. If not handled, this raises a warning.
@@ -579,6 +589,15 @@ class PipelineRunResponse(
         """
         return self.get_resources().tags

+    @property
+    def logs(self) -> Optional["LogsResponse"]:
+        """The `logs` property.
+
+        Returns:
+            the value of the property.
+        """
+        return self.get_resources().logs
+

 # ------------------ Filter Model ------------------

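
Note: with `logs` exposed as a lazily hydrated resource, run-level logs become reachable directly from the response model. A hedged sketch using the standard client (the run name is illustrative):

    from zenml.client import Client

    # Any existing pipeline run identifier works here.
    run = Client().get_pipeline_run("training_pipeline-2025_04_16-08_00_00_000000")

    # The new property resolves via get_resources(), like `tags` above.
    if run.logs is not None:
        print(run.logs.uri)  # storage location of the pipeline-run logs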
zenml/orchestrators/step_launcher.py
CHANGED
@@ -158,7 +158,7 @@ class StepLauncher:
                 step_name=self._step_name,
             )

-            logs_context = step_logging.StepLogsStorageContext(
+            logs_context = step_logging.PipelineLogsStorageContext(
                 logs_uri=logs_uri, artifact_store=self._stack.artifact_store
             )  # type: ignore[assignment]

@@ -240,7 +240,7 @@ class StepLauncher:
             # the external jobs in step operators
             if isinstance(
                 logs_context,
-                step_logging.StepLogsStorageContext,
+                step_logging.PipelineLogsStorageContext,
             ):
                 force_write_logs = partial(
                     logs_context.storage.save_to_file,
@@ -421,7 +421,6 @@ class StepLauncher:
         )
         environment = orchestrator_utils.get_config_environment_vars(
             pipeline_run_id=step_run_info.run_id,
-            step_run_id=step_run_info.step_run_id,
         )
         if last_retry:
             environment[ENV_ZENML_IGNORE_FAILURE_HOOK] = str(False)
|
@@ -40,7 +40,7 @@ from zenml.constants import (
|
|
40
40
|
from zenml.enums import ArtifactSaveType
|
41
41
|
from zenml.exceptions import StepInterfaceError
|
42
42
|
from zenml.logger import get_logger
|
43
|
-
from zenml.logging.step_logging import
|
43
|
+
from zenml.logging.step_logging import PipelineLogsStorageContext, redirected
|
44
44
|
from zenml.materializers.base_materializer import BaseMaterializer
|
45
45
|
from zenml.models.v2.core.step_run import StepRunInputResponse
|
46
46
|
from zenml.orchestrators.publish_utils import (
|
@@ -136,7 +136,7 @@ class StepRunner:
|
|
136
136
|
logs_context = nullcontext()
|
137
137
|
if step_logging_enabled and not redirected.get():
|
138
138
|
if step_run.logs:
|
139
|
-
logs_context =
|
139
|
+
logs_context = PipelineLogsStorageContext( # type: ignore[assignment]
|
140
140
|
logs_uri=step_run.logs.uri,
|
141
141
|
artifact_store=self._stack.artifact_store,
|
142
142
|
)
|