zenml-nightly 0.83.1.dev20250710__py3-none-any.whl → 0.83.1.dev20250711__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. zenml/VERSION +1 -1
  2. zenml/artifact_stores/base_artifact_store.py +51 -23
  3. zenml/artifacts/utils.py +3 -1
  4. zenml/cli/pipeline.py +13 -2
  5. zenml/constants.py +3 -0
  6. zenml/container_registries/base_container_registry.py +17 -5
  7. zenml/integrations/aws/flavors/sagemaker_orchestrator_flavor.py +150 -117
  8. zenml/integrations/aws/flavors/sagemaker_step_operator_flavor.py +43 -42
  9. zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +16 -7
  10. zenml/integrations/azure/orchestrators/azureml_orchestrator.py +18 -12
  11. zenml/integrations/bentoml/flavors/bentoml_model_deployer_flavor.py +7 -1
  12. zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +58 -23
  13. zenml/integrations/feast/flavors/feast_feature_store_flavor.py +18 -5
  14. zenml/integrations/gcp/flavors/vertex_experiment_tracker_flavor.py +10 -42
  15. zenml/integrations/gcp/flavors/vertex_orchestrator_flavor.py +99 -92
  16. zenml/integrations/gcp/google_credentials_mixin.py +13 -8
  17. zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +18 -9
  18. zenml/integrations/huggingface/__init__.py +1 -1
  19. zenml/integrations/hyperai/flavors/hyperai_orchestrator_flavor.py +28 -30
  20. zenml/integrations/kaniko/flavors/kaniko_image_builder_flavor.py +56 -40
  21. zenml/integrations/kubeflow/flavors/kubeflow_orchestrator_flavor.py +59 -48
  22. zenml/integrations/kubernetes/flavors/kubernetes_orchestrator_flavor.py +159 -121
  23. zenml/integrations/kubernetes/flavors/kubernetes_step_operator_flavor.py +48 -33
  24. zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +182 -1
  25. zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +7 -3
  26. zenml/integrations/lightning/flavors/lightning_orchestrator_flavor.py +41 -25
  27. zenml/integrations/mlflow/flavors/mlflow_experiment_tracker_flavor.py +51 -44
  28. zenml/integrations/mlflow/flavors/mlflow_model_deployer_flavor.py +9 -4
  29. zenml/integrations/neptune/flavors/neptune_experiment_tracker_flavor.py +13 -12
  30. zenml/integrations/s3/flavors/s3_artifact_store_flavor.py +32 -7
  31. zenml/integrations/vllm/flavors/vllm_model_deployer_flavor.py +7 -1
  32. zenml/integrations/wandb/flavors/wandb_experiment_tracker_flavor.py +34 -25
  33. zenml/integrations/whylogs/flavors/whylogs_data_validator_flavor.py +14 -11
  34. zenml/logging/step_logging.py +8 -7
  35. zenml/models/v2/core/pipeline_run.py +0 -59
  36. zenml/orchestrators/base_orchestrator.py +7 -1
  37. zenml/pipelines/build_utils.py +2 -1
  38. zenml/stack/authentication_mixin.py +6 -5
  39. zenml/stack/flavor.py +5 -1
  40. zenml/utils/code_utils.py +2 -1
  41. zenml/utils/docker_utils.py +22 -0
  42. zenml/utils/io_utils.py +18 -0
  43. zenml/utils/pipeline_docker_image_builder.py +4 -1
  44. zenml/utils/run_utils.py +101 -8
  45. zenml/zen_server/deploy/daemon/daemon_zen_server.py +4 -0
  46. zenml/zen_server/deploy/docker/docker_zen_server.py +2 -0
  47. zenml/zen_server/routers/runs_endpoints.py +20 -28
  48. {zenml_nightly-0.83.1.dev20250710.dist-info → zenml_nightly-0.83.1.dev20250711.dist-info}/METADATA +9 -22
  49. {zenml_nightly-0.83.1.dev20250710.dist-info → zenml_nightly-0.83.1.dev20250711.dist-info}/RECORD +52 -52
  50. {zenml_nightly-0.83.1.dev20250710.dist-info → zenml_nightly-0.83.1.dev20250711.dist-info}/LICENSE +0 -0
  51. {zenml_nightly-0.83.1.dev20250710.dist-info → zenml_nightly-0.83.1.dev20250711.dist-info}/WHEEL +0 -0
  52. {zenml_nightly-0.83.1.dev20250710.dist-info → zenml_nightly-0.83.1.dev20250711.dist-info}/entry_points.txt +0 -0
zenml/VERSION CHANGED
@@ -1 +1 @@
- 0.83.1.dev20250710
+ 0.83.1.dev20250711
zenml/artifact_stores/base_artifact_store.py CHANGED
@@ -33,11 +33,15 @@ from typing import (
      cast,
  )
  
- from pydantic import model_validator
+ from pydantic import Field, model_validator
  
- from zenml.constants import ENV_ZENML_SERVER
+ from zenml.constants import (
+     ENV_ZENML_SERVER,
+     ENV_ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS,
+     handle_bool_env_var,
+ )
  from zenml.enums import StackComponentType
- from zenml.exceptions import ArtifactStoreInterfaceError
+ from zenml.exceptions import ArtifactStoreInterfaceError, IllegalOperationError
  from zenml.io import fileio
  from zenml.logger import get_logger
  from zenml.stack import Flavor, StackComponent, StackComponentConfig
@@ -73,6 +77,12 @@ class _sanitize_paths:
          """
          self.func = func
          self.fixed_root_path = fixed_root_path
+         if ENV_ZENML_SERVER in os.environ:
+             self.allow_local_file_access = handle_bool_env_var(
+                 ENV_ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS, False
+             )
+         else:
+             self.allow_local_file_access = True
  
          self.path_args: List[int] = []
          self.path_kwargs: List[str] = []
@@ -93,7 +103,15 @@ class _sanitize_paths:
          Raises:
              FileNotFoundError: If the path is outside of the artifact store
                  bounds.
+             IllegalOperationError: If the path is a local file and the server
+                 is not configured to allow local file access.
          """
+         if not self.allow_local_file_access and not io_utils.is_remote(path):
+             raise IllegalOperationError(
+                 "Files in a local artifact store cannot be accessed from the "
+                 "server."
+             )
+ 
          if not path.startswith(self.fixed_root_path):
              raise FileNotFoundError(
                  f"File `{path}` is outside of "
@@ -169,9 +187,18 @@ class _sanitize_paths:
  
  
  class BaseArtifactStoreConfig(StackComponentConfig):
-     """Config class for `BaseArtifactStore`."""
+     """Config class for `BaseArtifactStore`.
  
-     path: str
+     Base configuration for artifact storage backends.
+     Field descriptions are defined inline using Field() descriptors.
+     """
+ 
+     path: str = Field(
+         description="Root path for artifact storage. Must be a valid URI supported by the "
+         "specific artifact store implementation. Examples: 's3://my-bucket/artifacts', "
+         "'/local/storage/path', 'gs://bucket-name/zenml-artifacts', 'azure://container/path'. "
+         "Path must be accessible with the configured credentials and permissions"
+     )
  
      SUPPORTED_SCHEMES: ClassVar[Set[str]]
      IS_IMMUTABLE_FILESYSTEM: ClassVar[bool] = False
@@ -435,40 +462,41 @@ class BaseArtifactStore(StackComponent):
              **kwargs: The keyword arguments to pass to the Pydantic object.
          """
          super(BaseArtifactStore, self).__init__(*args, **kwargs)
+         self._add_path_sanitization()
  
          # If running in a ZenML server environment, we don't register
          # the filesystems. We always use the artifact stores directly.
          if ENV_ZENML_SERVER not in os.environ:
              self._register()
  
+     def _add_path_sanitization(self) -> None:
+         """Add path sanitization to the artifact store."""
+         for method_name, method in inspect.getmembers(BaseArtifactStore):
+             if getattr(method, "__isabstractmethod__", False):
+                 method_implementation = getattr(self, method_name)
+                 sanitized_method = _sanitize_paths(
+                     method_implementation, self.path
+                 )
+                 setattr(self, method_name, sanitized_method)
+ 
      def _register(self) -> None:
          """Create and register a filesystem within the filesystem registry."""
          from zenml.io.filesystem import BaseFilesystem
          from zenml.io.filesystem_registry import default_filesystem_registry
          from zenml.io.local_filesystem import LocalFilesystem
  
-         overloads: Dict[str, Any] = {
-             "SUPPORTED_SCHEMES": self.config.SUPPORTED_SCHEMES,
-         }
-         for abc_method in inspect.getmembers(BaseArtifactStore):
-             if getattr(abc_method[1], "__isabstractmethod__", False):
-                 sanitized_method = _sanitize_paths(
-                     getattr(self, abc_method[0]), self.path
-                 )
-                 # prepare overloads for filesystem methods
-                 overloads[abc_method[0]] = staticmethod(sanitized_method)
- 
-                 # decorate artifact store methods
-                 setattr(
-                     self,
-                     abc_method[0],
-                     sanitized_method,
-                 )
- 
          # Local filesystem is always registered, no point in doing it again.
          if isinstance(self, LocalFilesystem):
              return
  
+         overloads: Dict[str, Any] = {
+             "SUPPORTED_SCHEMES": self.config.SUPPORTED_SCHEMES,
+         }
+         for method_name, method in inspect.getmembers(BaseArtifactStore):
+             if getattr(method, "__isabstractmethod__", False):
+                 method_implementation = getattr(self, method_name)
+                 overloads[method_name] = staticmethod(method_implementation)
+ 
          filesystem_class = type(
              self.__class__.__name__, (BaseFilesystem,), overloads
          )
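The change above replaces the ad-hoc method decoration inside `_register()` with a dedicated `_add_path_sanitization()` hook: every abstract filesystem method of `BaseArtifactStore` is wrapped by `_sanitize_paths`, which now also refuses non-remote paths on the server unless local file access is explicitly allowed. A minimal, self-contained sketch of that wrapping pattern (illustrative names only, not the actual ZenML classes):

import inspect
from abc import ABC, abstractmethod


class _CheckPaths:
    # Simplified stand-in for `_sanitize_paths`: validates the first argument.
    def __init__(self, func, root):
        self.func = func
        self.root = root

    def __call__(self, path, *args, **kwargs):
        if not path.startswith(self.root):
            raise FileNotFoundError(f"`{path}` is outside of `{self.root}`.")
        return self.func(path, *args, **kwargs)


class Store(ABC):
    def __init__(self, root):
        self.root = root
        # Wrap every abstract method of the base class on this instance,
        # mirroring what `_add_path_sanitization()` does above.
        for name, member in inspect.getmembers(Store):
            if getattr(member, "__isabstractmethod__", False):
                setattr(self, name, _CheckPaths(getattr(self, name), self.root))

    @abstractmethod
    def open(self, path, mode="r"):
        ...


class DummyStore(Store):
    def open(self, path, mode="r"):
        return f"opened {path}"


store = DummyStore("s3://bucket/")
print(store.open("s3://bucket/data.txt"))  # ok
# store.open("/tmp/data.txt")  # would raise FileNotFoundError

Because the wrappers are installed per instance, subclasses keep a plain interface while every call goes through the same path check.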
zenml/artifacts/utils.py CHANGED
@@ -49,6 +49,7 @@ from zenml.enums import (
  )
  from zenml.exceptions import (
      DoesNotExistException,
+     IllegalOperationError,
      StepContextError,
  )
  from zenml.io import fileio
@@ -925,6 +926,7 @@ def _load_file_from_artifact_store(
          DoesNotExistException: If the file does not exist in the artifact store.
          NotImplementedError: If the artifact store cannot open the file.
          IOError: If the artifact store rejects the request.
+         IllegalOperationError: If the artifact store rejects the request.
      """
      try:
          with artifact_store.open(uri, mode) as text_file:
@@ -946,7 +948,7 @@
              f"File '{uri}' does not exist in artifact store "
              f"'{artifact_store.name}'."
          )
-     except IOError as e:
+     except (IOError, IllegalOperationError) as e:
          raise e
      except Exception as e:
          logger.exception(e)
zenml/cli/pipeline.py CHANGED
@@ -604,16 +604,27 @@ def delete_pipeline_run(
  
  @runs.command("refresh")
  @click.argument("run_name_or_id", type=str, required=True)
- def refresh_pipeline_run(run_name_or_id: str) -> None:
+ @click.option(
+     "--include-steps",
+     is_flag=True,
+     default=False,
+     help="Also refresh the status of individual steps.",
+ )
+ def refresh_pipeline_run(
+     run_name_or_id: str, include_steps: bool = False
+ ) -> None:
      """Refresh the status of a pipeline run.
  
      Args:
          run_name_or_id: The name or ID of the pipeline run to refresh.
+         include_steps: If True, also refresh the status of individual steps.
      """
      try:
          # Fetch and update the run
          run = Client().get_pipeline_run(name_id_or_prefix=run_name_or_id)
-         run.refresh_run_status()
+         run_utils.refresh_run_status(
+             run=run, include_step_updates=include_steps
+         )
  
      except KeyError as e:
          cli_utils.error(str(e))
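The CLI now calls the `run_utils.refresh_run_status()` helper rather than a method on the run object, and the `runs refresh` command gains an `--include-steps` flag. A hedged sketch of calling the same helper from Python, assuming the `zenml.utils.run_utils` module path and the keyword names shown in the diff:

from zenml.client import Client
from zenml.utils import run_utils

# Fetch a run by name or ID and refresh its status; include_step_updates=True
# mirrors the new `--include-steps` flag of the `runs refresh` CLI command.
run = Client().get_pipeline_run(name_id_or_prefix="my_pipeline_run")
run_utils.refresh_run_status(run=run, include_step_updates=True)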
zenml/constants.py CHANGED
@@ -166,6 +166,9 @@ ENV_ZENML_DISABLE_CLIENT_SERVER_MISMATCH_WARNING = (
  ENV_ZENML_SKIP_IMAGE_BUILDER_DEFAULT = "ZENML_SKIP_IMAGE_BUILDER_DEFAULT"
  ENV_ZENML_SKIP_STACK_VALIDATION = "ZENML_SKIP_STACK_VALIDATION"
  ENV_ZENML_SERVER = "ZENML_SERVER"
+ ENV_ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS = (
+     "ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS"
+ )
  ENV_ZENML_ENFORCE_TYPE_ANNOTATIONS = "ZENML_ENFORCE_TYPE_ANNOTATIONS"
  ENV_ZENML_ENABLE_IMPLICIT_AUTH_METHODS = "ZENML_ENABLE_IMPLICIT_AUTH_METHODS"
  ENV_ZENML_DISABLE_PIPELINE_LOGS_STORAGE = "ZENML_DISABLE_PIPELINE_LOGS_STORAGE"
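The new constant backs the server-side guard added in `base_artifact_store.py` above: only when the variable is set to a truthy value in the server environment are files in a local artifact store served again. A small illustration of reading the flag with the `handle_bool_env_var` helper imported in that diff (the value set here is only an example):

import os

from zenml.constants import (
    ENV_ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS,
    handle_bool_env_var,
)

# Example only: opt a server process back into local-file access.
os.environ[ENV_ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS] = "true"

# Same call pattern as in the artifact store diff: defaults to False.
allow_local = handle_bool_env_var(ENV_ZENML_SERVER_ALLOW_LOCAL_FILE_ACCESS, False)
print(allow_local)  # True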
zenml/container_registries/base_container_registry.py CHANGED
@@ -16,7 +16,7 @@
  import re
  from typing import TYPE_CHECKING, Optional, Tuple, Type, cast
  
- from pydantic import field_validator
+ from pydantic import Field, field_validator
  
  from zenml.constants import DOCKER_REGISTRY_RESOURCE_TYPE
  from zenml.enums import StackComponentType
@@ -36,12 +36,24 @@ if TYPE_CHECKING:
  class BaseContainerRegistryConfig(AuthenticationConfigMixin):
      """Base config for a container registry.
  
-     Attributes:
-         uri: The URI of the container registry.
+     Configuration for connecting to container image registries.
+     Field descriptions are defined inline using Field() descriptors.
      """
  
-     uri: str
-     default_repository: Optional[str] = None
+     uri: str = Field(
+         description="Container registry URI (e.g., 'gcr.io' for Google Container "
+         "Registry, 'docker.io' for Docker Hub, 'registry.gitlab.com' for GitLab "
+         "Container Registry, 'ghcr.io' for GitHub Container Registry). This is "
+         "the base URL where container images will be pushed to and pulled from."
+     )
+     default_repository: Optional[str] = Field(
+         default=None,
+         description="Default repository namespace for image storage (e.g., "
+         "'username' for Docker Hub, 'project-id' for GCR, 'organization' for "
+         "GitHub Container Registry). If not specified, images will be stored at "
+         "the registry root. For Docker Hub this would mean only official images "
+         "can be pushed.",
+     )
  
      @field_validator("uri")
      @classmethod
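For readers skimming the new Field descriptions, a purely illustrative instantiation showing how `uri` and `default_repository` fit together (direct construction of the config object, which is not how a registry is normally registered on a stack):

from zenml.container_registries.base_container_registry import (
    BaseContainerRegistryConfig,
)

# Docker Hub registry whose images land under the "my-org" namespace,
# e.g. docker.io/my-org/<image>:<tag>.
config = BaseContainerRegistryConfig(uri="docker.io", default_repository="my-org")
print(config.uri, config.default_repository)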
@@ -37,100 +37,123 @@ DEFAULT_OUTPUT_DATA_S3_MODE = "EndOfJob"
37
37
 
38
38
 
39
39
  class SagemakerOrchestratorSettings(BaseSettings):
40
- """Settings for the Sagemaker orchestrator.
41
-
42
- Attributes:
43
- synchronous: If `True`, the client running a pipeline using this
44
- orchestrator waits until all steps finish running. If `False`,
45
- the client returns immediately and the pipeline is executed
46
- asynchronously. Defaults to `True`.
47
- instance_type: The instance type to use for the processing job.
48
- execution_role: The IAM role to use for the step execution.
49
- processor_role: DEPRECATED: use `execution_role` instead.
50
- volume_size_in_gb: The size of the EBS volume to use for the processing
51
- job.
52
- max_runtime_in_seconds: The maximum runtime in seconds for the
53
- processing job.
54
- tags: Tags to apply to the Processor/Estimator assigned to the step.
55
- pipeline_tags: Tags to apply to the pipeline via the
56
- sagemaker.workflow.pipeline.Pipeline.create method.
57
- processor_tags: DEPRECATED: use `tags` instead.
58
- keep_alive_period_in_seconds: The time in seconds after which the
59
- provisioned instance will be terminated if not used. This is only
60
- applicable for TrainingStep type and it is not possible to use
61
- TrainingStep type if the `output_data_s3_uri` is set to Dict[str, str].
62
- use_training_step: Whether to use the TrainingStep type.
63
- It is not possible to use TrainingStep type
64
- if the `output_data_s3_uri` is set to Dict[str, str] or if the
65
- `output_data_s3_mode` != "EndOfJob".
66
- processor_args: Arguments that are directly passed to the SageMaker
67
- Processor for a specific step, allowing for overriding the default
68
- settings provided when configuring the component. See
69
- https://sagemaker.readthedocs.io/en/stable/api/training/processing.html#sagemaker.processing.Processor
70
- for a full list of arguments.
71
- For processor_args.instance_type, check
72
- https://docs.aws.amazon.com/sagemaker/latest/dg/notebooks-available-instance-types.html
73
- for a list of available instance types.
74
- environment: Environment variables to pass to the container.
75
- estimator_args: Arguments that are directly passed to the SageMaker
76
- Estimator for a specific step, allowing for overriding the default
77
- settings provided when configuring the component. See
78
- https://sagemaker.readthedocs.io/en/stable/api/training/estimators.html#sagemaker.estimator.Estimator
79
- for a full list of arguments.
80
- For a list of available instance types, check
81
- https://docs.aws.amazon.com/sagemaker/latest/dg/cmn-info-instance-types.html.
82
- input_data_s3_mode: How data is made available to the container.
83
- Two possible input modes: File, Pipe.
84
- input_data_s3_uri: S3 URI where data is located if not locally,
85
- e.g. s3://my-bucket/my-data/train. How data will be made available
86
- to the container is configured with input_data_s3_mode. Two possible
87
- input types:
88
- - str: S3 location where training data is saved.
89
- - Dict[str, str]: (ChannelName, S3Location) which represent
90
- - Dict[str, str]: (ChannelName, S3Location) which represent
91
- channels (e.g. training, validation, testing) where
92
- specific parts of the data are saved in S3.
93
- output_data_s3_mode: How data is uploaded to the S3 bucket.
94
- Two possible output modes: EndOfJob, Continuous.
95
- output_data_s3_uri: S3 URI where data is uploaded after or during processing run.
96
- e.g. s3://my-bucket/my-data/output. How data will be made available
97
- to the container is configured with output_data_s3_mode. Two possible
98
- input types:
99
- - str: S3 location where data will be uploaded from a local folder
100
- named /opt/ml/processing/output/data.
101
- - Dict[str, str]: (ChannelName, S3Location) which represent
102
- channels (e.g. output_one, output_two) where
103
- specific parts of the data are stored locally for S3 upload.
104
- Data must be available locally in /opt/ml/processing/output/data/<ChannelName>.
105
- """
106
-
107
- synchronous: bool = True
40
+ """Settings for the Sagemaker orchestrator."""
41
+
42
+ synchronous: bool = Field(
43
+ True,
44
+ description="Controls whether pipeline execution blocks the client. If True, "
45
+ "the client waits until all steps complete before returning. If False, "
46
+ "returns immediately and executes asynchronously. Useful for long-running "
47
+ "production pipelines where you don't want to maintain a connection",
48
+ )
108
49
 
109
-     instance_type: Optional[str] = None
-     execution_role: Optional[str] = None
-     volume_size_in_gb: int = 30
-     max_runtime_in_seconds: int = 86400
-     tags: Dict[str, str] = {}
-     pipeline_tags: Dict[str, str] = {}
-     keep_alive_period_in_seconds: Optional[int] = 300  # 5 minutes
-     use_training_step: Optional[bool] = None
+     instance_type: Optional[str] = Field(
+         None,
+         description="AWS EC2 instance type for step execution. Must be a valid "
+         "SageMaker-supported instance type. Examples: 'ml.t3.medium' (2 vCPU, 4GB RAM), "
+         "'ml.m5.xlarge' (4 vCPU, 16GB RAM), 'ml.p3.2xlarge' (8 vCPU, 61GB RAM, 1 GPU). "
+         "Defaults to ml.m5.xlarge for training steps or ml.t3.medium for processing steps",
+     )
+     execution_role: Optional[str] = Field(
+         None,
+         description="IAM role ARN for SageMaker step execution permissions. Must have "
+         "necessary policies attached (SageMakerFullAccess, S3 access, etc.). "
+         "Example: 'arn:aws:iam::123456789012:role/SageMakerExecutionRole'. "
+         "If not provided, uses the default SageMaker execution role",
+     )
+     volume_size_in_gb: int = Field(
+         30,
+         description="EBS volume size in GB for step execution storage. Must be between "
+         "1-16384 GB. Used for temporary files, model artifacts, and data processing. "
+         "Larger volumes needed for big datasets or model training. Example: 30 for "
+         "small jobs, 100+ for large ML training jobs",
+     )
+     max_runtime_in_seconds: int = Field(
+         86400,  # 24 hours
+         description="Maximum execution time in seconds before job termination. Must be "
+         "between 1-432000 seconds (5 days). Used to prevent runaway jobs and control costs. "
+         "Examples: 3600 (1 hour), 86400 (24 hours), 259200 (3 days). "
+         "Consider your longest expected step duration",
+     )
+     tags: Dict[str, str] = Field(
+         default_factory=dict,
+         description="Tags to apply to the Processor/Estimator assigned to the step. "
+         "Example: {'Environment': 'Production', 'Project': 'MLOps'}",
+     )
+     pipeline_tags: Dict[str, str] = Field(
+         default_factory=dict,
+         description="Tags to apply to the pipeline via the "
+         "sagemaker.workflow.pipeline.Pipeline.create method. Example: "
+         "{'Environment': 'Production', 'Project': 'MLOps'}",
+     )
+     keep_alive_period_in_seconds: Optional[int] = Field(
+         300,  # 5 minutes
+         description="The time in seconds after which the provisioned instance "
+         "will be terminated if not used. This is only applicable for "
+         "TrainingStep type.",
+     )
+     use_training_step: Optional[bool] = Field(
+         None,
+         description="Whether to use the TrainingStep type. It is not possible "
+         "to use TrainingStep type if the `output_data_s3_uri` is set to "
+         "Dict[str, str] or if the `output_data_s3_mode` != 'EndOfJob'.",
+     )
  
-     processor_args: Dict[str, Any] = {}
-     estimator_args: Dict[str, Any] = {}
-     environment: Dict[str, str] = {}
+     processor_args: Dict[str, Any] = Field(
+         default_factory=dict,
+         description="Arguments that are directly passed to the SageMaker "
+         "Processor for a specific step, allowing for overriding the default "
+         "settings provided when configuring the component. Example: "
+         "{'instance_count': 2, 'base_job_name': 'my-processing-job'}",
+     )
+     estimator_args: Dict[str, Any] = Field(
+         default_factory=dict,
+         description="Arguments that are directly passed to the SageMaker "
+         "Estimator for a specific step, allowing for overriding the default "
+         "settings provided when configuring the component. Example: "
+         "{'train_instance_count': 2, 'train_max_run': 3600}",
+     )
+     environment: Dict[str, str] = Field(
+         default_factory=dict,
+         description="Environment variables to pass to the container. "
+         "Example: {'LOG_LEVEL': 'INFO', 'DEBUG_MODE': 'False'}",
+     )
  
- input_data_s3_mode: str = "File"
122
+ input_data_s3_mode: str = Field(
123
+ "File",
124
+ description="How data is made available to the container. "
125
+ "Two possible input modes: File, Pipe.",
126
+ )
123
127
  input_data_s3_uri: Optional[Union[str, Dict[str, str]]] = Field(
124
- default=None, union_mode="left_to_right"
128
+ default=None,
129
+ union_mode="left_to_right",
130
+ description="S3 URI where data is located if not locally. Example string: "
131
+ "'s3://my-bucket/my-data/train'. Example dict: "
132
+ "{'training': 's3://bucket/train', 'validation': 's3://bucket/val'}",
125
133
  )
126
134
 
127
- output_data_s3_mode: str = DEFAULT_OUTPUT_DATA_S3_MODE
135
+ output_data_s3_mode: str = Field(
136
+ DEFAULT_OUTPUT_DATA_S3_MODE,
137
+ description="How data is uploaded to the S3 bucket. "
138
+ "Two possible output modes: EndOfJob, Continuous.",
139
+ )
128
140
  output_data_s3_uri: Optional[Union[str, Dict[str, str]]] = Field(
129
- default=None, union_mode="left_to_right"
141
+ default=None,
142
+ union_mode="left_to_right",
143
+ description="S3 URI where data is uploaded after or during processing run. "
144
+ "Example string: 's3://my-bucket/my-data/output'. Example dict: "
145
+ "{'output_one': 's3://bucket/out1', 'output_two': 's3://bucket/out2'}",
146
+ )
147
+ processor_role: Optional[str] = Field(
148
+ None,
149
+ description="DEPRECATED: use `execution_role` instead. "
150
+ "The IAM role to use for the step execution.",
151
+ )
152
+ processor_tags: Optional[Dict[str, str]] = Field(
153
+ None,
154
+ description="DEPRECATED: use `tags` instead. "
155
+ "Tags to apply to the Processor assigned to the step.",
130
156
  )
131
-
132
- processor_role: Optional[str] = None
133
- processor_tags: Optional[Dict[str, str]] = None
134
157
  _deprecation_validator = deprecation_utils.deprecate_pydantic_attributes(
135
158
  ("processor_role", "execution_role"), ("processor_tags", "tags")
136
159
  )
@@ -184,39 +207,49 @@ class SagemakerOrchestratorConfig(
        `aws_secret_access_key`, and optional `aws_auth_role_arn`,
      - If none of the above are provided, unspecified credentials will be
        loaded from the default AWS config.
- 
-     Attributes:
-         execution_role: The IAM role ARN to use for the pipeline.
-         scheduler_role: The ARN of the IAM role that will be assumed by
-             the EventBridge service to launch Sagemaker pipelines
-             (For more details regarding the required permissions, please check:
-             https://docs.zenml.io/stacks/stack-components/orchestrators/sagemaker#required-iam-permissions-for-schedules)
-         aws_access_key_id: The AWS access key ID to use to authenticate to AWS.
-             If not provided, the value from the default AWS config will be used.
-         aws_secret_access_key: The AWS secret access key to use to authenticate
-             to AWS. If not provided, the value from the default AWS config will
-             be used.
-         aws_profile: The AWS profile to use for authentication if not using
-             service connectors or explicit credentials. If not provided, the
-             default profile will be used.
-         aws_auth_role_arn: The ARN of an intermediate IAM role to assume when
-             authenticating to AWS.
-         region: The AWS region where the processing job will be run. If not
-             provided, the value from the default AWS config will be used.
-         bucket: Name of the S3 bucket to use for storing artifacts
-             from the job run. If not provided, a default bucket will be created
-             based on the following format:
-             "sagemaker-{region}-{aws-account-id}".
      """
  
-     execution_role: str
-     scheduler_role: Optional[str] = None
-     aws_access_key_id: Optional[str] = SecretField(default=None)
-     aws_secret_access_key: Optional[str] = SecretField(default=None)
-     aws_profile: Optional[str] = None
-     aws_auth_role_arn: Optional[str] = None
-     region: Optional[str] = None
-     bucket: Optional[str] = None
+     execution_role: str = Field(
+         ..., description="The IAM role ARN to use for the pipeline."
+     )
+     scheduler_role: Optional[str] = Field(
+         None,
+         description="The ARN of the IAM role that will be assumed by "
+         "the EventBridge service to launch Sagemaker pipelines. "
+         "Required for scheduled pipelines.",
+     )
+     aws_access_key_id: Optional[str] = SecretField(
+         default=None,
+         description="The AWS access key ID to use to authenticate to AWS. "
+         "If not provided, the value from the default AWS config will be used.",
+     )
+     aws_secret_access_key: Optional[str] = SecretField(
+         default=None,
+         description="The AWS secret access key to use to authenticate to AWS. "
+         "If not provided, the value from the default AWS config will be used.",
+     )
+     aws_profile: Optional[str] = Field(
+         None,
+         description="The AWS profile to use for authentication if not using "
+         "service connectors or explicit credentials. If not provided, the "
+         "default profile will be used.",
+     )
+     aws_auth_role_arn: Optional[str] = Field(
+         None,
+         description="The ARN of an intermediate IAM role to assume when "
+         "authenticating to AWS.",
+     )
+     region: Optional[str] = Field(
+         None,
+         description="The AWS region where the processing job will be run. "
+         "If not provided, the value from the default AWS config will be used.",
+     )
+     bucket: Optional[str] = Field(
+         None,
+         description="Name of the S3 bucket to use for storing artifacts "
+         "from the job run. If not provided, a default bucket will be created "
+         "based on the following format: 'sagemaker-{region}-{aws-account-id}'.",
+     )
  
      @property
      def is_remote(self) -> bool:
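A hedged usage sketch for the settings class whose fields are documented above; the `orchestrator.sagemaker` settings key is the usual flavor-scoped key in ZenML pipelines and is an assumption here, not something shown in this diff:

from zenml import pipeline
from zenml.integrations.aws.flavors.sagemaker_orchestrator_flavor import (
    SagemakerOrchestratorSettings,
)

# Field names are unchanged by this release; only their documentation moved
# into Field() descriptions.
sagemaker_settings = SagemakerOrchestratorSettings(
    instance_type="ml.m5.xlarge",
    volume_size_in_gb=50,
    tags={"Project": "MLOps"},
    environment={"LOG_LEVEL": "INFO"},
)


@pipeline(settings={"orchestrator.sagemaker": sagemaker_settings})
def my_pipeline() -> None:
    ...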
zenml/integrations/aws/flavors/sagemaker_step_operator_flavor.py CHANGED
@@ -34,38 +34,37 @@ if TYPE_CHECKING:
  
  
  class SagemakerStepOperatorSettings(BaseSettings):
-     """Settings for the Sagemaker step operator.
- 
-     Attributes:
-         experiment_name: The name for the experiment to which the job
-             will be associated. If not provided, the job runs would be
-             independent.
-         input_data_s3_uri: S3 URI where training data is located if not locally,
-             e.g. s3://my-bucket/my-data/train. How data will be made available
-             to the container is configured with estimator_args.input_mode. Two possible
-             input types:
-                 - str: S3 location where training data is saved.
-                 - Dict[str, str]: (ChannelName, S3Location) which represent
-                     channels (e.g. training, validation, testing) where
-                     specific parts of the data are saved in S3.
-         estimator_args: Arguments that are directly passed to the SageMaker
-             Estimator. See
-             https://sagemaker.readthedocs.io/en/stable/api/training/estimators.html#sagemaker.estimator.Estimator
-             for a full list of arguments.
-             For estimator_args.instance_type, check
-             https://docs.aws.amazon.com/sagemaker/latest/dg/notebooks-available-instance-types.html
-             for a list of available instance types.
-         environment: Environment variables to pass to the container.
- 
-     """
- 
-     instance_type: Optional[str] = None
-     experiment_name: Optional[str] = None
+     """Settings for the Sagemaker step operator."""
+ 
+     instance_type: Optional[str] = Field(
+         None,
+         description="DEPRECATED: The instance type to use for the step execution. "
+         "Use estimator_args instead. Example: 'ml.m5.xlarge'",
+     )
+     experiment_name: Optional[str] = Field(
+         None,
+         description="The name for the experiment to which the job will be associated. "
+         "If not provided, the job runs would be independent. Example: 'my-training-experiment'",
+     )
      input_data_s3_uri: Optional[Union[str, Dict[str, str]]] = Field(
-         default=None, union_mode="left_to_right"
+         default=None,
+         union_mode="left_to_right",
+         description="S3 URI where training data is located if not locally. "
+         "Example string: 's3://my-bucket/my-data/train'. Example dict: "
+         "{'training': 's3://bucket/train', 'validation': 's3://bucket/val'}",
+     )
+     estimator_args: Dict[str, Any] = Field(
+         default_factory=dict,
+         description="Arguments that are directly passed to the SageMaker Estimator. "
+         "See SageMaker documentation for available arguments and instance types. Example: "
+         "{'instance_type': 'ml.m5.xlarge', 'instance_count': 1, "
+         "'train_max_run': 3600, 'input_mode': 'File'}",
+     )
+     environment: Dict[str, str] = Field(
+         default_factory=dict,
+         description="Environment variables to pass to the container during execution. "
+         "Example: {'LOG_LEVEL': 'INFO', 'DEBUG_MODE': 'False'}",
      )
-     estimator_args: Dict[str, Any] = {}
-     environment: Dict[str, str] = {}
  
      _deprecation_validator = deprecation_utils.deprecate_pydantic_attributes(
          "instance_type"
@@ -75,18 +74,20 @@ class SagemakerStepOperatorSettings(BaseSettings):
  class SagemakerStepOperatorConfig(
      BaseStepOperatorConfig, SagemakerStepOperatorSettings
  ):
-     """Config for the Sagemaker step operator.
- 
-     Attributes:
-         role: The role that has to be assigned to the jobs which are
-             running in Sagemaker.
-         bucket: Name of the S3 bucket to use for storing artifacts
-             from the job run. If not provided, a default bucket will be created
-             based on the following format: "sagemaker-{region}-{aws-account-id}".
-     """
- 
-     role: str
-     bucket: Optional[str] = None
+     """Config for the Sagemaker step operator."""
+ 
+     role: str = Field(
+         ...,
+         description="The IAM role ARN that has to be assigned to the jobs "
+         "running in SageMaker. This role must have the necessary permissions "
+         "to access SageMaker and S3 resources.",
+     )
+     bucket: Optional[str] = Field(
+         None,
+         description="Name of the S3 bucket to use for storing artifacts from the job run. "
+         "If not provided, a default bucket will be created based on the format: "
+         "'sagemaker-{region}-{aws-account-id}'.",
+     )
  
      @property
      def is_remote(self) -> bool: