zenml-nightly 0.70.0.dev20241127__py3-none-any.whl → 0.70.0.dev20241129__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. zenml/VERSION +1 -1
  2. zenml/artifacts/artifact_config.py +21 -1
  3. zenml/artifacts/utils.py +5 -1
  4. zenml/cli/pipeline.py +80 -0
  5. zenml/config/compiler.py +12 -3
  6. zenml/config/pipeline_configurations.py +20 -0
  7. zenml/config/pipeline_run_configuration.py +1 -0
  8. zenml/config/step_configurations.py +21 -0
  9. zenml/enums.py +1 -0
  10. zenml/integrations/__init__.py +1 -0
  11. zenml/integrations/constants.py +1 -0
  12. zenml/integrations/feast/__init__.py +1 -1
  13. zenml/integrations/feast/feature_stores/feast_feature_store.py +13 -9
  14. zenml/integrations/kubernetes/orchestrators/kube_utils.py +46 -2
  15. zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +13 -2
  16. zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +3 -1
  17. zenml/integrations/kubernetes/orchestrators/manifest_utils.py +3 -2
  18. zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py +3 -1
  19. zenml/integrations/modal/__init__.py +46 -0
  20. zenml/integrations/modal/flavors/__init__.py +26 -0
  21. zenml/integrations/modal/flavors/modal_step_operator_flavor.py +125 -0
  22. zenml/integrations/modal/step_operators/__init__.py +22 -0
  23. zenml/integrations/modal/step_operators/modal_step_operator.py +242 -0
  24. zenml/materializers/built_in_materializer.py +18 -1
  25. zenml/materializers/structured_string_materializer.py +8 -3
  26. zenml/model/model.py +6 -2
  27. zenml/models/v2/core/pipeline_run.py +4 -0
  28. zenml/models/v2/core/step_run.py +1 -1
  29. zenml/orchestrators/publish_utils.py +1 -1
  30. zenml/orchestrators/step_launcher.py +6 -2
  31. zenml/orchestrators/step_run_utils.py +15 -6
  32. zenml/orchestrators/step_runner.py +40 -2
  33. zenml/orchestrators/utils.py +29 -4
  34. zenml/pipelines/pipeline_decorator.py +4 -0
  35. zenml/pipelines/pipeline_definition.py +14 -3
  36. zenml/pipelines/run_utils.py +8 -3
  37. zenml/steps/base_step.py +11 -1
  38. zenml/steps/entrypoint_function_utils.py +4 -2
  39. zenml/steps/step_decorator.py +4 -0
  40. zenml/steps/utils.py +17 -5
  41. zenml/types.py +4 -0
  42. zenml/utils/string_utils.py +30 -12
  43. zenml/utils/visualization_utils.py +4 -1
  44. zenml/zen_server/template_execution/utils.py +1 -0
  45. zenml/zen_stores/schemas/artifact_schemas.py +2 -1
  46. zenml/zen_stores/schemas/pipeline_run_schemas.py +14 -3
  47. zenml/zen_stores/schemas/step_run_schemas.py +19 -0
  48. zenml/zen_stores/sql_zen_store.py +15 -11
  49. {zenml_nightly-0.70.0.dev20241127.dist-info → zenml_nightly-0.70.0.dev20241129.dist-info}/METADATA +1 -1
  50. {zenml_nightly-0.70.0.dev20241127.dist-info → zenml_nightly-0.70.0.dev20241129.dist-info}/RECORD +53 -48
  51. {zenml_nightly-0.70.0.dev20241127.dist-info → zenml_nightly-0.70.0.dev20241129.dist-info}/LICENSE +0 -0
  52. {zenml_nightly-0.70.0.dev20241127.dist-info → zenml_nightly-0.70.0.dev20241129.dist-info}/WHEEL +0 -0
  53. {zenml_nightly-0.70.0.dev20241127.dist-info → zenml_nightly-0.70.0.dev20241129.dist-info}/entry_points.txt +0 -0
zenml/VERSION CHANGED
@@ -1 +1 @@
1
- 0.70.0.dev20241127
1
+ 0.70.0.dev20241129
zenml/artifacts/artifact_config.py CHANGED
@@ -21,6 +21,7 @@ from zenml.enums import ArtifactType
  from zenml.logger import get_logger
  from zenml.metadata.metadata_types import MetadataType
  from zenml.utils.pydantic_utils import before_validator_handler
+ from zenml.utils.string_utils import format_name_template

  logger = get_logger(__name__)

@@ -45,7 +46,13 @@ class ArtifactConfig(BaseModel):
      ```

      Attributes:
-         name: The name of the artifact.
+         name: The name of the artifact:
+             - static string, e.g. "name"
+             - dynamic string, e.g. "name_{date}_{time}_{custom_placeholder}"
+             If you use any placeholders besides `date` and `time`,
+             you need to provide the values for them in the `substitutions`
+             argument of the step decorator or the `substitutions` argument
+             of `with_options` of the step.
          version: The version of the artifact.
          tags: The tags of the artifact.
          run_metadata: Metadata to add to the artifact.
@@ -111,3 +118,16 @@ class ArtifactConfig(BaseModel):
          data.setdefault("artifact_type", ArtifactType.SERVICE)

          return data
+
+     def _evaluated_name(self, substitutions: Dict[str, str]) -> Optional[str]:
+         """Evaluated name of the artifact.
+
+         Args:
+             substitutions: Extra placeholders to use in the name template.
+
+         Returns:
+             The evaluated name of the artifact.
+         """
+         if self.name:
+             return format_name_template(self.name, substitutions=substitutions)
+         return self.name
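The dynamic artifact naming added above can be exercised from a step. A minimal sketch, assuming a ZenML 0.70-series environment (the step and artifact names are hypothetical; `environment` is a custom placeholder supplied via the step-level `substitutions` argument, while `date` and `time` are built in):

    from typing_extensions import Annotated

    from zenml import step
    from zenml.artifacts.artifact_config import ArtifactConfig


    @step(substitutions={"environment": "staging"})
    def train_model() -> Annotated[
        str, ArtifactConfig(name="model_{environment}_{date}_{time}")
    ]:
        # The name template is evaluated at runtime via format_name_template,
        # yielding e.g. "model_staging_2024_11_29_10_30_00_000000".
        return "model-weights"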
zenml/artifacts/utils.py CHANGED
@@ -689,7 +689,11 @@ def _link_artifact_version_to_the_step_and_model(
      client.zen_store.update_run_step(
          step_run_id=step_run.id,
          step_run_update=StepRunUpdate(
-             outputs={artifact_version.artifact.name: artifact_version.id}
+             outputs={
+                 artifact_version.artifact.name: [
+                     artifact_version.id,
+                 ]
+             }
          ),
      )
      error_message = "model"
zenml/cli/pipeline.py CHANGED
@@ -315,6 +315,86 @@ def run_pipeline(
      pipeline_instance()


+ @pipeline.command(
+     "create-run-template",
+     help="Create a run template for a pipeline. The SOURCE argument needs to "
+     "be an importable source path resolving to a ZenML pipeline instance, e.g. "
+     "`my_module.my_pipeline_instance`.",
+ )
+ @click.argument("source")
+ @click.option(
+     "--name",
+     "-n",
+     type=str,
+     required=True,
+     help="Name for the template",
+ )
+ @click.option(
+     "--config",
+     "-c",
+     "config_path",
+     type=click.Path(exists=True, dir_okay=False),
+     required=False,
+     help="Path to configuration file for the build.",
+ )
+ @click.option(
+     "--stack",
+     "-s",
+     "stack_name_or_id",
+     type=str,
+     required=False,
+     help="Name or ID of the stack to use for the build.",
+ )
+ def create_run_template(
+     source: str,
+     name: str,
+     config_path: Optional[str] = None,
+     stack_name_or_id: Optional[str] = None,
+ ) -> None:
+     """Create a run template for a pipeline.
+
+     Args:
+         source: Importable source resolving to a pipeline instance.
+         name: Name of the run template.
+         config_path: Path to pipeline configuration file.
+         stack_name_or_id: Name or ID of the stack for which the template should
+             be created.
+     """
+     if not Client().root:
+         cli_utils.warning(
+             "You're running the `zenml pipeline create-run-template` command "
+             "without a ZenML repository. Your current working directory will "
+             "be used as the source root relative to which the registered step "
+             "classes will be resolved. To silence this warning, run `zenml "
+             "init` at your source code root."
+         )
+
+     try:
+         pipeline_instance = source_utils.load(source)
+     except ModuleNotFoundError as e:
+         source_root = source_utils.get_source_root()
+         cli_utils.error(
+             f"Unable to import module `{e.name}`. Make sure the source path is "
+             f"relative to your source root `{source_root}`."
+         )
+     except AttributeError as e:
+         cli_utils.error("Unable to load attribute from module: " + str(e))
+
+     if not isinstance(pipeline_instance, Pipeline):
+         cli_utils.error(
+             f"The given source path `{source}` does not resolve to a pipeline "
+             "object."
+         )
+
+     with cli_utils.temporary_active_stack(stack_name_or_id=stack_name_or_id):
+         pipeline_instance = pipeline_instance.with_options(
+             config_path=config_path
+         )
+         template = pipeline_instance.create_run_template(name=name)
+
+     cli_utils.declare(f"Created run template `{template.id}`.")
+
+
  @pipeline.command("list", help="List all registered pipelines.")
  @list_options(PipelineFilter)
  def list_pipelines(**kwargs: Any) -> None:
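The new CLI command wraps the `create_run_template` method on the pipeline instance. A minimal sketch of the programmatic equivalent (the pipeline and template names are hypothetical, and a stack that supports run templates is assumed):

    from zenml import pipeline, step


    @step
    def trainer() -> int:
        return 42


    @pipeline
    def my_pipeline() -> None:
        trainer()


    if __name__ == "__main__":
        # Equivalent to:
        #   zenml pipeline create-run-template my_module.my_pipeline -n nightly-template
        template = my_pipeline.create_run_template(name="nightly-template")
        print(f"Created run template `{template.id}`.")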
zenml/config/compiler.py CHANGED
@@ -99,7 +99,10 @@ class Compiler:

          self._apply_stack_default_settings(pipeline=pipeline, stack=stack)
          if run_configuration.run_name:
-             self._verify_run_name(run_configuration.run_name)
+             self._verify_run_name(
+                 run_configuration.run_name,
+                 pipeline.configuration.substitutions,
+             )

          pipeline_settings = self._filter_and_validate_settings(
              settings=pipeline.configuration.settings,
@@ -305,16 +308,22 @@
          return default_settings

      @staticmethod
-     def _verify_run_name(run_name: str) -> None:
+     def _verify_run_name(
+         run_name: str,
+         substitutions: Dict[str, str],
+     ) -> None:
          """Verifies that the run name contains only valid placeholders.

          Args:
              run_name: The run name to verify.
+             substitutions: The substitutions to be used in the run name.

          Raises:
              ValueError: If the run name contains invalid placeholders.
          """
-         valid_placeholder_names = {"date", "time"}
+         valid_placeholder_names = {"date", "time"}.union(
+             set(substitutions.keys())
+         )
          placeholders = {
              v[1] for v in string.Formatter().parse(run_name) if v[1]
          }
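Custom placeholders in the run name are now accepted as long as they are declared as substitutions. A minimal sketch, assuming the pipeline-level `substitutions` argument added elsewhere in this release (`stage` is a hypothetical placeholder):

    from zenml import pipeline, step


    @step
    def ingest() -> int:
        return 1


    @pipeline(substitutions={"stage": "staging"})
    def etl_pipeline() -> None:
        ingest()


    if __name__ == "__main__":
        # "date" and "time" remain built-in placeholders; "stage" passes
        # _verify_run_name because it is declared in the substitutions.
        etl_pipeline.with_options(run_name="etl_{stage}_{date}_{time}")()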
zenml/config/pipeline_configurations.py CHANGED
@@ -13,6 +13,7 @@
  # permissions and limitations under the License.
  """Pipeline configuration classes."""

+ from datetime import datetime
  from typing import TYPE_CHECKING, Any, Dict, List, Optional
  from pydantic import SerializeAsAny, field_validator
@@ -46,6 +47,25 @@ class PipelineConfigurationUpdate(StrictBaseModel):
      model: Optional[Model] = None
      parameters: Optional[Dict[str, Any]] = None
      retry: Optional[StepRetryConfig] = None
+     substitutions: Dict[str, str] = {}
+
+     def _get_full_substitutions(
+         self, start_time: Optional[datetime]
+     ) -> Dict[str, str]:
+         """Returns the full substitutions dict.
+
+         Args:
+             start_time: Start time of the pipeline run.
+
+         Returns:
+             The full substitutions dict including date and time.
+         """
+         if start_time is None:
+             start_time = datetime.utcnow()
+         ret = self.substitutions.copy()
+         ret.setdefault("date", start_time.strftime("%Y_%m_%d"))
+         ret.setdefault("time", start_time.strftime("%H_%M_%S_%f"))
+         return ret


  class PipelineConfiguration(PipelineConfigurationUpdate):
zenml/config/pipeline_run_configuration.py CHANGED
@@ -52,3 +52,4 @@ class PipelineRunConfiguration(
      retry: Optional[StepRetryConfig] = None
      failure_hook_source: Optional[SourceWithValidator] = None
      success_hook_source: Optional[SourceWithValidator] = None
+     substitutions: Dict[str, str] = {}
zenml/config/step_configurations.py CHANGED
@@ -13,6 +13,7 @@
  # permissions and limitations under the License.
  """Pipeline configuration classes."""

+ from datetime import datetime
  from typing import (
      TYPE_CHECKING,
      Any,
@@ -49,6 +50,7 @@ from zenml.utils.pydantic_utils import before_validator_handler

  if TYPE_CHECKING:
      from zenml.config import DockerSettings, ResourceSettings
+     from zenml.config.pipeline_configurations import PipelineConfiguration

  logger = get_logger(__name__)

@@ -152,6 +154,7 @@ class StepConfigurationUpdate(StrictBaseModel):
      success_hook_source: Optional[SourceWithValidator] = None
      model: Optional[Model] = None
      retry: Optional[StepRetryConfig] = None
+     substitutions: Dict[str, str] = {}

      outputs: Mapping[str, PartialArtifactConfiguration] = {}

@@ -237,6 +240,24 @@ class StepConfiguration(PartialStepConfiguration):
          model_or_dict = model_or_dict.model_dump()
          return DockerSettings.model_validate(model_or_dict)

+     def _get_full_substitutions(
+         self,
+         pipeline_config: "PipelineConfiguration",
+         start_time: Optional[datetime],
+     ) -> Dict[str, str]:
+         """Get the full set of substitutions for this step configuration.
+
+         Args:
+             pipeline_config: The pipeline configuration.
+             start_time: The start time of the pipeline run.
+
+         Returns:
+             The full set of substitutions for this step configuration.
+         """
+         ret = pipeline_config._get_full_substitutions(start_time)
+         ret.update(self.substitutions)
+         return ret
+

  class InputSpec(StrictBaseModel):
      """Step input specification."""
zenml/enums.py CHANGED
@@ -60,6 +60,7 @@ class VisualizationType(StrEnum):
      HTML = "html"
      IMAGE = "image"
      MARKDOWN = "markdown"
+     JSON = "json"


  class ZenMLServiceType(StrEnum):
zenml/integrations/__init__.py CHANGED
@@ -48,6 +48,7 @@ from zenml.integrations.lightgbm import LightGBMIntegration # noqa

  # from zenml.integrations.llama_index import LlamaIndexIntegration # noqa
  from zenml.integrations.mlflow import MlflowIntegration # noqa
+ from zenml.integrations.modal import ModalIntegration # noqa
  from zenml.integrations.neptune import NeptuneIntegration # noqa
  from zenml.integrations.neural_prophet import NeuralProphetIntegration # noqa
  from zenml.integrations.numpy import NumpyIntegration # noqa
@@ -42,6 +42,7 @@ LANGCHAIN = "langchain"
42
42
  LIGHTGBM = "lightgbm"
43
43
  # LLAMA_INDEX = "llama_index"
44
44
  MLFLOW = "mlflow"
45
+ MODAL = "modal"
45
46
  NEPTUNE = "neptune"
46
47
  NEURAL_PROPHET = "neural_prophet"
47
48
  NUMPY = "numpy"
zenml/integrations/feast/__init__.py CHANGED
@@ -31,7 +31,7 @@ class FeastIntegration(Integration):

      NAME = FEAST
      # click is added to keep the feast click version in sync with ZenML's click
-     REQUIREMENTS = ["feast", "click>=8.0.1,<8.1.4"]
+     REQUIREMENTS = ["feast>=0.12.0", "click>=8.0.1,<8.1.4"]
      REQUIREMENTS_IGNORED_ON_UNINSTALL = ["click", "pandas"]

      @classmethod
zenml/integrations/feast/feature_stores/feast_feature_store.py CHANGED
@@ -16,7 +16,7 @@
  from typing import Any, Dict, List, Union, cast

  import pandas as pd
- from feast import FeatureStore # type: ignore
+ from feast import FeatureService, FeatureStore # type: ignore
  from feast.infra.registry.base_registry import BaseRegistry # type: ignore

  from zenml.feature_stores.base_feature_store import BaseFeatureStore
@@ -43,14 +43,14 @@ class FeastFeatureStore(BaseFeatureStore):
      def get_historical_features(
          self,
          entity_df: Union[pd.DataFrame, str],
-         features: List[str],
+         features: Union[List[str], FeatureService],
          full_feature_names: bool = False,
      ) -> pd.DataFrame:
          """Returns the historical features for training or batch scoring.

          Args:
              entity_df: The entity DataFrame or entity name.
-             features: The features to retrieve.
+             features: The features to retrieve or a FeatureService.
              full_feature_names: Whether to return the full feature names.

          Raise:
@@ -70,14 +70,14 @@ class FeastFeatureStore(BaseFeatureStore):
      def get_online_features(
          self,
          entity_rows: List[Dict[str, Any]],
-         features: List[str],
+         features: Union[List[str], FeatureService],
          full_feature_names: bool = False,
      ) -> Dict[str, Any]:
          """Returns the latest online feature data.

          Args:
              entity_rows: The entity rows to retrieve.
-             features: The features to retrieve.
+             features: The features to retrieve or a FeatureService.
              full_feature_names: Whether to return the full feature names.

          Raise:
@@ -118,17 +118,21 @@ class FeastFeatureStore(BaseFeatureStore):
          fs = FeatureStore(repo_path=self.config.feast_repo)
          return [ds.name for ds in fs.list_entities()]

-     def get_feature_services(self) -> List[str]:
-         """Returns the feature service names.
+     def get_feature_services(self) -> List[FeatureService]:
+         """Returns the feature services.

          Raise:
              ConnectionError: If the online component (Redis) is not available.

          Returns:
-             The feature service names.
+             The feature services.
          """
          fs = FeatureStore(repo_path=self.config.feast_repo)
-         return [ds.name for ds in fs.list_feature_services()]
+         feature_services: List[FeatureService] = list(
+             fs.list_feature_services()
+         )
+
+         return feature_services

      def get_feature_views(self) -> List[str]:
          """Returns the feature view names.
zenml/integrations/kubernetes/orchestrators/kube_utils.py CHANGED
@@ -94,18 +94,62 @@ def load_kube_config(
      k8s_config.load_kube_config(context=context)


- def sanitize_pod_name(pod_name: str) -> str:
+ def calculate_max_pod_name_length_for_namespace(namespace: str) -> int:
+     """Calculate the max pod length for a certain namespace.
+
+     Args:
+         namespace: The namespace in which the pod will be created.
+
+     Returns:
+         The maximum pod name length.
+     """
+     # Kubernetes allows Pod names to have 253 characters. However, when
+     # creating a pod they try to create a log file which is called
+     # <NAMESPACE>_<POD_NAME>_<UUID>, which adds additional characters and
+     # runs into filesystem limitations for filename lengths (255). We therefore
+     # subtract the length of a UUID (36), the two underscores and the
+     # namespace length from the max filename length.
+     return 255 - 38 - len(namespace)
+
+
+ def sanitize_pod_name(pod_name: str, namespace: str) -> str:
      """Sanitize pod names so they conform to Kubernetes pod naming convention.

      Args:
          pod_name: Arbitrary input pod name.
+         namespace: Namespace in which the Pod will be created.

      Returns:
          Sanitized pod name.
      """
+     # https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-subdomain-names
      pod_name = re.sub(r"[^a-z0-9-]", "-", pod_name.lower())
      pod_name = re.sub(r"^[-]+", "", pod_name)
-     return re.sub(r"[-]+", "-", pod_name)
+     pod_name = re.sub(r"[-]+$", "", pod_name)
+     pod_name = re.sub(r"[-]+", "-", pod_name)
+
+     allowed_length = calculate_max_pod_name_length_for_namespace(
+         namespace=namespace
+     )
+     return pod_name[:allowed_length]
+
+
+ def sanitize_label(label: str) -> str:
+     """Sanitize a label for a Kubernetes resource.
+
+     Args:
+         label: The label to sanitize.
+
+     Returns:
+         The sanitized label.
+     """
+     # https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#rfc-1035-label-names
+     label = re.sub(r"[^a-z0-9-]", "-", label.lower())
+     label = re.sub(r"^[-]+", "", label)
+     label = re.sub(r"[-]+$", "", label)
+     label = re.sub(r"[-]+", "-", label)
+
+     return label[:63]


  def pod_is_not_pending(pod: k8s_client.V1Pod) -> bool:
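A minimal sketch of the new helpers (runnable only where the Kubernetes integration is installed; the expected values follow from the regex and truncation rules above):

    from zenml.integrations.kubernetes.orchestrators import kube_utils

    # Invalid characters become dashes, runs of dashes collapse, trailing dashes
    # are stripped, and the result is truncated to 255 - 38 - len(namespace).
    print(kube_utils.sanitize_pod_name("My_Pipeline Run!", namespace="zenml"))
    # -> "my-pipeline-run"

    # Labels follow the same rules but are capped at 63 characters (RFC 1035).
    print(kube_utils.sanitize_label("Training Run #42"))
    # -> "training-run-42"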
zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py CHANGED
@@ -395,8 +395,19 @@ class KubernetesOrchestrator(ContainerizedOrchestrator):
          )

          pipeline_name = deployment.pipeline_configuration.name
-         orchestrator_run_name = get_orchestrator_run_name(pipeline_name)
-         pod_name = kube_utils.sanitize_pod_name(orchestrator_run_name)
+
+         # We already make sure the orchestrator run name has the correct length
+         # to make sure we don't cut off the randomized suffix later when
+         # sanitizing the pod name. This avoids any pod naming collisions.
+         max_length = kube_utils.calculate_max_pod_name_length_for_namespace(
+             namespace=self.config.kubernetes_namespace
+         )
+         orchestrator_run_name = get_orchestrator_run_name(
+             pipeline_name, max_length=max_length
+         )
+         pod_name = kube_utils.sanitize_pod_name(
+             orchestrator_run_name, namespace=self.config.kubernetes_namespace
+         )

          assert stack.container_registry

zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py CHANGED
@@ -90,7 +90,9 @@ def main() -> None:
      """
      # Define Kubernetes pod name.
      pod_name = f"{orchestrator_run_id}-{step_name}"
-     pod_name = kube_utils.sanitize_pod_name(pod_name)
+     pod_name = kube_utils.sanitize_pod_name(
+         pod_name, namespace=args.kubernetes_namespace
+     )

      image = KubernetesOrchestrator.get_image(
          deployment=deployment_config, step_name=step_name
zenml/integrations/kubernetes/orchestrators/manifest_utils.py CHANGED
@@ -25,6 +25,7 @@ from zenml.constants import ENV_ZENML_ENABLE_REPO_INIT_WARNINGS
  from zenml.integrations.airflow.orchestrators.dag_generator import (
      ENV_ZENML_LOCAL_STORES_PATH,
  )
+ from zenml.integrations.kubernetes.orchestrators import kube_utils
  from zenml.integrations.kubernetes.pod_settings import KubernetesPodSettings


@@ -167,8 +168,8 @@ def build_pod_manifest(
      # Add run_name and pipeline_name to the labels
      labels.update(
          {
-             "run": run_name,
-             "pipeline": pipeline_name,
+             "run": kube_utils.sanitize_label(run_name),
+             "pipeline": kube_utils.sanitize_label(pipeline_name),
          }
      )

zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py CHANGED
@@ -197,7 +197,9 @@ class KubernetesStepOperator(BaseStepOperator):
          )

          pod_name = f"{info.run_name}_{info.pipeline_step_name}"
-         pod_name = kube_utils.sanitize_pod_name(pod_name)
+         pod_name = kube_utils.sanitize_pod_name(
+             pod_name, namespace=self.config.kubernetes_namespace
+         )

          command = entrypoint_command[:3]
          args = entrypoint_command[3:]
zenml/integrations/modal/__init__.py ADDED
@@ -0,0 +1,46 @@
+ # Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at:
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ # or implied. See the License for the specific language governing
+ # permissions and limitations under the License.
+ """Modal integration for cloud-native step execution.
+
+ The Modal integration sub-module provides a step operator flavor that allows
+ executing steps on Modal's cloud infrastructure.
+ """
+ from typing import List, Type
+
+ from zenml.integrations.constants import MODAL
+ from zenml.integrations.integration import Integration
+ from zenml.stack import Flavor
+
+ MODAL_STEP_OPERATOR_FLAVOR = "modal"
+
+
+ class ModalIntegration(Integration):
+     """Definition of Modal integration for ZenML."""
+
+     NAME = MODAL
+     REQUIREMENTS = ["modal>=0.64.49,<1"]
+
+     @classmethod
+     def flavors(cls) -> List[Type[Flavor]]:
+         """Declare the stack component flavors for the Modal integration.
+
+         Returns:
+             List of new stack component flavors.
+         """
+         from zenml.integrations.modal.flavors import ModalStepOperatorFlavor
+
+         return [ModalStepOperatorFlavor]
+
+
+ ModalIntegration.check_installation()
zenml/integrations/modal/flavors/__init__.py ADDED
@@ -0,0 +1,26 @@
+ # Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at:
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ # or implied. See the License for the specific language governing
+ # permissions and limitations under the License.
+ """Modal integration flavors."""
+
+ from zenml.integrations.modal.flavors.modal_step_operator_flavor import (
+     ModalStepOperatorConfig,
+     ModalStepOperatorFlavor,
+     ModalStepOperatorSettings,
+ )
+
+ __all__ = [
+     "ModalStepOperatorConfig",
+     "ModalStepOperatorFlavor",
+     "ModalStepOperatorSettings",
+ ]
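A minimal sketch of how the new flavor is meant to be consumed once the integration is installed (the step operator name `modal_runner` and the step are hypothetical; registration would go through the standard `zenml step-operator register ... --flavor=modal` flow):

    from zenml import pipeline, step


    # Pin a heavy step to a step operator registered with the new "modal" flavor.
    @step(step_operator="modal_runner")
    def heavy_training_step() -> None:
        ...


    @pipeline
    def training_pipeline() -> None:
        heavy_training_step()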