zenml-nightly 0.70.0.dev20241128__py3-none-any.whl → 0.70.0.dev20241130__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. zenml/VERSION +1 -1
  2. zenml/artifacts/artifact_config.py +7 -1
  3. zenml/artifacts/utils.py +55 -30
  4. zenml/cli/__init__.py +15 -0
  5. zenml/cli/base.py +4 -4
  6. zenml/cli/pipeline.py +80 -0
  7. zenml/cli/server.py +1 -1
  8. zenml/cli/stack.py +0 -3
  9. zenml/cli/stack_components.py +0 -1
  10. zenml/cli/utils.py +0 -5
  11. zenml/client.py +8 -18
  12. zenml/config/compiler.py +12 -3
  13. zenml/config/pipeline_configurations.py +20 -0
  14. zenml/config/pipeline_run_configuration.py +1 -0
  15. zenml/config/step_configurations.py +21 -0
  16. zenml/enums.py +1 -0
  17. zenml/integrations/feast/__init__.py +1 -1
  18. zenml/integrations/feast/feature_stores/feast_feature_store.py +13 -9
  19. zenml/materializers/built_in_materializer.py +18 -1
  20. zenml/materializers/structured_string_materializer.py +8 -3
  21. zenml/model/model.py +11 -3
  22. zenml/model/utils.py +18 -16
  23. zenml/models/__init__.py +6 -0
  24. zenml/models/v2/core/artifact_version.py +6 -3
  25. zenml/models/v2/core/component.py +0 -22
  26. zenml/models/v2/core/model_version.py +6 -3
  27. zenml/models/v2/core/pipeline_run.py +19 -3
  28. zenml/models/v2/core/run_metadata.py +30 -9
  29. zenml/models/v2/core/step_run.py +6 -4
  30. zenml/models/v2/misc/run_metadata.py +38 -0
  31. zenml/orchestrators/input_utils.py +19 -6
  32. zenml/orchestrators/publish_utils.py +12 -5
  33. zenml/orchestrators/step_launcher.py +7 -3
  34. zenml/orchestrators/step_run_utils.py +18 -6
  35. zenml/orchestrators/step_runner.py +39 -2
  36. zenml/orchestrators/utils.py +0 -21
  37. zenml/pipelines/pipeline_decorator.py +4 -0
  38. zenml/pipelines/pipeline_definition.py +14 -3
  39. zenml/pipelines/run_utils.py +9 -5
  40. zenml/steps/base_step.py +11 -1
  41. zenml/steps/entrypoint_function_utils.py +4 -2
  42. zenml/steps/step_decorator.py +4 -0
  43. zenml/steps/utils.py +23 -7
  44. zenml/types.py +4 -0
  45. zenml/utils/metadata_utils.py +186 -153
  46. zenml/utils/string_utils.py +41 -16
  47. zenml/utils/visualization_utils.py +4 -1
  48. zenml/zen_server/routers/workspaces_endpoints.py +19 -19
  49. zenml/zen_server/template_execution/utils.py +1 -0
  50. zenml/zen_stores/migrations/versions/b73bc71f1106_remove_component_spec_path.py +36 -0
  51. zenml/zen_stores/migrations/versions/cc269488e5a9_separate_run_metadata.py +135 -0
  52. zenml/zen_stores/schemas/__init__.py +5 -1
  53. zenml/zen_stores/schemas/artifact_schemas.py +12 -11
  54. zenml/zen_stores/schemas/component_schemas.py +0 -3
  55. zenml/zen_stores/schemas/model_schemas.py +13 -11
  56. zenml/zen_stores/schemas/pipeline_run_schemas.py +44 -16
  57. zenml/zen_stores/schemas/run_metadata_schemas.py +66 -31
  58. zenml/zen_stores/schemas/step_run_schemas.py +32 -12
  59. zenml/zen_stores/schemas/utils.py +47 -3
  60. zenml/zen_stores/sql_zen_store.py +117 -34
  61. {zenml_nightly-0.70.0.dev20241128.dist-info → zenml_nightly-0.70.0.dev20241130.dist-info}/METADATA +1 -1
  62. {zenml_nightly-0.70.0.dev20241128.dist-info → zenml_nightly-0.70.0.dev20241130.dist-info}/RECORD +65 -62
  63. {zenml_nightly-0.70.0.dev20241128.dist-info → zenml_nightly-0.70.0.dev20241130.dist-info}/LICENSE +0 -0
  64. {zenml_nightly-0.70.0.dev20241128.dist-info → zenml_nightly-0.70.0.dev20241130.dist-info}/WHEEL +0 -0
  65. {zenml_nightly-0.70.0.dev20241128.dist-info → zenml_nightly-0.70.0.dev20241130.dist-info}/entry_points.txt +0 -0
zenml/VERSION CHANGED
@@ -1 +1 @@
- 0.70.0.dev20241128
+ 0.70.0.dev20241130
zenml/artifacts/artifact_config.py CHANGED
@@ -45,7 +45,13 @@ class ArtifactConfig(BaseModel):
    ```

    Attributes:
-        name: The name of the artifact.
+        name: The name of the artifact:
+            - static string e.g. "name"
+            - dynamic string e.g. "name_{date}_{time}_{custom_placeholder}"
+            If you use any placeholders besides `date` and `time`,
+            you need to provide the values for them in the `substitutions`
+            argument of the step decorator or the `substitutions` argument
+            of `with_options` of the step.
        version: The version of the artifact.
        tags: The tags of the artifact.
        run_metadata: Metadata to add to the artifact.
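The dynamic-naming behavior described in this docstring can be sketched as follows; the step and placeholder names are hypothetical, and the `substitutions` parameter on the step decorator is taken from the docstring above rather than verified against the full API.

```python
from typing import Annotated

from zenml import ArtifactConfig, step


# Hypothetical step: "experiment" is a custom placeholder, so its value must be
# supplied via `substitutions` on the step decorator (or via `with_options`).
@step(substitutions={"experiment": "baseline"})
def trainer() -> Annotated[
    str, ArtifactConfig(name="model_{date}_{time}_{experiment}")
]:
    return "trained model"
```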
zenml/artifacts/utils.py CHANGED
@@ -14,6 +14,7 @@
 """Utility functions for handling artifacts."""

 import base64
+ import contextlib
 import os
 import tempfile
 import zipfile
@@ -41,7 +42,6 @@ from zenml.enums import (
     ArtifactSaveType,
     ArtifactType,
     ExecutionStatus,
-     MetadataResourceTypes,
     StackComponentType,
     VisualizationType,
 )
@@ -404,50 +404,71 @@ def log_artifact_metadata(
         artifact_version: The version of the artifact to log metadata for. If
             not provided, when being called inside a step that produces an
             artifact named `artifact_name`, the metadata will be associated to
-             the corresponding newly created artifact. Or, if not provided when
-             being called outside a step, or in a step that does not produce
-             any artifact named `artifact_name`, the metadata will be associated
-             to the latest version of that artifact.
+             the corresponding newly created artifact.

     Raises:
         ValueError: If no artifact name is provided and the function is not
             called inside a step with a single output, or, if neither an
             artifact nor an output with the given name exists.
+
     """
     logger.warning(
         "The `log_artifact_metadata` function is deprecated and will soon be "
         "removed. Please use `log_metadata` instead."
     )
-     try:
+
+     from zenml import log_metadata
+
+     if artifact_name and artifact_version:
+         assert artifact_name is not None
+
+         log_metadata(
+             metadata=metadata,
+             artifact_name=artifact_name,
+             artifact_version=artifact_version,
+         )
+
+     step_context = None
+     with contextlib.suppress(RuntimeError):
         step_context = get_step_context()
-         in_step_outputs = (artifact_name in step_context._outputs) or (
-             not artifact_name and len(step_context._outputs) == 1
+
+     if step_context and artifact_name in step_context._outputs.keys():
+         log_metadata(
+             metadata=metadata,
+             artifact_name=artifact_name,
+             infer_artifact=True,
         )
-     except RuntimeError:
-         step_context = None
-         in_step_outputs = False
-
-     if not step_context or not in_step_outputs or artifact_version:
-         if not artifact_name:
-             raise ValueError(
-                 "Artifact name must be provided unless the function is called "
-                 "inside a step with a single output."
-             )
+     elif step_context and len(step_context._outputs) == 1:
+         single_output_name = list(step_context._outputs.keys())[0]
+
+         log_metadata(
+             metadata=metadata,
+             artifact_name=single_output_name,
+             infer_artifact=True,
+         )
+     elif artifact_name:
         client = Client()
-         response = client.get_artifact_version(artifact_name, artifact_version)
-         client.create_run_metadata(
+         logger.warning(
+             "Deprecation warning! Currently, you are calling "
+             "`log_artifact_metadata` from a context, where we use the "
+             "`artifact_name` to fetch it and link the metadata to its "
+             "latest version. This behavior is deprecated and will be "
+             "removed in the future. To circumvent this, please check"
+             "the `log_metadata` function."
+         )
+         artifact_version_model = client.get_artifact_version(
+             name_id_or_prefix=artifact_name
+         )
+         log_metadata(
             metadata=metadata,
-             resource_id=response.id,
-             resource_type=MetadataResourceTypes.ARTIFACT_VERSION,
+             artifact_version_id=artifact_version_model.id,
         )
-
     else:
-         try:
-             step_context.add_output_metadata(
-                 metadata=metadata, output_name=artifact_name
-             )
-         except StepContextError as e:
-             raise ValueError(e)
+         raise ValueError(
+             "You need to call `log_artifact_metadata` either within a step "
+             "(potentially with an artifact name) or outside of a step with an "
+             "artifact name (and/or version)."
+         )


 # -----------------
@@ -689,7 +710,11 @@ def _link_artifact_version_to_the_step_and_model(
     client.zen_store.update_run_step(
         step_run_id=step_run.id,
         step_run_update=StepRunUpdate(
-             outputs={artifact_version.artifact.name: artifact_version.id}
+             outputs={
+                 artifact_version.artifact.name: [
+                     artifact_version.id,
+                 ]
+             }
         ),
     )
     error_message = "model"
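Since `log_artifact_metadata` now only forwards to `log_metadata`, a migration sketch might look like the snippet below; it mirrors the parameters used in the new implementation, and the artifact and metadata key names are made up.

```python
from zenml import log_metadata

# Outside a step: attach metadata to a specific artifact version by name and version.
log_metadata(
    metadata={"accuracy": 0.95},
    artifact_name="my_artifact",      # hypothetical artifact name
    artifact_version="2024_11_30",    # hypothetical version
)

# Inside a step: attach metadata to the artifact the step is about to produce.
log_metadata(
    metadata={"rows_processed": 10_000},
    artifact_name="output",           # must match one of the step's outputs
    infer_artifact=True,
)
```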
zenml/cli/__init__.py CHANGED
@@ -2410,6 +2410,21 @@ stack, use the `--stack` option.
 zenml pipeline run <PIPELINE_SOURCE_PATH> --stack=<STACK_ID_OR_NAME>
 ```

+ If you want to create a run template based on your pipeline that can later be used to trigger a run either from the dashboard or through an HTTP request:
+
+ ```bash
+ zenml pipeline create-run-template <PIPELINE_SOURCE_PATH> \
+     --name=<TEMPLATE_NAME>
+ ```
+
+ To specify a config file, use the `--config/-c` option. If you would like to use a different stack than the active one, use the `--stack` option.
+
+ ```bash
+ zenml pipeline create-run-template <PIPELINE_SOURCE_PATH> \
+     --name=<TEMPLATE_NAME> \
+     --config=<PATH_TO_CONFIG_YAML> \
+     --stack=<STACK_ID_OR_NAME>
+ ```
+
 Tagging your resources with ZenML
 ---------------------------------

zenml/cli/base.py CHANGED
@@ -79,19 +79,19 @@ class ZenMLProjectTemplateLocation(BaseModel):
 ZENML_PROJECT_TEMPLATES = dict(
     e2e_batch=ZenMLProjectTemplateLocation(
         github_url="zenml-io/template-e2e-batch",
-         github_tag="2024.11.20",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
+         github_tag="2024.11.28",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
     ),
     starter=ZenMLProjectTemplateLocation(
         github_url="zenml-io/template-starter",
-         github_tag="2024.10.30",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
+         github_tag="2024.11.28",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
     ),
     nlp=ZenMLProjectTemplateLocation(
         github_url="zenml-io/template-nlp",
-         github_tag="2024.10.30",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
+         github_tag="2024.11.28",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
     ),
     llm_finetuning=ZenMLProjectTemplateLocation(
         github_url="zenml-io/template-llm-finetuning",
-         github_tag="2024.11.08",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
+         github_tag="2024.11.28",  # Make sure it is aligned with .github/workflows/update-templates-to-examples.yml
     ),
 )

zenml/cli/pipeline.py CHANGED
@@ -315,6 +315,86 @@ def run_pipeline(
     pipeline_instance()


+ @pipeline.command(
+     "create-run-template",
+     help="Create a run template for a pipeline. The SOURCE argument needs to "
+     "be an importable source path resolving to a ZenML pipeline instance, e.g. "
+     "`my_module.my_pipeline_instance`.",
+ )
+ @click.argument("source")
+ @click.option(
+     "--name",
+     "-n",
+     type=str,
+     required=True,
+     help="Name for the template",
+ )
+ @click.option(
+     "--config",
+     "-c",
+     "config_path",
+     type=click.Path(exists=True, dir_okay=False),
+     required=False,
+     help="Path to configuration file for the build.",
+ )
+ @click.option(
+     "--stack",
+     "-s",
+     "stack_name_or_id",
+     type=str,
+     required=False,
+     help="Name or ID of the stack to use for the build.",
+ )
+ def create_run_template(
+     source: str,
+     name: str,
+     config_path: Optional[str] = None,
+     stack_name_or_id: Optional[str] = None,
+ ) -> None:
+     """Create a run template for a pipeline.
+
+     Args:
+         source: Importable source resolving to a pipeline instance.
+         name: Name of the run template.
+         config_path: Path to pipeline configuration file.
+         stack_name_or_id: Name or ID of the stack for which the template should
+             be created.
+     """
+     if not Client().root:
+         cli_utils.warning(
+             "You're running the `zenml pipeline create-run-template` command "
+             "without a ZenML repository. Your current working directory will "
+             "be used as the source root relative to which the registered step "
+             "classes will be resolved. To silence this warning, run `zenml "
+             "init` at your source code root."
+         )
+
+     try:
+         pipeline_instance = source_utils.load(source)
+     except ModuleNotFoundError as e:
+         source_root = source_utils.get_source_root()
+         cli_utils.error(
+             f"Unable to import module `{e.name}`. Make sure the source path is "
+             f"relative to your source root `{source_root}`."
+         )
+     except AttributeError as e:
+         cli_utils.error("Unable to load attribute from module: " + str(e))
+
+     if not isinstance(pipeline_instance, Pipeline):
+         cli_utils.error(
+             f"The given source path `{source}` does not resolve to a pipeline "
+             "object."
+         )
+
+     with cli_utils.temporary_active_stack(stack_name_or_id=stack_name_or_id):
+         pipeline_instance = pipeline_instance.with_options(
+             config_path=config_path
+         )
+         template = pipeline_instance.create_run_template(name=name)
+
+     cli_utils.declare(f"Created run template `{template.id}`.")
+
+
 @pipeline.command("list", help="List all registered pipelines.")
 @list_options(PipelineFilter)
 def list_pipelines(**kwargs: Any) -> None:
zenml/cli/server.py CHANGED
@@ -469,7 +469,7 @@ def logs(
     if server is None:
         cli_utils.error(
             "The local ZenML dashboard is not running. Please call `zenml "
-             "up` first to start the ZenML dashboard locally."
+             "login --local` first to start the ZenML dashboard locally."
         )

     from zenml.zen_server.deploy.deployer import LocalServerDeployer
zenml/cli/stack.py CHANGED
@@ -1129,14 +1129,12 @@ def export_stack(
 def _import_stack_component(
     component_type: StackComponentType,
     component_dict: Dict[str, Any],
-     component_spec_path: Optional[str] = None,
 ) -> UUID:
     """Import a single stack component with given type/config.

     Args:
         component_type: The type of component to import.
         component_dict: Dict representation of the component to import.
-         component_spec_path: Path to the component spec file.

     Returns:
         The ID of the imported component.
@@ -1172,7 +1170,6 @@ def _import_stack_component(
         component_type=component_type,
         flavor=flavor,
         configuration=config,
-         component_spec_path=component_spec_path,
     )
     return component.id

zenml/cli/stack_components.py CHANGED
@@ -573,7 +573,6 @@ def generate_stack_component_copy_command(
         component_type=component_to_copy.type,
         configuration=component_to_copy.configuration,
         labels=component_to_copy.labels,
-         component_spec_path=component_to_copy.component_spec_path,
     )
     print_model_url(get_component_url(copied_component))

zenml/cli/utils.py CHANGED
@@ -711,11 +711,6 @@ def print_stack_component_configuration(

     console.print(rich_table)

-     if component.component_spec_path:
-         declare(
-             f"Component spec path for `mlstacks`: {component.component_spec_path}"
-         )
-

 def expand_argument_value_from_file(name: str, value: str) -> str:
     """Expands the value of an argument pointing to a file into the contents of that file.
zenml/client.py CHANGED
@@ -60,7 +60,6 @@ from zenml.constants import (
 from zenml.enums import (
     ArtifactType,
     LogicalOperators,
-     MetadataResourceTypes,
     ModelStages,
     OAuthDeviceStatus,
     PluginSubType,
@@ -137,6 +136,7 @@ from zenml.models import (
     PipelineRunFilter,
     PipelineRunResponse,
     RunMetadataRequest,
+     RunMetadataResource,
     RunTemplateFilter,
     RunTemplateRequest,
     RunTemplateResponse,
@@ -1979,7 +1979,6 @@ class Client(metaclass=ClientMetaClass):
         flavor: str,
         component_type: StackComponentType,
         configuration: Dict[str, str],
-         component_spec_path: Optional[str] = None,
         labels: Optional[Dict[str, Any]] = None,
     ) -> "ComponentResponse":
         """Registers a stack component.
@@ -1987,7 +1986,6 @@ class Client(metaclass=ClientMetaClass):
         Args:
             name: The name of the stack component.
             flavor: The flavor of the stack component.
-             component_spec_path: The path to the stack spec file.
             component_type: The type of the stack component.
             configuration: The configuration of the stack component.
             labels: The labels of the stack component.
@@ -2016,7 +2014,6 @@ class Client(metaclass=ClientMetaClass):
             name=name,
             type=component_type,
             flavor=flavor,
-             component_spec_path=component_spec_path,
             configuration=configuration,
             user=self.active_user.id,
             workspace=self.active_workspace.id,
@@ -2033,7 +2030,6 @@ class Client(metaclass=ClientMetaClass):
         name_id_or_prefix: Optional[Union[UUID, str]],
         component_type: StackComponentType,
         name: Optional[str] = None,
-         component_spec_path: Optional[str] = None,
         configuration: Optional[Dict[str, Any]] = None,
         labels: Optional[Dict[str, Any]] = None,
         disconnect: Optional[bool] = None,
@@ -2047,7 +2043,6 @@ class Client(metaclass=ClientMetaClass):
                 update.
             component_type: The type of the stack component to update.
             name: The new name of the stack component.
-             component_spec_path: The new path to the stack spec file.
             configuration: The new configuration of the stack component.
             labels: The new labels of the stack component.
             disconnect: Whether to disconnect the stack component from its
@@ -2072,7 +2067,6 @@ class Client(metaclass=ClientMetaClass):
         update_model = ComponentUpdate(
             workspace=self.active_workspace.id,
             user=self.active_user.id,
-             component_spec_path=component_spec_path,
         )

         if name is not None:
@@ -4438,23 +4432,20 @@ class Client(metaclass=ClientMetaClass):
     def create_run_metadata(
         self,
         metadata: Dict[str, "MetadataType"],
-         resource_id: UUID,
-         resource_type: MetadataResourceTypes,
+         resources: List[RunMetadataResource],
         stack_component_id: Optional[UUID] = None,
+         publisher_step_id: Optional[UUID] = None,
     ) -> None:
         """Create run metadata.

         Args:
             metadata: The metadata to create as a dictionary of key-value pairs.
-             resource_id: The ID of the resource for which the
-                 metadata was produced.
-             resource_type: The type of the resource for which the
+             resources: The list of IDs and types of the resources for that the
                 metadata was produced.
             stack_component_id: The ID of the stack component that produced
                 the metadata.
-
-         Returns:
-             None
+             publisher_step_id: The ID of the step execution that publishes
+                 this metadata automatically.
         """
         from zenml.metadata.metadata_types import get_metadata_type

@@ -4483,14 +4474,13 @@ class Client(metaclass=ClientMetaClass):
         run_metadata = RunMetadataRequest(
             workspace=self.active_workspace.id,
             user=self.active_user.id,
-             resource_id=resource_id,
-             resource_type=resource_type,
+             resources=resources,
             stack_component_id=stack_component_id,
+             publisher_step_id=publisher_step_id,
             values=values,
             types=types,
         )
         self.zen_store.create_run_metadata(run_metadata)
-         return None

     # -------------------------------- Secrets ---------------------------------

zenml/config/compiler.py CHANGED
@@ -99,7 +99,10 @@ class Compiler:

         self._apply_stack_default_settings(pipeline=pipeline, stack=stack)
         if run_configuration.run_name:
-             self._verify_run_name(run_configuration.run_name)
+             self._verify_run_name(
+                 run_configuration.run_name,
+                 pipeline.configuration.substitutions,
+             )

         pipeline_settings = self._filter_and_validate_settings(
             settings=pipeline.configuration.settings,
@@ -305,16 +308,22 @@ class Compiler:
         return default_settings

     @staticmethod
-     def _verify_run_name(run_name: str) -> None:
+     def _verify_run_name(
+         run_name: str,
+         substitutions: Dict[str, str],
+     ) -> None:
         """Verifies that the run name contains only valid placeholders.

         Args:
             run_name: The run name to verify.
+             substitutions: The substitutions to be used in the run name.

         Raises:
             ValueError: If the run name contains invalid placeholders.
         """
-         valid_placeholder_names = {"date", "time"}
+         valid_placeholder_names = {"date", "time"}.union(
+             set(substitutions.keys())
+         )
         placeholders = {
             v[1] for v in string.Formatter().parse(run_name) if v[1]
         }
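With this change, custom placeholders become valid in run names as long as matching substitutions are configured. A minimal sketch follows; the pipeline and placeholder names are hypothetical, and the `substitutions` parameter on the pipeline decorator is assumed based on the related changes to `pipeline_decorator.py` and `PipelineRunConfiguration` in this release.

```python
from zenml import pipeline, step


@step
def train() -> None: ...


# Assumption: the decorator accepts `substitutions`, as this release suggests.
@pipeline(substitutions={"stage": "staging"})
def training_pipeline() -> None:
    train()


# "stage" is now a valid run-name placeholder alongside the built-in
# "date" and "time".
training_pipeline.with_options(
    run_name="training_{stage}_{date}_{time}"
)()
```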
zenml/config/pipeline_configurations.py CHANGED
@@ -13,6 +13,7 @@
 # permissions and limitations under the License.
 """Pipeline configuration classes."""

+ from datetime import datetime
 from typing import TYPE_CHECKING, Any, Dict, List, Optional

 from pydantic import SerializeAsAny, field_validator
@@ -46,6 +47,25 @@ class PipelineConfigurationUpdate(StrictBaseModel):
     model: Optional[Model] = None
     parameters: Optional[Dict[str, Any]] = None
     retry: Optional[StepRetryConfig] = None
+     substitutions: Dict[str, str] = {}
+
+     def _get_full_substitutions(
+         self, start_time: Optional[datetime]
+     ) -> Dict[str, str]:
+         """Returns the full substitutions dict.
+
+         Args:
+             start_time: Start time of the pipeline run.
+
+         Returns:
+             The full substitutions dict including date and time.
+         """
+         if start_time is None:
+             start_time = datetime.utcnow()
+         ret = self.substitutions.copy()
+         ret.setdefault("date", start_time.strftime("%Y_%m_%d"))
+         ret.setdefault("time", start_time.strftime("%H_%M_%S_%f"))
+         return ret


 class PipelineConfiguration(PipelineConfigurationUpdate):
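The date/time defaults added by `_get_full_substitutions` resolve roughly as sketched below; the timestamp and the custom key are illustrative only.

```python
from datetime import datetime

# Mirrors the helper above: user-provided substitutions win, and "date"/"time"
# are filled from the run start time when not explicitly set.
start_time = datetime(2024, 11, 30, 12, 30, 45)
substitutions = {"experiment": "baseline"}

full = dict(substitutions)
full.setdefault("date", start_time.strftime("%Y_%m_%d"))     # "2024_11_30"
full.setdefault("time", start_time.strftime("%H_%M_%S_%f"))  # "12_30_45_000000"
```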
zenml/config/pipeline_run_configuration.py CHANGED
@@ -52,3 +52,4 @@ class PipelineRunConfiguration(
     retry: Optional[StepRetryConfig] = None
     failure_hook_source: Optional[SourceWithValidator] = None
     success_hook_source: Optional[SourceWithValidator] = None
+     substitutions: Dict[str, str] = {}
zenml/config/step_configurations.py CHANGED
@@ -13,6 +13,7 @@
 # permissions and limitations under the License.
 """Pipeline configuration classes."""

+ from datetime import datetime
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -49,6 +50,7 @@ from zenml.utils.pydantic_utils import before_validator_handler

 if TYPE_CHECKING:
     from zenml.config import DockerSettings, ResourceSettings
+     from zenml.config.pipeline_configurations import PipelineConfiguration

 logger = get_logger(__name__)

@@ -152,6 +154,7 @@ class StepConfigurationUpdate(StrictBaseModel):
     success_hook_source: Optional[SourceWithValidator] = None
     model: Optional[Model] = None
     retry: Optional[StepRetryConfig] = None
+     substitutions: Dict[str, str] = {}

     outputs: Mapping[str, PartialArtifactConfiguration] = {}

@@ -237,6 +240,24 @@ class StepConfiguration(PartialStepConfiguration):
             model_or_dict = model_or_dict.model_dump()
         return DockerSettings.model_validate(model_or_dict)

+     def _get_full_substitutions(
+         self,
+         pipeline_config: "PipelineConfiguration",
+         start_time: Optional[datetime],
+     ) -> Dict[str, str]:
+         """Get the full set of substitutions for this step configuration.
+
+         Args:
+             pipeline_config: The pipeline configuration.
+             start_time: The start time of the pipeline run.
+
+         Returns:
+             The full set of substitutions for this step configuration.
+         """
+         ret = pipeline_config._get_full_substitutions(start_time)
+         ret.update(self.substitutions)
+         return ret
+

 class InputSpec(StrictBaseModel):
     """Step input specification."""
zenml/enums.py CHANGED
@@ -60,6 +60,7 @@ class VisualizationType(StrEnum):
     HTML = "html"
     IMAGE = "image"
     MARKDOWN = "markdown"
+     JSON = "json"


 class ZenMLServiceType(StrEnum):
zenml/integrations/feast/__init__.py CHANGED
@@ -31,7 +31,7 @@ class FeastIntegration(Integration):

     NAME = FEAST
     # click is added to keep the feast click version in sync with ZenML's click
-     REQUIREMENTS = ["feast", "click>=8.0.1,<8.1.4"]
+     REQUIREMENTS = ["feast>=0.12.0", "click>=8.0.1,<8.1.4"]
     REQUIREMENTS_IGNORED_ON_UNINSTALL = ["click", "pandas"]

     @classmethod
zenml/integrations/feast/feature_stores/feast_feature_store.py CHANGED
@@ -16,7 +16,7 @@
 from typing import Any, Dict, List, Union, cast

 import pandas as pd
- from feast import FeatureStore  # type: ignore
+ from feast import FeatureService, FeatureStore  # type: ignore
 from feast.infra.registry.base_registry import BaseRegistry  # type: ignore

 from zenml.feature_stores.base_feature_store import BaseFeatureStore
@@ -43,14 +43,14 @@ class FeastFeatureStore(BaseFeatureStore):
     def get_historical_features(
         self,
         entity_df: Union[pd.DataFrame, str],
-         features: List[str],
+         features: Union[List[str], FeatureService],
         full_feature_names: bool = False,
     ) -> pd.DataFrame:
         """Returns the historical features for training or batch scoring.

         Args:
             entity_df: The entity DataFrame or entity name.
-             features: The features to retrieve.
+             features: The features to retrieve or a FeatureService.
             full_feature_names: Whether to return the full feature names.

         Raise:
@@ -70,14 +70,14 @@ class FeastFeatureStore(BaseFeatureStore):
     def get_online_features(
         self,
         entity_rows: List[Dict[str, Any]],
-         features: List[str],
+         features: Union[List[str], FeatureService],
         full_feature_names: bool = False,
     ) -> Dict[str, Any]:
         """Returns the latest online feature data.

         Args:
             entity_rows: The entity rows to retrieve.
-             features: The features to retrieve.
+             features: The features to retrieve or a FeatureService.
             full_feature_names: Whether to return the full feature names.

         Raise:
@@ -118,17 +118,21 @@ class FeastFeatureStore(BaseFeatureStore):
         fs = FeatureStore(repo_path=self.config.feast_repo)
         return [ds.name for ds in fs.list_entities()]

-     def get_feature_services(self) -> List[str]:
-         """Returns the feature service names.
+     def get_feature_services(self) -> List[FeatureService]:
+         """Returns the feature services.

         Raise:
             ConnectionError: If the online component (Redis) is not available.

         Returns:
-             The feature service names.
+             The feature services.
         """
         fs = FeatureStore(repo_path=self.config.feast_repo)
-         return [ds.name for ds in fs.list_feature_services()]
+         feature_services: List[FeatureService] = list(
+             fs.list_feature_services()
+         )
+
+         return feature_services

     def get_feature_views(self) -> List[str]:
         """Returns the feature view names.