ob-metaflow-stubs 6.0.3.116__py2.py3-none-any.whl → 6.0.3.117__py2.py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (140) hide show
  1. metaflow-stubs/__init__.pyi +625 -622
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +3 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +94 -58
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +3 -3
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  33. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  34. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +8 -8
  35. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  36. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  43. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  45. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  52. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  55. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  57. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  58. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  59. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  61. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  62. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  69. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  73. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  74. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  75. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  76. metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
  77. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  78. metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
  79. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  80. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  82. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  83. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  84. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  85. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  88. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  90. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  97. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  98. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  100. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  101. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  102. metaflow-stubs/plugins/package_cli.pyi +2 -2
  103. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/perimeters.pyi +2 -2
  105. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  109. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  112. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  116. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  118. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  121. metaflow-stubs/procpoll.pyi +2 -2
  122. metaflow-stubs/profilers/__init__.pyi +2 -2
  123. metaflow-stubs/pylint_wrapper.pyi +2 -2
  124. metaflow-stubs/runner/__init__.pyi +2 -2
  125. metaflow-stubs/runner/deployer.pyi +3 -3
  126. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +2 -2
  131. metaflow-stubs/system/__init__.pyi +3 -3
  132. metaflow-stubs/system/system_logger.pyi +3 -3
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. {ob_metaflow_stubs-6.0.3.116.dist-info → ob_metaflow_stubs-6.0.3.117.dist-info}/METADATA +1 -1
  137. ob_metaflow_stubs-6.0.3.117.dist-info/RECORD +140 -0
  138. {ob_metaflow_stubs-6.0.3.116.dist-info → ob_metaflow_stubs-6.0.3.117.dist-info}/WHEEL +1 -1
  139. ob_metaflow_stubs-6.0.3.116.dist-info/RECORD +0 -140
  140. {ob_metaflow_stubs-6.0.3.116.dist-info → ob_metaflow_stubs-6.0.3.117.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.27.1+obcheckpoint(0.1.1);ob(v1) #
4
- # Generated on 2024-11-04T20:32:12.972782 #
3
+ # MF version: 2.12.27.1+obcheckpoint(0.1.2);ob(v1) #
4
+ # Generated on 2024-11-13T19:05:29.340728 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.datastore.inputs
12
- import typing
13
11
  import metaflow.flowspec
14
- import metaflow.metaflow_current
15
- import metaflow.runner.metaflow_runner
16
12
  import datetime
17
- import metaflow.client.core
18
13
  import metaflow_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures
19
- import metaflow.events
14
+ import metaflow.client.core
20
15
  import metaflow.parameters
21
16
  import metaflow._vendor.click.types
17
+ import metaflow.events
18
+ import metaflow.metaflow_current
19
+ import metaflow.runner.metaflow_runner
20
+ import metaflow.datastore.inputs
21
+ import typing
22
22
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
23
23
  StepFlag = typing.NewType("StepFlag", bool)
24
24
 
@@ -513,55 +513,153 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
513
513
  ...
514
514
 
515
515
  @typing.overload
516
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
516
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
517
517
  """
518
- Specifies the number of times the task corresponding
519
- to a step needs to be retried.
518
+ Enables loading / saving of models within a step.
520
519
 
521
- This decorator is useful for handling transient errors, such as networking issues.
522
- If your task contains operations that can't be retried safely, e.g. database updates,
523
- it is advisable to annotate it with `@retry(times=0)`.
524
520
 
525
- This can be used in conjunction with the `@catch` decorator. The `@catch`
526
- decorator will execute a no-op task after all retries have been exhausted,
527
- ensuring that the flow execution can continue.
521
+ Parameters
522
+ ----------
523
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
524
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
525
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
526
+ - `current.checkpoint`
527
+ - `current.model`
528
+ - `current.huggingface_hub`
529
+
530
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
531
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
532
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
533
+
534
+ temp_dir_root : str, default: None
535
+ The root directory under which `current.model.loaded` will store loaded models
536
+
537
+
538
+
539
+ """
540
+ ...
541
+
542
+ @typing.overload
543
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
544
+ ...
545
+
546
+ @typing.overload
547
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
548
+ ...
549
+
550
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
551
+ """
552
+ Enables loading / saving of models within a step.
553
+
528
554
 
529
555
  Parameters
530
556
  ----------
531
- times : int, default 3
532
- Number of times to retry this task.
533
- minutes_between_retries : int, default 2
534
- Number of minutes between retries.
557
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
558
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
559
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
560
+ - `current.checkpoint`
561
+ - `current.model`
562
+ - `current.huggingface_hub`
563
+
564
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
565
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
566
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
567
+
568
+ temp_dir_root : str, default: None
569
+ The root directory under which `current.model.loaded` will store loaded models
570
+
571
+
572
+
535
573
  """
536
574
  ...
537
575
 
538
576
  @typing.overload
539
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
577
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
578
+ """
579
+ Specifies a timeout for your step.
580
+
581
+ This decorator is useful if this step may hang indefinitely.
582
+
583
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
584
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
585
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
586
+
587
+ Note that all the values specified in parameters are added together so if you specify
588
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
589
+
590
+ Parameters
591
+ ----------
592
+ seconds : int, default 0
593
+ Number of seconds to wait prior to timing out.
594
+ minutes : int, default 0
595
+ Number of minutes to wait prior to timing out.
596
+ hours : int, default 0
597
+ Number of hours to wait prior to timing out.
598
+ """
540
599
  ...
541
600
 
542
601
  @typing.overload
543
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
602
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
544
603
  ...
545
604
 
546
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
605
+ @typing.overload
606
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
607
+ ...
608
+
609
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
547
610
  """
548
- Specifies the number of times the task corresponding
549
- to a step needs to be retried.
611
+ Specifies a timeout for your step.
550
612
 
551
- This decorator is useful for handling transient errors, such as networking issues.
552
- If your task contains operations that can't be retried safely, e.g. database updates,
553
- it is advisable to annotate it with `@retry(times=0)`.
613
+ This decorator is useful if this step may hang indefinitely.
554
614
 
555
- This can be used in conjunction with the `@catch` decorator. The `@catch`
556
- decorator will execute a no-op task after all retries have been exhausted,
557
- ensuring that the flow execution can continue.
615
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
616
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
617
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
618
+
619
+ Note that all the values specified in parameters are added together so if you specify
620
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
558
621
 
559
622
  Parameters
560
623
  ----------
561
- times : int, default 3
562
- Number of times to retry this task.
563
- minutes_between_retries : int, default 2
564
- Number of minutes between retries.
624
+ seconds : int, default 0
625
+ Number of seconds to wait prior to timing out.
626
+ minutes : int, default 0
627
+ Number of minutes to wait prior to timing out.
628
+ hours : int, default 0
629
+ Number of hours to wait prior to timing out.
630
+ """
631
+ ...
632
+
633
+ @typing.overload
634
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
635
+ """
636
+ Specifies secrets to be retrieved and injected as environment variables prior to
637
+ the execution of a step.
638
+
639
+ Parameters
640
+ ----------
641
+ sources : List[Union[str, Dict[str, Any]]], default: []
642
+ List of secret specs, defining how the secrets are to be retrieved
643
+ """
644
+ ...
645
+
646
+ @typing.overload
647
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
648
+ ...
649
+
650
+ @typing.overload
651
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
652
+ ...
653
+
654
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
655
+ """
656
+ Specifies secrets to be retrieved and injected as environment variables prior to
657
+ the execution of a step.
658
+
659
+ Parameters
660
+ ----------
661
+ sources : List[Union[str, Dict[str, Any]]], default: []
662
+ List of secret specs, defining how the secrets are to be retrieved
565
663
  """
566
664
  ...
567
665
 
@@ -617,136 +715,74 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
617
715
  ...
618
716
 
619
717
  @typing.overload
620
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
718
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
621
719
  """
622
- Specifies the Conda environment for the step.
720
+ Specifies the number of times the task corresponding
721
+ to a step needs to be retried.
623
722
 
624
- Information in this decorator will augment any
625
- attributes set in the `@conda_base` flow-level decorator. Hence,
626
- you can use `@conda_base` to set packages required by all
627
- steps and use `@conda` to specify step-specific overrides.
723
+ This decorator is useful for handling transient errors, such as networking issues.
724
+ If your task contains operations that can't be retried safely, e.g. database updates,
725
+ it is advisable to annotate it with `@retry(times=0)`.
726
+
727
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
728
+ decorator will execute a no-op task after all retries have been exhausted,
729
+ ensuring that the flow execution can continue.
628
730
 
629
731
  Parameters
630
732
  ----------
631
- packages : Dict[str, str], default {}
632
- Packages to use for this step. The key is the name of the package
633
- and the value is the version to use.
634
- libraries : Dict[str, str], default {}
635
- Supported for backward compatibility. When used with packages, packages will take precedence.
636
- python : str, optional, default None
637
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
638
- that the version used will correspond to the version of the Python interpreter used to start the run.
639
- disabled : bool, default False
640
- If set to True, disables @conda.
733
+ times : int, default 3
734
+ Number of times to retry this task.
735
+ minutes_between_retries : int, default 2
736
+ Number of minutes between retries.
641
737
  """
642
738
  ...
643
739
 
644
740
  @typing.overload
645
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
741
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
646
742
  ...
647
743
 
648
744
  @typing.overload
649
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
745
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
650
746
  ...
651
747
 
652
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
748
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
653
749
  """
654
- Specifies the Conda environment for the step.
750
+ Specifies the number of times the task corresponding
751
+ to a step needs to be retried.
655
752
 
656
- Information in this decorator will augment any
657
- attributes set in the `@conda_base` flow-level decorator. Hence,
658
- you can use `@conda_base` to set packages required by all
659
- steps and use `@conda` to specify step-specific overrides.
753
+ This decorator is useful for handling transient errors, such as networking issues.
754
+ If your task contains operations that can't be retried safely, e.g. database updates,
755
+ it is advisable to annotate it with `@retry(times=0)`.
756
+
757
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
758
+ decorator will execute a no-op task after all retries have been exhausted,
759
+ ensuring that the flow execution can continue.
660
760
 
661
761
  Parameters
662
762
  ----------
663
- packages : Dict[str, str], default {}
664
- Packages to use for this step. The key is the name of the package
665
- and the value is the version to use.
666
- libraries : Dict[str, str], default {}
667
- Supported for backward compatibility. When used with packages, packages will take precedence.
668
- python : str, optional, default None
669
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
670
- that the version used will correspond to the version of the Python interpreter used to start the run.
671
- disabled : bool, default False
672
- If set to True, disables @conda.
763
+ times : int, default 3
764
+ Number of times to retry this task.
765
+ minutes_between_retries : int, default 2
766
+ Number of minutes between retries.
673
767
  """
674
768
  ...
675
769
 
676
770
  @typing.overload
677
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
771
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
678
772
  """
679
- Specifies the resources needed when executing this step.
680
-
681
- Use `@resources` to specify the resource requirements
682
- independently of the specific compute layer (`@batch`, `@kubernetes`).
683
-
684
- You can choose the compute layer on the command line by executing e.g.
685
- ```
686
- python myflow.py run --with batch
687
- ```
688
- or
689
- ```
690
- python myflow.py run --with kubernetes
691
- ```
692
- which executes the flow on the desired system using the
693
- requirements specified in `@resources`.
694
-
695
- Parameters
696
- ----------
697
- cpu : int, default 1
698
- Number of CPUs required for this step.
699
- gpu : int, optional, default None
700
- Number of GPUs required for this step.
701
- disk : int, optional, default None
702
- Disk size (in MB) required for this step. Only applies on Kubernetes.
703
- memory : int, default 4096
704
- Memory size (in MB) required for this step.
705
- shared_memory : int, optional, default None
706
- The value for the size (in MiB) of the /dev/shm volume for this step.
707
- This parameter maps to the `--shm-size` option in Docker.
773
+ Decorator prototype for all step decorators. This function gets specialized
774
+ and imported for all decorators types by _import_plugin_decorators().
708
775
  """
709
776
  ...
710
777
 
711
778
  @typing.overload
712
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
713
- ...
714
-
715
- @typing.overload
716
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
779
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
717
780
  ...
718
781
 
719
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
782
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
720
783
  """
721
- Specifies the resources needed when executing this step.
722
-
723
- Use `@resources` to specify the resource requirements
724
- independently of the specific compute layer (`@batch`, `@kubernetes`).
725
-
726
- You can choose the compute layer on the command line by executing e.g.
727
- ```
728
- python myflow.py run --with batch
729
- ```
730
- or
731
- ```
732
- python myflow.py run --with kubernetes
733
- ```
734
- which executes the flow on the desired system using the
735
- requirements specified in `@resources`.
736
-
737
- Parameters
738
- ----------
739
- cpu : int, default 1
740
- Number of CPUs required for this step.
741
- gpu : int, optional, default None
742
- Number of GPUs required for this step.
743
- disk : int, optional, default None
744
- Disk size (in MB) required for this step. Only applies on Kubernetes.
745
- memory : int, default 4096
746
- Memory size (in MB) required for this step.
747
- shared_memory : int, optional, default None
748
- The value for the size (in MiB) of the /dev/shm volume for this step.
749
- This parameter maps to the `--shm-size` option in Docker.
784
+ Decorator prototype for all step decorators. This function gets specialized
785
+ and imported for all decorators types by _import_plugin_decorators().
750
786
  """
751
787
  ...
752
788
 
@@ -768,188 +804,244 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
768
804
  ...
769
805
 
770
806
  @typing.overload
771
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
807
+ def checkpoint(*, load_policy: str = "fresh", temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
772
808
  """
773
- Specifies a timeout for your step.
774
-
775
- This decorator is useful if this step may hang indefinitely.
776
-
777
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
778
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
779
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
809
+ Enables checkpointing for a step.
780
810
 
781
- Note that all the values specified in parameters are added together so if you specify
782
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
783
811
 
784
812
  Parameters
785
813
  ----------
786
- seconds : int, default 0
787
- Number of seconds to wait prior to timing out.
788
- minutes : int, default 0
789
- Number of minutes to wait prior to timing out.
790
- hours : int, default 0
791
- Number of hours to wait prior to timing out.
814
+ load_policy : str, default: "fresh"
815
+ The policy for loading the checkpoint. The following policies are supported:
816
+ - "eager": Loads the latest available checkpoint within the namespace.
817
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
818
+ will be loaded at the start of the task.
819
+ - "none": Do not load any checkpoint
820
+ - "fresh": Loads the latest checkpoint created within the running Task.
821
+ This mode helps loading checkpoints across various retry attempts of the same task.
822
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
823
+ created within the task will be loaded when the task retries execution on failure.
824
+
825
+ temp_dir_root : str, default: None
826
+ The root directory under which `current.checkpoint.directory` will be created.
827
+
828
+
829
+
792
830
  """
793
831
  ...
794
832
 
795
833
  @typing.overload
796
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
797
835
  ...
798
836
 
799
837
  @typing.overload
800
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
838
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
801
839
  ...
802
840
 
803
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
841
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = "fresh", temp_dir_root: str = None):
804
842
  """
805
- Specifies a timeout for your step.
806
-
807
- This decorator is useful if this step may hang indefinitely.
808
-
809
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
810
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
811
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
843
+ Enables checkpointing for a step.
812
844
 
813
- Note that all the values specified in parameters are added together so if you specify
814
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
815
845
 
816
846
  Parameters
817
847
  ----------
818
- seconds : int, default 0
819
- Number of seconds to wait prior to timing out.
820
- minutes : int, default 0
821
- Number of minutes to wait prior to timing out.
822
- hours : int, default 0
823
- Number of hours to wait prior to timing out.
848
+ load_policy : str, default: "fresh"
849
+ The policy for loading the checkpoint. The following policies are supported:
850
+ - "eager": Loads the latest available checkpoint within the namespace.
851
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
852
+ will be loaded at the start of the task.
853
+ - "none": Do not load any checkpoint
854
+ - "fresh": Loads the latest checkpoint created within the running Task.
855
+ This mode helps loading checkpoints across various retry attempts of the same task.
856
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
857
+ created within the task will be loaded when the task retries execution on failure.
858
+
859
+ temp_dir_root : str, default: None
860
+ The root directory under which `current.checkpoint.directory` will be created.
861
+
862
+
863
+
824
864
  """
825
865
  ...
826
866
 
827
867
  @typing.overload
828
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
868
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
829
869
  """
830
- Enables loading / saving of models within a step.
870
+ Specifies the Conda environment for the step.
831
871
 
872
+ Information in this decorator will augment any
873
+ attributes set in the `@conda_base` flow-level decorator. Hence,
874
+ you can use `@conda_base` to set packages required by all
875
+ steps and use `@conda` to specify step-specific overrides.
832
876
 
833
877
  Parameters
834
878
  ----------
835
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
836
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
837
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
838
- - `current.checkpoint`
839
- - `current.model`
840
- - `current.huggingface_hub`
841
-
842
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
843
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
844
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
845
-
846
- temp_dir_root : str, default: None
847
- The root directory under which `current.model.loaded` will store loaded models
848
-
849
-
850
-
879
+ packages : Dict[str, str], default {}
880
+ Packages to use for this step. The key is the name of the package
881
+ and the value is the version to use.
882
+ libraries : Dict[str, str], default {}
883
+ Supported for backward compatibility. When used with packages, packages will take precedence.
884
+ python : str, optional, default None
885
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
886
+ that the version used will correspond to the version of the Python interpreter used to start the run.
887
+ disabled : bool, default False
888
+ If set to True, disables @conda.
851
889
  """
852
890
  ...
853
891
 
854
892
  @typing.overload
855
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
893
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
856
894
  ...
857
895
 
858
896
  @typing.overload
859
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
897
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
860
898
  ...
861
899
 
862
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
900
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
863
901
  """
864
- Enables loading / saving of models within a step.
902
+ Specifies the Conda environment for the step.
865
903
 
904
+ Information in this decorator will augment any
905
+ attributes set in the `@conda_base` flow-level decorator. Hence,
906
+ you can use `@conda_base` to set packages required by all
907
+ steps and use `@conda` to specify step-specific overrides.
866
908
 
867
909
  Parameters
868
910
  ----------
869
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
870
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
871
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
872
- - `current.checkpoint`
873
- - `current.model`
874
- - `current.huggingface_hub`
875
-
876
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
877
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
878
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
911
+ packages : Dict[str, str], default {}
912
+ Packages to use for this step. The key is the name of the package
913
+ and the value is the version to use.
914
+ libraries : Dict[str, str], default {}
915
+ Supported for backward compatibility. When used with packages, packages will take precedence.
916
+ python : str, optional, default None
917
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
918
+ that the version used will correspond to the version of the Python interpreter used to start the run.
919
+ disabled : bool, default False
920
+ If set to True, disables @conda.
921
+ """
922
+ ...
923
+
924
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
925
+ """
926
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
879
927
 
880
- temp_dir_root : str, default: None
881
- The root directory under which `current.model.loaded` will store loaded models
928
+ Parameters
929
+ ----------
930
+ temp_dir_root : str, optional
931
+ The root directory that will hold the temporary directory where objects will be downloaded.
882
932
 
933
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
934
+ The list of models to load.
883
935
 
884
936
 
885
937
  """
886
938
  ...
887
939
 
888
940
  @typing.overload
889
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
941
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
890
942
  """
891
- Specifies that the step will success under all circumstances.
943
+ Specifies the resources needed when executing this step.
892
944
 
893
- The decorator will create an optional artifact, specified by `var`, which
894
- contains the exception raised. You can use it to detect the presence
895
- of errors, indicating that all happy-path artifacts produced by the step
896
- are missing.
945
+ Use `@resources` to specify the resource requirements
946
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
947
+
948
+ You can choose the compute layer on the command line by executing e.g.
949
+ ```
950
+ python myflow.py run --with batch
951
+ ```
952
+ or
953
+ ```
954
+ python myflow.py run --with kubernetes
955
+ ```
956
+ which executes the flow on the desired system using the
957
+ requirements specified in `@resources`.
897
958
 
898
959
  Parameters
899
960
  ----------
900
- var : str, optional, default None
901
- Name of the artifact in which to store the caught exception.
902
- If not specified, the exception is not stored.
903
- print_exception : bool, default True
904
- Determines whether or not the exception is printed to
905
- stdout when caught.
961
+ cpu : int, default 1
962
+ Number of CPUs required for this step.
963
+ gpu : int, optional, default None
964
+ Number of GPUs required for this step.
965
+ disk : int, optional, default None
966
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
967
+ memory : int, default 4096
968
+ Memory size (in MB) required for this step.
969
+ shared_memory : int, optional, default None
970
+ The value for the size (in MiB) of the /dev/shm volume for this step.
971
+ This parameter maps to the `--shm-size` option in Docker.
906
972
  """
907
973
  ...
908
974
 
909
975
  @typing.overload
910
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
976
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
911
977
  ...
912
978
 
913
979
  @typing.overload
914
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
980
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
915
981
  ...
916
982
 
917
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
983
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
918
984
  """
919
- Specifies that the step will success under all circumstances.
985
+ Specifies the resources needed when executing this step.
920
986
 
921
- The decorator will create an optional artifact, specified by `var`, which
922
- contains the exception raised. You can use it to detect the presence
923
- of errors, indicating that all happy-path artifacts produced by the step
924
- are missing.
987
+ Use `@resources` to specify the resource requirements
988
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
989
+
990
+ You can choose the compute layer on the command line by executing e.g.
991
+ ```
992
+ python myflow.py run --with batch
993
+ ```
994
+ or
995
+ ```
996
+ python myflow.py run --with kubernetes
997
+ ```
998
+ which executes the flow on the desired system using the
999
+ requirements specified in `@resources`.
925
1000
 
926
1001
  Parameters
927
1002
  ----------
928
- var : str, optional, default None
929
- Name of the artifact in which to store the caught exception.
930
- If not specified, the exception is not stored.
931
- print_exception : bool, default True
932
- Determines whether or not the exception is printed to
933
- stdout when caught.
1003
+ cpu : int, default 1
1004
+ Number of CPUs required for this step.
1005
+ gpu : int, optional, default None
1006
+ Number of GPUs required for this step.
1007
+ disk : int, optional, default None
1008
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1009
+ memory : int, default 4096
1010
+ Memory size (in MB) required for this step.
1011
+ shared_memory : int, optional, default None
1012
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1013
+ This parameter maps to the `--shm-size` option in Docker.
934
1014
  """
935
1015
  ...
936
1016
 
937
1017
  @typing.overload
938
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1018
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
939
1019
  """
940
- Decorator prototype for all step decorators. This function gets specialized
941
- and imported for all decorators types by _import_plugin_decorators().
1020
+ Specifies environment variables to be set prior to the execution of a step.
1021
+
1022
+ Parameters
1023
+ ----------
1024
+ vars : Dict[str, str], default {}
1025
+ Dictionary of environment variables to set.
942
1026
  """
943
1027
  ...
944
1028
 
945
1029
  @typing.overload
946
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1030
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
947
1031
  ...
948
1032
 
949
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1033
+ @typing.overload
1034
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1035
+ ...
1036
+
1037
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
950
1038
  """
951
- Decorator prototype for all step decorators. This function gets specialized
952
- and imported for all decorators types by _import_plugin_decorators().
1039
+ Specifies environment variables to be set prior to the execution of a step.
1040
+
1041
+ Parameters
1042
+ ----------
1043
+ vars : Dict[str, str], default {}
1044
+ Dictionary of environment variables to set.
953
1045
  """
954
1046
  ...
955
1047
 
@@ -1023,231 +1115,146 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1023
1115
  ...
1024
1116
 
1025
1117
  @typing.overload
1026
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1027
- """
1028
- Specifies the PyPI packages for the step.
1029
-
1030
- Information in this decorator will augment any
1031
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1032
- you can use `@pypi_base` to set packages required by all
1033
- steps and use `@pypi` to specify step-specific overrides.
1034
-
1035
- Parameters
1036
- ----------
1037
- packages : Dict[str, str], default: {}
1038
- Packages to use for this step. The key is the name of the package
1039
- and the value is the version to use.
1040
- python : str, optional, default: None
1041
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1042
- that the version used will correspond to the version of the Python interpreter used to start the run.
1043
- """
1044
- ...
1045
-
1046
- @typing.overload
1047
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1048
- ...
1049
-
1050
- @typing.overload
1051
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1052
- ...
1053
-
1054
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1055
- """
1056
- Specifies the PyPI packages for the step.
1057
-
1058
- Information in this decorator will augment any
1059
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1060
- you can use `@pypi_base` to set packages required by all
1061
- steps and use `@pypi` to specify step-specific overrides.
1062
-
1063
- Parameters
1064
- ----------
1065
- packages : Dict[str, str], default: {}
1066
- Packages to use for this step. The key is the name of the package
1067
- and the value is the version to use.
1068
- python : str, optional, default: None
1069
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1070
- that the version used will correspond to the version of the Python interpreter used to start the run.
1071
- """
1072
- ...
1073
-
1074
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1075
- """
1076
- Decorator that helps cache, version and store models/datasets from huggingface hub.
1077
-
1078
- Parameters
1079
- ----------
1080
- temp_dir_root : str, optional
1081
- The root directory that will hold the temporary directory where objects will be downloaded.
1082
-
1083
-
1084
- """
1085
- ...
1086
-
1087
- @typing.overload
1088
- def checkpoint(*, load_policy: str = "fresh", temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1118
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1089
1119
  """
1090
- Enables checkpointing for a step.
1120
+ Specifies that the step will succeed under all circumstances.
1091
1121
 
1122
+ The decorator will create an optional artifact, specified by `var`, which
1123
+ contains the exception raised. You can use it to detect the presence
1124
+ of errors, indicating that all happy-path artifacts produced by the step
1125
+ are missing.
1092
1126
 
1093
1127
  Parameters
1094
1128
  ----------
1095
- load_policy : str, default: "fresh"
1096
- The policy for loading the checkpoint. The following policies are supported:
1097
- - "eager": Loads the the latest available checkpoint within the namespace.
1098
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1099
- will be loaded at the start of the task.
1100
- - "none": Do not load any checkpoint
1101
- - "fresh": Loads the lastest checkpoint created within the running Task.
1102
- This mode helps loading checkpoints across various retry attempts of the same task.
1103
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1104
- created within the task will be loaded when the task is retries execution on failure.
1105
-
1106
- temp_dir_root : str, default: None
1107
- The root directory under which `current.checkpoint.directory` will be created.
1108
-
1109
-
1110
-
1129
+ var : str, optional, default None
1130
+ Name of the artifact in which to store the caught exception.
1131
+ If not specified, the exception is not stored.
1132
+ print_exception : bool, default True
1133
+ Determines whether or not the exception is printed to
1134
+ stdout when caught.
1111
1135
  """
1112
1136
  ...
1113
1137
 
1114
1138
  @typing.overload
1115
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1139
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1116
1140
  ...
1117
1141
 
1118
1142
  @typing.overload
1119
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1143
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1120
1144
  ...
1121
1145
 
1122
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = "fresh", temp_dir_root: str = None):
1146
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1123
1147
  """
1124
- Enables checkpointing for a step.
1125
-
1126
-
1127
- Parameters
1128
- ----------
1129
- load_policy : str, default: "fresh"
1130
- The policy for loading the checkpoint. The following policies are supported:
1131
- - "eager": Loads the the latest available checkpoint within the namespace.
1132
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1133
- will be loaded at the start of the task.
1134
- - "none": Do not load any checkpoint
1135
- - "fresh": Loads the lastest checkpoint created within the running Task.
1136
- This mode helps loading checkpoints across various retry attempts of the same task.
1137
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1138
- created within the task will be loaded when the task is retries execution on failure.
1139
-
1140
- temp_dir_root : str, default: None
1141
- The root directory under which `current.checkpoint.directory` will be created.
1142
-
1143
-
1148
+ Specifies that the step will succeed under all circumstances.
1144
1149
 
1145
- """
1146
- ...
1147
-
1148
- @typing.overload
1149
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1150
- """
1151
- Specifies secrets to be retrieved and injected as environment variables prior to
1152
- the execution of a step.
1150
+ The decorator will create an optional artifact, specified by `var`, which
1151
+ contains the exception raised. You can use it to detect the presence
1152
+ of errors, indicating that all happy-path artifacts produced by the step
1153
+ are missing.
1153
1154
 
1154
1155
  Parameters
1155
1156
  ----------
1156
- sources : List[Union[str, Dict[str, Any]]], default: []
1157
- List of secret specs, defining how the secrets are to be retrieved
1157
+ var : str, optional, default None
1158
+ Name of the artifact in which to store the caught exception.
1159
+ If not specified, the exception is not stored.
1160
+ print_exception : bool, default True
1161
+ Determines whether or not the exception is printed to
1162
+ stdout when caught.
1158
1163
  """
1159
1164
  ...
1160
1165
 
1161
1166
  @typing.overload
1162
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1163
- ...
1164
-
1165
- @typing.overload
1166
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1167
- ...
1168
-
1169
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1167
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1170
1168
  """
1171
- Specifies secrets to be retrieved and injected as environment variables prior to
1172
- the execution of a step.
1169
+ Specifies the PyPI packages for the step.
1173
1170
 
1174
- Parameters
1175
- ----------
1176
- sources : List[Union[str, Dict[str, Any]]], default: []
1177
- List of secret specs, defining how the secrets are to be retrieved
1178
- """
1179
- ...
1180
-
1181
- @typing.overload
1182
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1183
- """
1184
- Specifies environment variables to be set prior to the execution of a step.
1171
+ Information in this decorator will augment any
1172
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1173
+ you can use `@pypi_base` to set packages required by all
1174
+ steps and use `@pypi` to specify step-specific overrides.
1185
1175
 
1186
1176
  Parameters
1187
1177
  ----------
1188
- vars : Dict[str, str], default {}
1189
- Dictionary of environment variables to set.
1178
+ packages : Dict[str, str], default: {}
1179
+ Packages to use for this step. The key is the name of the package
1180
+ and the value is the version to use.
1181
+ python : str, optional, default: None
1182
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1183
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1190
1184
  """
1191
1185
  ...
1192
1186
 
1193
1187
  @typing.overload
1194
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1188
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1195
1189
  ...
1196
1190
 
1197
1191
  @typing.overload
1198
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1192
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1199
1193
  ...
1200
1194
 
1201
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1195
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1202
1196
  """
1203
- Specifies environment variables to be set prior to the execution of a step.
1197
+ Specifies the PyPI packages for the step.
1198
+
1199
+ Information in this decorator will augment any
1200
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1201
+ you can use `@pypi_base` to set packages required by all
1202
+ steps and use `@pypi` to specify step-specific overrides.
1204
1203
 
1205
1204
  Parameters
1206
1205
  ----------
1207
- vars : Dict[str, str], default {}
1208
- Dictionary of environment variables to set.
1206
+ packages : Dict[str, str], default: {}
1207
+ Packages to use for this step. The key is the name of the package
1208
+ and the value is the version to use.
1209
+ python : str, optional, default: None
1210
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1211
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1209
1212
  """
1210
1213
  ...
1211
1214
 
1212
1215
  @typing.overload
1213
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1216
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1214
1217
  """
1215
- Specifies the event(s) that this flow depends on.
1218
+ Specifies the flow(s) that this flow depends on.
1216
1219
 
1217
1220
  ```
1218
- @trigger(event='foo')
1221
+ @trigger_on_finish(flow='FooFlow')
1219
1222
  ```
1220
1223
  or
1221
1224
  ```
1222
- @trigger(events=['foo', 'bar'])
1225
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1223
1226
  ```
1227
+ This decorator respects the @project decorator and triggers the flow
1228
+ when upstream runs within the same namespace complete successfully
1224
1229
 
1225
- Additionally, you can specify the parameter mappings
1226
- to map event payload to Metaflow parameters for the flow.
1230
+ Additionally, you can specify project aware upstream flow dependencies
1231
+ by specifying the fully qualified project_flow_name.
1227
1232
  ```
1228
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1233
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1229
1234
  ```
1230
1235
  or
1231
1236
  ```
1232
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1233
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1237
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1234
1238
  ```
1235
1239
 
1236
- 'parameters' can also be a list of strings and tuples like so:
1237
- ```
1238
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1239
- ```
1240
- This is equivalent to:
1240
+ You can also specify just the project or project branch (other values will be
1241
+ inferred from the current project or project branch):
1241
1242
  ```
1242
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1243
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1243
1244
  ```
1244
1245
 
1246
+ Note that `branch` is typically one of:
1247
+ - `prod`
1248
+ - `user.bob`
1249
+ - `test.my_experiment`
1250
+ - `prod.staging`
1251
+
1245
1252
  Parameters
1246
1253
  ----------
1247
- event : Union[str, Dict[str, Any]], optional, default None
1248
- Event dependency for this flow.
1249
- events : List[Union[str, Dict[str, Any]]], default []
1250
- Events dependency for this flow.
1254
+ flow : Union[str, Dict[str, str]], optional, default None
1255
+ Upstream flow dependency for this flow.
1256
+ flows : List[Union[str, Dict[str, str]]], default []
1257
+ Upstream flow dependencies for this flow.
1251
1258
  options : Dict[str, Any], default {}
1252
1259
  Backend-specific configuration for tuning eventing behavior.
1253
1260
 
@@ -1256,47 +1263,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
1256
1263
  ...
1257
1264
 
1258
1265
  @typing.overload
1259
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1266
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1260
1267
  ...
1261
1268
 
1262
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1269
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1263
1270
  """
1264
- Specifies the event(s) that this flow depends on.
1271
+ Specifies the flow(s) that this flow depends on.
1265
1272
 
1266
1273
  ```
1267
- @trigger(event='foo')
1274
+ @trigger_on_finish(flow='FooFlow')
1268
1275
  ```
1269
1276
  or
1270
1277
  ```
1271
- @trigger(events=['foo', 'bar'])
1278
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1272
1279
  ```
1280
+ This decorator respects the @project decorator and triggers the flow
1281
+ when upstream runs within the same namespace complete successfully
1273
1282
 
1274
- Additionally, you can specify the parameter mappings
1275
- to map event payload to Metaflow parameters for the flow.
1283
+ Additionally, you can specify project aware upstream flow dependencies
1284
+ by specifying the fully qualified project_flow_name.
1276
1285
  ```
1277
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1286
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1278
1287
  ```
1279
1288
  or
1280
1289
  ```
1281
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1282
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1290
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1283
1291
  ```
1284
1292
 
1285
- 'parameters' can also be a list of strings and tuples like so:
1286
- ```
1287
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1288
- ```
1289
- This is equivalent to:
1293
+ You can also specify just the project or project branch (other values will be
1294
+ inferred from the current project or project branch):
1290
1295
  ```
1291
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1296
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1292
1297
  ```
1293
1298
 
1299
+ Note that `branch` is typically one of:
1300
+ - `prod`
1301
+ - `user.bob`
1302
+ - `test.my_experiment`
1303
+ - `prod.staging`
1304
+
1294
1305
  Parameters
1295
1306
  ----------
1296
- event : Union[str, Dict[str, Any]], optional, default None
1297
- Event dependency for this flow.
1298
- events : List[Union[str, Dict[str, Any]]], default []
1299
- Events dependency for this flow.
1307
+ flow : Union[str, Dict[str, str]], optional, default None
1308
+ Upstream flow dependency for this flow.
1309
+ flows : List[Union[str, Dict[str, str]]], default []
1310
+ Upstream flow dependencies for this flow.
1300
1311
  options : Dict[str, Any], default {}
1301
1312
  Backend-specific configuration for tuning eventing behavior.
1302
1313
 
@@ -1304,151 +1315,45 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1304
1315
  """
1305
1316
  ...
1306
1317
 
1307
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1308
- """
1309
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1310
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1311
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1312
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1313
- starts only after all sensors finish.
1314
-
1315
- Parameters
1316
- ----------
1317
- timeout : int
1318
- Time, in seconds before the task times out and fails. (Default: 3600)
1319
- poke_interval : int
1320
- Time in seconds that the job should wait in between each try. (Default: 60)
1321
- mode : str
1322
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1323
- exponential_backoff : bool
1324
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1325
- pool : str
1326
- the slot pool this task should run in,
1327
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1328
- soft_fail : bool
1329
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1330
- name : str
1331
- Name of the sensor on Airflow
1332
- description : str
1333
- Description of sensor in the Airflow UI
1334
- bucket_key : Union[str, List[str]]
1335
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1336
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1337
- bucket_name : str
1338
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1339
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1340
- wildcard_match : bool
1341
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1342
- aws_conn_id : str
1343
- a reference to the s3 connection on Airflow. (Default: None)
1344
- verify : bool
1345
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1346
- """
1347
- ...
1348
-
1349
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1350
- """
1351
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1352
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1353
-
1354
- Parameters
1355
- ----------
1356
- timeout : int
1357
- Time, in seconds before the task times out and fails. (Default: 3600)
1358
- poke_interval : int
1359
- Time in seconds that the job should wait in between each try. (Default: 60)
1360
- mode : str
1361
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1362
- exponential_backoff : bool
1363
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1364
- pool : str
1365
- the slot pool this task should run in,
1366
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1367
- soft_fail : bool
1368
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1369
- name : str
1370
- Name of the sensor on Airflow
1371
- description : str
1372
- Description of sensor in the Airflow UI
1373
- external_dag_id : str
1374
- The dag_id that contains the task you want to wait for.
1375
- external_task_ids : List[str]
1376
- The list of task_ids that you want to wait for.
1377
- If None (default value) the sensor waits for the DAG. (Default: None)
1378
- allowed_states : List[str]
1379
- Iterable of allowed states, (Default: ['success'])
1380
- failed_states : List[str]
1381
- Iterable of failed or dis-allowed states. (Default: None)
1382
- execution_delta : datetime.timedelta
1383
- time difference with the previous execution to look at,
1384
- the default is the same logical date as the current task or DAG. (Default: None)
1385
- check_existence: bool
1386
- Set to True to check if the external task exists or check if
1387
- the DAG to wait for exists. (Default: True)
1388
- """
1389
- ...
1390
-
1391
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1392
- """
1393
- Specifies what flows belong to the same project.
1394
-
1395
- A project-specific namespace is created for all flows that
1396
- use the same `@project(name)`.
1397
-
1398
- Parameters
1399
- ----------
1400
- name : str
1401
- Project name. Make sure that the name is unique amongst all
1402
- projects that use the same production scheduler. The name may
1403
- contain only lowercase alphanumeric characters and underscores.
1404
-
1405
-
1406
- """
1407
- ...
1408
-
1409
1318
  @typing.overload
1410
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1319
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1411
1320
  """
1412
- Specifies the flow(s) that this flow depends on.
1321
+ Specifies the event(s) that this flow depends on.
1413
1322
 
1414
1323
  ```
1415
- @trigger_on_finish(flow='FooFlow')
1324
+ @trigger(event='foo')
1416
1325
  ```
1417
1326
  or
1418
1327
  ```
1419
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1328
+ @trigger(events=['foo', 'bar'])
1420
1329
  ```
1421
- This decorator respects the @project decorator and triggers the flow
1422
- when upstream runs within the same namespace complete successfully
1423
1330
 
1424
- Additionally, you can specify project aware upstream flow dependencies
1425
- by specifying the fully qualified project_flow_name.
1331
+ Additionally, you can specify the parameter mappings
1332
+ to map event payload to Metaflow parameters for the flow.
1426
1333
  ```
1427
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1334
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1428
1335
  ```
1429
1336
  or
1430
1337
  ```
1431
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1338
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1339
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1432
1340
  ```
1433
1341
 
1434
- You can also specify just the project or project branch (other values will be
1435
- inferred from the current project or project branch):
1342
+ 'parameters' can also be a list of strings and tuples like so:
1436
1343
  ```
1437
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1344
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1345
+ ```
1346
+ This is equivalent to:
1347
+ ```
1348
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1438
1349
  ```
1439
-
1440
- Note that `branch` is typically one of:
1441
- - `prod`
1442
- - `user.bob`
1443
- - `test.my_experiment`
1444
- - `prod.staging`
1445
1350
 
1446
1351
  Parameters
1447
1352
  ----------
1448
- flow : Union[str, Dict[str, str]], optional, default None
1449
- Upstream flow dependency for this flow.
1450
- flows : List[Union[str, Dict[str, str]]], default []
1451
- Upstream flow dependencies for this flow.
1353
+ event : Union[str, Dict[str, Any]], optional, default None
1354
+ Event dependency for this flow.
1355
+ events : List[Union[str, Dict[str, Any]]], default []
1356
+ Events dependency for this flow.
1452
1357
  options : Dict[str, Any], default {}
1453
1358
  Backend-specific configuration for tuning eventing behavior.
1454
1359
 
@@ -1457,51 +1362,47 @@ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] =
1457
1362
  ...
1458
1363
 
1459
1364
  @typing.overload
1460
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1365
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1461
1366
  ...
1462
1367
 
1463
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1368
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1464
1369
  """
1465
- Specifies the flow(s) that this flow depends on.
1370
+ Specifies the event(s) that this flow depends on.
1466
1371
 
1467
1372
  ```
1468
- @trigger_on_finish(flow='FooFlow')
1373
+ @trigger(event='foo')
1469
1374
  ```
1470
1375
  or
1471
1376
  ```
1472
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1377
+ @trigger(events=['foo', 'bar'])
1473
1378
  ```
1474
- This decorator respects the @project decorator and triggers the flow
1475
- when upstream runs within the same namespace complete successfully
1476
1379
 
1477
- Additionally, you can specify project aware upstream flow dependencies
1478
- by specifying the fully qualified project_flow_name.
1380
+ Additionally, you can specify the parameter mappings
1381
+ to map event payload to Metaflow parameters for the flow.
1479
1382
  ```
1480
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1383
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1481
1384
  ```
1482
1385
  or
1483
1386
  ```
1484
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1387
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1388
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1485
1389
  ```
1486
1390
 
1487
- You can also specify just the project or project branch (other values will be
1488
- inferred from the current project or project branch):
1391
+ 'parameters' can also be a list of strings and tuples like so:
1489
1392
  ```
1490
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1393
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1394
+ ```
1395
+ This is equivalent to:
1396
+ ```
1397
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1491
1398
  ```
1492
-
1493
- Note that `branch` is typically one of:
1494
- - `prod`
1495
- - `user.bob`
1496
- - `test.my_experiment`
1497
- - `prod.staging`
1498
1399
 
1499
1400
  Parameters
1500
1401
  ----------
1501
- flow : Union[str, Dict[str, str]], optional, default None
1502
- Upstream flow dependency for this flow.
1503
- flows : List[Union[str, Dict[str, str]]], default []
1504
- Upstream flow dependencies for this flow.
1402
+ event : Union[str, Dict[str, Any]], optional, default None
1403
+ Event dependency for this flow.
1404
+ events : List[Union[str, Dict[str, Any]]], default []
1405
+ Events dependency for this flow.
1505
1406
  options : Dict[str, Any], default {}
1506
1407
  Backend-specific configuration for tuning eventing behavior.
1507
1408
 
@@ -1509,52 +1410,76 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1509
1410
  """
1510
1411
  ...
1511
1412
 
1512
- @typing.overload
1513
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1413
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1514
1414
  """
1515
- Specifies the times when the flow should be run when running on a
1516
- production scheduler.
1415
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1416
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1417
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1418
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1419
+ starts only after all sensors finish.
1517
1420
 
1518
1421
  Parameters
1519
1422
  ----------
1520
- hourly : bool, default False
1521
- Run the workflow hourly.
1522
- daily : bool, default True
1523
- Run the workflow daily.
1524
- weekly : bool, default False
1525
- Run the workflow weekly.
1526
- cron : str, optional, default None
1527
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1528
- specified by this expression.
1529
- timezone : str, optional, default None
1530
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1531
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1423
+ timeout : int
1424
+ Time, in seconds before the task times out and fails. (Default: 3600)
1425
+ poke_interval : int
1426
+ Time in seconds that the job should wait in between each try. (Default: 60)
1427
+ mode : str
1428
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1429
+ exponential_backoff : bool
1430
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1431
+ pool : str
1432
+ the slot pool this task should run in,
1433
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1434
+ soft_fail : bool
1435
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1436
+ name : str
1437
+ Name of the sensor on Airflow
1438
+ description : str
1439
+ Description of sensor in the Airflow UI
1440
+ bucket_key : Union[str, List[str]]
1441
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1442
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1443
+ bucket_name : str
1444
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1445
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1446
+ wildcard_match : bool
1447
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1448
+ aws_conn_id : str
1449
+ a reference to the s3 connection on Airflow. (Default: None)
1450
+ verify : bool
1451
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1532
1452
  """
1533
1453
  ...
1534
1454
 
1535
- @typing.overload
1536
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1537
- ...
1538
-
1539
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1455
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1540
1456
  """
1541
- Specifies the times when the flow should be run when running on a
1542
- production scheduler.
1457
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1458
+
1459
+ User code call
1460
+ -----------
1461
+ @nim(
1462
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1463
+ backend='managed'
1464
+ )
1465
+
1466
+ Valid backend options
1467
+ ---------------------
1468
+ - 'managed': Outerbounds selects a compute provider based on the model.
1469
+ - 🚧 'dataplane': Run in your account.
1470
+
1471
+ Valid model options
1472
+ ----------------
1473
+ - 'meta/llama3-8b-instruct': 8B parameter model
1474
+ - 'meta/llama3-70b-instruct': 70B parameter model
1475
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1543
1476
 
1544
1477
  Parameters
1545
1478
  ----------
1546
- hourly : bool, default False
1547
- Run the workflow hourly.
1548
- daily : bool, default True
1549
- Run the workflow daily.
1550
- weekly : bool, default False
1551
- Run the workflow weekly.
1552
- cron : str, optional, default None
1553
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1554
- specified by this expression.
1555
- timezone : str, optional, default None
1556
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1557
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1479
+ models: list[NIM]
1480
+ List of NIM containers running models in sidecars.
1481
+ backend: str
1482
+ Compute provider to run the NIM container.
1558
1483
  """
1559
1484
  ...
1560
1485
 
@@ -1607,6 +1532,48 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1607
1532
  """
1608
1533
  ...
1609
1534
 
1535
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1536
+ """
1537
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1538
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1539
+
1540
+ Parameters
1541
+ ----------
1542
+ timeout : int
1543
+ Time, in seconds before the task times out and fails. (Default: 3600)
1544
+ poke_interval : int
1545
+ Time in seconds that the job should wait in between each try. (Default: 60)
1546
+ mode : str
1547
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1548
+ exponential_backoff : bool
1549
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1550
+ pool : str
1551
+ the slot pool this task should run in,
1552
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1553
+ soft_fail : bool
1554
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1555
+ name : str
1556
+ Name of the sensor on Airflow
1557
+ description : str
1558
+ Description of sensor in the Airflow UI
1559
+ external_dag_id : str
1560
+ The dag_id that contains the task you want to wait for.
1561
+ external_task_ids : List[str]
1562
+ The list of task_ids that you want to wait for.
1563
+ If None (default value) the sensor waits for the DAG. (Default: None)
1564
+ allowed_states : List[str]
1565
+ Iterable of allowed states, (Default: ['success'])
1566
+ failed_states : List[str]
1567
+ Iterable of failed or dis-allowed states. (Default: None)
1568
+ execution_delta : datetime.timedelta
1569
+ time difference with the previous execution to look at,
1570
+ the default is the same logical date as the current task or DAG. (Default: None)
1571
+ check_existence: bool
1572
+ Set to True to check if the external task exists or check if
1573
+ the DAG to wait for exists. (Default: True)
1574
+ """
1575
+ ...
1576
+
1610
1577
  @typing.overload
1611
1578
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1612
1579
  """
@@ -1646,34 +1613,70 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1646
1613
  """
1647
1614
  ...
1648
1615
 
1649
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1616
+ @typing.overload
1617
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1650
1618
  """
1651
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1619
+ Specifies the times when the flow should be run when running on a
1620
+ production scheduler.
1652
1621
 
1653
- User code call
1654
- -----------
1655
- @nim(
1656
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1657
- backend='managed'
1658
- )
1622
+ Parameters
1623
+ ----------
1624
+ hourly : bool, default False
1625
+ Run the workflow hourly.
1626
+ daily : bool, default True
1627
+ Run the workflow daily.
1628
+ weekly : bool, default False
1629
+ Run the workflow weekly.
1630
+ cron : str, optional, default None
1631
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1632
+ specified by this expression.
1633
+ timezone : str, optional, default None
1634
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1635
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1636
+ """
1637
+ ...
1638
+
1639
+ @typing.overload
1640
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1641
+ ...
1642
+
1643
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1644
+ """
1645
+ Specifies the times when the flow should be run when running on a
1646
+ production scheduler.
1659
1647
 
1660
- Valid backend options
1661
- ---------------------
1662
- - 'managed': Outerbounds selects a compute provider based on the model.
1663
- - 🚧 'dataplane': Run in your account.
1648
+ Parameters
1649
+ ----------
1650
+ hourly : bool, default False
1651
+ Run the workflow hourly.
1652
+ daily : bool, default True
1653
+ Run the workflow daily.
1654
+ weekly : bool, default False
1655
+ Run the workflow weekly.
1656
+ cron : str, optional, default None
1657
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1658
+ specified by this expression.
1659
+ timezone : str, optional, default None
1660
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1661
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1662
+ """
1663
+ ...
1664
+
1665
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1666
+ """
1667
+ Specifies what flows belong to the same project.
1664
1668
 
1665
- Valid model options
1666
- ----------------
1667
- - 'meta/llama3-8b-instruct': 8B parameter model
1668
- - 'meta/llama3-70b-instruct': 70B parameter model
1669
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1669
+ A project-specific namespace is created for all flows that
1670
+ use the same `@project(name)`.
1670
1671
 
1671
1672
  Parameters
1672
1673
  ----------
1673
- models: list[NIM]
1674
- List of NIM containers running models in sidecars.
1675
- backend: str
1676
- Compute provider to run the NIM container.
1674
+ name : str
1675
+ Project name. Make sure that the name is unique amongst all
1676
+ projects that use the same production scheduler. The name may
1677
+ contain only lowercase alphanumeric characters and underscores.
1678
+
1679
+
1677
1680
  """
1678
1681
  ...
1679
1682