ob-metaflow-stubs 5.8.2__py2.py3-none-any.whl → 5.9__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. metaflow-stubs/__init__.pyi +453 -449
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +27 -25
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  26. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  28. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/argo/argo_client.pyi +6 -2
  30. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  32. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
  33. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
  34. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -3
  35. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  38. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  42. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  44. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -3
  51. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  52. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  55. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  56. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  57. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  58. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  60. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  61. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  64. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  72. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  73. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  74. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  75. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  76. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  79. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  81. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  82. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  83. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  84. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  87. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  89. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  93. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +18 -0
  95. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +7 -3
  96. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +26 -4
  97. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +8 -2
  98. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  99. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  100. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  101. metaflow-stubs/plugins/package_cli.pyi +2 -2
  102. metaflow-stubs/plugins/parallel_decorator.pyi +5 -5
  103. metaflow-stubs/plugins/perimeters.pyi +2 -2
  104. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  105. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  108. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  110. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  111. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  115. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  117. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  118. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  119. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  120. metaflow-stubs/procpoll.pyi +2 -2
  121. metaflow-stubs/profilers/__init__.pyi +2 -2
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +8 -6
  125. metaflow-stubs/runner/metaflow_runner.pyi +6 -6
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +4 -4
  129. metaflow-stubs/runner/utils.pyi +5 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +3 -3
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. {ob_metaflow_stubs-5.8.2.dist-info → ob_metaflow_stubs-5.9.dist-info}/METADATA +1 -1
  136. ob_metaflow_stubs-5.9.dist-info/RECORD +139 -0
  137. ob_metaflow_stubs-5.8.2.dist-info/RECORD +0 -138
  138. {ob_metaflow_stubs-5.8.2.dist-info → ob_metaflow_stubs-5.9.dist-info}/WHEEL +0 -0
  139. {ob_metaflow_stubs-5.8.2.dist-info → ob_metaflow_stubs-5.9.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.19.1+ob(v1) #
- # Generated on 2024-09-09T18:18:13.623870 #
+ # MF version: 2.12.20.1+ob(v1) #
+ # Generated on 2024-09-16T17:51:44.698904 #
  ##################################################################################
 
  from __future__ import annotations
 
  import typing
  if typing.TYPE_CHECKING:
- import metaflow.parameters
- import datetime
  import metaflow._vendor.click.types
- import metaflow.metaflow_current
  import metaflow.datastore.inputs
- import metaflow.flowspec
+ import metaflow.parameters
  import typing
  import metaflow.runner.metaflow_runner
+ import metaflow.metaflow_current
  import metaflow.events
+ import datetime
  import metaflow.client.core
+ import metaflow.flowspec
 
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
 
@@ -437,58 +437,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...
 
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
  @typing.overload
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -541,51 +489,79 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
 
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
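Note: the `@resources` overloads moved into this position are purely declarative; a minimal usage sketch (hypothetical flow name and resource values, grounded only in the signature and docstring quoted in this hunk):

```
from metaflow import FlowSpec, step, resources

class ResourcesDemoFlow(FlowSpec):

    # Declarative request; it takes effect when the step runs on a
    # compute layer such as --with batch or --with kubernetes.
    @resources(cpu=2, memory=8192, gpu=0)
    @step
    def start(self):
        self.items = list(range(10))
        self.next(self.end)

    @step
    def end(self):
        print(len(self.items), "items")

if __name__ == "__main__":
    ResourcesDemoFlow()
```

Per the docstring, `python resources_demo.py run --with kubernetes` would then apply these requests on the chosen compute layer.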
@@ -620,7 +596,56 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
 
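Note: the `@catch` definitions relocated into this hunk describe failure-tolerant steps; a sketch of the pattern (hypothetical flow, and assuming the artifact named by `var` is falsy when no exception was caught):

```
from metaflow import FlowSpec, step, catch

class CatchDemoFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        # An exception raised here is stored in self.compute_failed instead
        # of failing the run; this step's happy-path artifact (self.result)
        # then stays missing.
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_failed", None):
            print("start failed:", self.compute_failed)
        else:
            print("result:", self.result)

if __name__ == "__main__":
    CatchDemoFlow()
```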
@@ -649,6 +674,10 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  Kubernetes secrets to use when launching pod in Kubernetes. These
  secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
  in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
  namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
  Kubernetes namespace to use when launching pod in Kubernetes.
  gpu : int, optional, default None
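Note: the new `node_selector` parameter documented above accepts two equivalent spellings; a sketch of both (labels copied from the docstring's own examples, hypothetical flow name):

```
from metaflow import FlowSpec, step, kubernetes

class NodeSelectorDemoFlow(FlowSpec):

    # String form: comma-separated key=value pairs.
    @kubernetes(node_selector="kubernetes.io/os=linux,kubernetes.io/arch=amd64")
    @step
    def start(self):
        self.next(self.end)

    # Dict form, equivalent per the docstring.
    @kubernetes(node_selector={"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"})
    @step
    def end(self):
        pass

if __name__ == "__main__":
    NodeSelectorDemoFlow()
```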
@@ -683,137 +712,61 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...
 
  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
 
  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the resources needed when executing this step.
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
 
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ This decorator is useful if this step may hang indefinitely.
 
  This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
  A timeout is considered to be an exception thrown by the step. It will cause the step to be
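Note: this hunk swaps `@catch` for `@secrets` at this position and relocates `@timeout`, whose surrounding context points out that a timeout surfaces as an ordinary step exception. A combined sketch (hypothetical secret name and env-var keys — the actual keys depend on what the configured secrets backend stores; `@retry(times=...)` is standard Metaflow but its signature is not part of this hunk):

```
import os

from metaflow import FlowSpec, retry, secrets, step, timeout

class SecretsTimeoutDemoFlow(FlowSpec):

    # Each entry in `sources` is a secret spec; the backend injects the
    # stored key/value pairs as environment variables before the step runs.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        print("connecting as", os.environ.get("DB_USER"))  # hypothetical key
        self.next(self.end)

    # A timeout raises inside the step, so @retry re-runs it before the
    # run fails for good.
    @retry(times=2)
    @timeout(minutes=10)
    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsTimeoutDemoFlow()
```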
@@ -918,23 +871,6 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...
 
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
- ...
-
  @typing.overload
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -992,48 +928,39 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...
 
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
-
- User code call
- -----------
- @nim(
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='managed'
- )
-
- Valid backend options
- ---------------------
- - 'managed': Outerbounds selects a compute provider based on the model.
- - 🚧 'dataplane': Run in your account.
-
- Valid model options
- ----------------
- - 'meta/llama3-8b-instruct': 8B parameter model
- - 'meta/llama3-70b-instruct': 70B parameter model
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
-
- Parameters
- ----------
- models: list[NIM]
- List of NIM containers running models in sidecars.
- backend: str
- Compute provider to run the NIM container.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...
 
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
 
- Use `@pypi_base` to set common packages required by all
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
  steps and use `@pypi` to specify step-specific overrides.
+
  Parameters
  ----------
  packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
+ Packages to use for this step. The key is the name of the package
  and the value is the version to use.
  python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
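Note: the step-level `@pypi` docstring added here says it augments the flow-level `@pypi_base`; a sketch of the two together (hypothetical package pins):

```
from metaflow import FlowSpec, pypi, pypi_base, step

# Flow-level baseline shared by every step.
@pypi_base(python="3.10.4", packages={"pandas": "2.2.2"})
class PypiDemoFlow(FlowSpec):

    # Step-specific addition on top of @pypi_base.
    @pypi(packages={"scikit-learn": "1.5.1"})
    @step
    def start(self):
        import pandas
        import sklearn
        print(pandas.__version__, sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```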
@@ -1042,19 +969,26 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  ...
 
  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the PyPI packages for the step.
 
- Use `@pypi_base` to set common packages required by all
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
  steps and use `@pypi` to specify step-specific overrides.
+
  Parameters
  ----------
  packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
+ Packages to use for this step. The key is the name of the package
  and the value is the version to use.
  python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -1062,6 +996,48 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...
 
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
  def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies what flows belong to the same project.
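Note: the `@airflow_s3_key_sensor` stub added above declares every parameter as keyword-only; a flow-level sketch (hypothetical bucket and values, passing only a few arguments on the assumption that the runtime decorator supplies the defaults quoted in the docstring for anything omitted):

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Blocks the `start` step until the marker object exists; only takes
# effect when the flow is compiled with `airflow create`.
@airflow_s3_key_sensor(
    name="wait_for_input",
    description="Block until the daily input lands",
    bucket_key="s3://my-bucket/daily/_SUCCESS",  # full s3:// url, so bucket_name stays unset
    timeout=3600,
    poke_interval=60,
)
class SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorDemoFlow()
```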
@@ -1080,111 +1056,183 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
  """
  ...
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
+
+ User code call
+ -----------
+ @nim(
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+ backend='managed'
+ )
+
+ Valid backend options
+ ---------------------
+ - 'managed': Outerbounds selects a compute provider based on the model.
+ - 🚧 'dataplane': Run in your account.
+
+ Valid model options
+ ----------------
+ - 'meta/llama3-8b-instruct': 8B parameter model
+ - 'meta/llama3-70b-instruct': 70B parameter model
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ models: list[NIM]
+ List of NIM containers running models in sidecars.
+ backend: str
+ Compute provider to run the NIM container.
  """
  ...
 
  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
 
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
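Note: the `@trigger_on_finish` docstring added above shows the decorator lines in isolation; a complete flow-level sketch tying them together (hypothetical flow names; the trigger only fires once the flow is deployed to an event-capable scheduler such as Argo Workflows):

```
from metaflow import FlowSpec, step, trigger_on_finish

# Runs whenever TrainingFlow, in the same project/branch namespace,
# completes successfully.
@trigger_on_finish(flow="TrainingFlow")
class DeployFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DeployFlow()
```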
  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.
 
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
 
  Parameters
  ----------
@@ -1205,18 +1253,70 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When it is specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (the default), the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check whether the external task exists, or whether
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
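The hunks around the `@@` marker above replace the `@airflow_s3_key_sensor` stub at this position with `@airflow_external_task_sensor`, swapping the S3 parameters for external-DAG ones. A minimal, hedged sketch of the added decorator, assuming the top-level import these stubs describe; the DAG and task ids are hypothetical, and keyword arguments not shown are left to the documented defaults:

```
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Block `start` until a task in another Airflow DAG succeeds.
# "nightly_etl" and "load_warehouse" are hypothetical ids; this only takes
# effect when the flow is compiled for Airflow via `airflow create`.
@airflow_external_task_sensor(
    external_dag_id="nightly_etl",
    external_task_ids=["load_warehouse"],
    timeout=3600,       # documented default
    poke_interval=60,   # documented default
    mode="poke",        # documented default
)
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```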
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
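The added `@conda_base` stubs document both the bare and the parametrized form of the decorator. To show how the documented `packages` and `python` parameters combine with the step-level `@conda` additions the docstring mentions, a minimal sketch; all package pins here are hypothetical:

```
from metaflow import FlowSpec, conda, conda_base, step

# Common Conda environment for every step of the flow; the python pin
# follows the '3.7.4'-style format from the docstring.
@conda_base(packages={"numpy": "1.26.4"}, python="3.10.4")
class CondaFlow(FlowSpec):
    @step
    def start(self):
        import numpy as np  # from the flow-level environment
        self.values = np.arange(5).tolist()
        self.next(self.end)

    # Step-specific addition layered on top of the @conda_base packages.
    @conda(packages={"pandas": "2.1.4"})
    @step
    def end(self):
        import pandas as pd
        print(pd.Series(self.values))

if __name__ == "__main__":
    CondaFlow()
```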
 
@@ -1315,148 +1415,52 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (the default), the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check whether the external task exists, or whether
- the DAG to wait for exists. (Default: True)
- """
- ...
-
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the times when the flow should run on a
+ production scheduler.
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the times when the flow should run on a
+ production scheduler.
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
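The hunks above replace the `trigger_on_finish` stub at this position with `schedule`, using the same overload pattern as the `conda_base` stubs earlier in the diff. To illustrate the documented `cron` and `timezone` parameters, a minimal, hedged sketch; the five-field cron expression and the timezone value are illustrative, and per the docstring the `timezone` parameter is honored only on Argo Workflows:

```
from metaflow import FlowSpec, schedule, step

# Run daily at 08:00 in the given IANA timezone when deployed to a
# production scheduler; `timezone` takes effect only on Argo Workflows.
@schedule(cron="0 8 * * *", timezone="Europe/Berlin")
class ScheduledFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScheduledFlow()
```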