ob-metaflow-stubs 6.0.3.103__py2.py3-none-any.whl → 6.0.3.103rc3__py2.py3-none-any.whl

Files changed (140)
  1. metaflow-stubs/__init__.pyi +466 -466
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +4 -4
  6. metaflow-stubs/client/filecache.pyi +1 -1
  7. metaflow-stubs/clone_util.pyi +1 -1
  8. metaflow-stubs/events.pyi +1 -1
  9. metaflow-stubs/exception.pyi +1 -1
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/info_file.pyi +1 -1
  14. metaflow-stubs/metadata/metadata.pyi +2 -2
  15. metaflow-stubs/metadata/util.pyi +1 -1
  16. metaflow-stubs/metaflow_config.pyi +1 -1
  17. metaflow-stubs/metaflow_current.pyi +17 -17
  18. metaflow-stubs/mflog/mflog.pyi +1 -1
  19. metaflow-stubs/multicore_utils.pyi +1 -1
  20. metaflow-stubs/parameters.pyi +1 -1
  21. metaflow-stubs/plugins/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  23. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  24. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  25. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  26. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  27. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  30. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  31. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  32. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  33. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  34. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  35. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +1 -1
  36. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  37. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  38. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  39. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  40. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  41. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  42. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  43. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  44. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  45. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  46. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  47. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +1 -1
  48. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  49. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  51. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +1 -1
  52. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  53. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  54. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  55. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  57. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  58. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  59. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  60. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  61. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  62. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  63. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  64. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  66. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  67. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  68. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  70. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  71. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  72. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  73. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  74. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  75. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  76. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  78. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  79. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  80. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  81. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  82. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  83. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  84. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  85. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  87. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  88. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  89. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  90. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  91. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  92. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  93. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  94. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  95. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  96. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  97. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +1 -1
  98. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  99. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  100. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  101. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  102. metaflow-stubs/plugins/package_cli.pyi +1 -1
  103. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/perimeters.pyi +1 -1
  105. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  107. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  108. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  110. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  111. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  112. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  113. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  114. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  115. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  118. metaflow-stubs/plugins/tag_cli.pyi +2 -2
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  120. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  121. metaflow-stubs/procpoll.pyi +1 -1
  122. metaflow-stubs/profilers/__init__.pyi +1 -1
  123. metaflow-stubs/pylint_wrapper.pyi +1 -1
  124. metaflow-stubs/runner/__init__.pyi +1 -1
  125. metaflow-stubs/runner/deployer.pyi +3 -3
  126. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  127. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  128. metaflow-stubs/runner/nbrun.pyi +1 -1
  129. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  130. metaflow-stubs/runner/utils.pyi +1 -1
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +2 -2
  133. metaflow-stubs/system/system_monitor.pyi +1 -1
  134. metaflow-stubs/tagging_util.pyi +1 -1
  135. metaflow-stubs/tuple_util.pyi +1 -1
  136. {ob_metaflow_stubs-6.0.3.103.dist-info → ob_metaflow_stubs-6.0.3.103rc3.dist-info}/METADATA +1 -1
  137. ob_metaflow_stubs-6.0.3.103rc3.dist-info/RECORD +140 -0
  138. ob_metaflow_stubs-6.0.3.103.dist-info/RECORD +0 -140
  139. {ob_metaflow_stubs-6.0.3.103.dist-info → ob_metaflow_stubs-6.0.3.103rc3.dist-info}/WHEEL +0 -0
  140. {ob_metaflow_stubs-6.0.3.103.dist-info → ob_metaflow_stubs-6.0.3.103rc3.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T20:42:33.459900 #
+ # Generated on 2024-09-20T21:42:44.532164 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow._vendor.click.types
+ import metaflow.flowspec
+ import metaflow.parameters
+ import typing
  import metaflow.datastore.inputs
+ import metaflow._vendor.click.types
  import metaflow.runner.metaflow_runner
+ import metaflow_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures
  import datetime
- import metaflow.flowspec
  import metaflow.events
- import metaflow_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures
  import metaflow.metaflow_current
- import metaflow.parameters
  import metaflow.client.core
- import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -435,147 +435,139 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies a timeout for your step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies that the step will success under all circumstances.
+ Specifies a timeout for your step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

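As an aside to this hunk (not part of the stub file itself): the `@catch`, `@environment`, and `@timeout` step decorators whose docstrings move here are applied to steps roughly as in the following minimal, illustrative sketch; the flow, step, and artifact names are hypothetical.

```python
from metaflow import FlowSpec, step, catch, environment, timeout


class ExampleFlow(FlowSpec):
    # Hypothetical flow illustrating the step decorators stubbed above.

    @catch(var="start_error", print_exception=True)      # caught exception stored as self.start_error
    @timeout(hours=1, minutes=30)                        # effective timeout: 1h30m (values are summed)
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        self.result = 42
        self.next(self.end)

    @step
    def end(self):
        # If start failed, self.start_error holds the exception and
        # happy-path artifacts such as self.result may be missing.
        print(getattr(self, "start_error", None))


if __name__ == "__main__":
    ExampleFlow()
```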
@@ -633,191 +625,115 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Internal decorator to support Fast bakery
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
  memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the resources needed when executing this step.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

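For orientation (not part of the stub file): the `@resources` docstring moved in this hunk describes requirements that are independent of the compute layer; a minimal sketch of typical usage with a hypothetical flow follows. The requirements take effect when the flow is run with e.g. `python resource_demo.py run --with kubernetes`.

```python
from metaflow import FlowSpec, step, resources


class ResourceDemoFlow(FlowSpec):
    # Hypothetical flow: @resources is honored by @batch or @kubernetes
    # when the run is launched with `--with batch` / `--with kubernetes`.

    @resources(cpu=2, memory=8192, gpu=0)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourceDemoFlow()
```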
@@ -854,34 +770,69 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
  """
  ...

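As a rough illustration of the `@kubernetes` parameters documented in this hunk (not part of the stub file; the image, sizes, and selectors below are hypothetical and deployment-specific):

```python
from metaflow import FlowSpec, step, kubernetes


class K8sDemoFlow(FlowSpec):
    # Hypothetical flow: the decorated step is launched as a Kubernetes pod.

    @kubernetes(
        cpu=4,
        memory=16384,                                   # MB; max of this and any @resources value is used
        image="python:3.11",                            # hypothetical container image
        node_selector={"kubernetes.io/arch": "amd64"},  # dict or comma-separated string accepted
        use_tmpfs=True,
        tmpfs_size=2048,                                # MiB
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sDemoFlow()
```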
@@ -1115,26 +1066,96 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies what flows belong to the same project.
+ Specifies the PyPI packages for the step.

- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
  before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
  and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
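For reference (not part of the stub file): the `@pypi` / `@pypi_base` pair whose docstrings are added in this hunk is typically combined so that flow-level packages are set once and overridden per step, as in this illustrative sketch with placeholder package versions.

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")   # packages shared by every step
class PypiDemoFlow(FlowSpec):

    @pypi(packages={"scikit-learn": "1.5.1"})               # step-specific addition/override
    @step
    def start(self):
        import sklearn  # resolved from this step's PyPI environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```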
@@ -1176,48 +1197,93 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  ...

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

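For the flow-level decorators whose docstrings appear in this hunk, `@conda_base` and `@trigger` compose roughly as in this hedged sketch (the event name, payload field, and package versions are made up; the trigger only takes effect when the flow is deployed to an event-aware scheduler, not on a plain local `run`):

```python
from metaflow import FlowSpec, Parameter, step, conda_base, trigger


@trigger(event={"name": "data_refreshed",
                "parameters": {"table": "table_name"}})   # event field 'table_name' -> flow parameter 'table'
@conda_base(packages={"numpy": "1.26.4"}, python="3.11.5")
class TriggeredFlow(FlowSpec):

    table = Parameter("table", default="events")

    @step
    def start(self):
        print(self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredFlow()
```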
@@ -1226,51 +1292,47 @@ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] =
1226
1292
  ...
1227
1293
 
1228
1294
  @typing.overload
1229
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1295
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1230
1296
  ...
1231
1297
 
1232
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1298
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1233
1299
  """
1234
- Specifies the flow(s) that this flow depends on.
1300
+ Specifies the event(s) that this flow depends on.
1235
1301
 
1236
1302
  ```
1237
- @trigger_on_finish(flow='FooFlow')
1303
+ @trigger(event='foo')
1238
1304
  ```
1239
1305
  or
1240
1306
  ```
1241
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1307
+ @trigger(events=['foo', 'bar'])
1242
1308
  ```
1243
- This decorator respects the @project decorator and triggers the flow
1244
- when upstream runs within the same namespace complete successfully
1245
1309
 
1246
- Additionally, you can specify project aware upstream flow dependencies
1247
- by specifying the fully qualified project_flow_name.
1310
+ Additionally, you can specify the parameter mappings
1311
+ to map event payload to Metaflow parameters for the flow.
1248
1312
  ```
1249
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1313
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1250
1314
  ```
1251
1315
  or
1252
1316
  ```
1253
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1317
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1318
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1254
1319
  ```
1255
1320
 
1256
- You can also specify just the project or project branch (other values will be
1257
- inferred from the current project or project branch):
1321
+ 'parameters' can also be a list of strings and tuples like so:
1258
1322
  ```
1259
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1323
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1324
+ ```
1325
+ This is equivalent to:
1326
+ ```
1327
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1260
1328
  ```
1261
-
1262
- Note that `branch` is typically one of:
1263
- - `prod`
1264
- - `user.bob`
1265
- - `test.my_experiment`
1266
- - `prod.staging`
1267
1329
 
1268
1330
  Parameters
1269
1331
  ----------
1270
- flow : Union[str, Dict[str, str]], optional, default None
1271
- Upstream flow dependency for this flow.
1272
- flows : List[Union[str, Dict[str, str]]], default []
1273
- Upstream flow dependencies for this flow.
1332
+ event : Union[str, Dict[str, Any]], optional, default None
1333
+ Event dependency for this flow.
1334
+ events : List[Union[str, Dict[str, Any]]], default []
1335
+ Event dependencies for this flow.
1274
1336
  options : Dict[str, Any], default {}
1275
1337
  Backend-specific configuration for tuning eventing behavior.
1276
1338
 
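To make the `@trigger` signature above concrete, here is a minimal sketch of a flow whose parameter is filled from the payload of a hypothetical `data_updated` event. The event and field names are assumptions, and the mapping follows the `{'flow_param': 'event_field'}` form shown in the docstring; event-based triggering takes effect once the flow is deployed to a production orchestrator such as Argo Workflows:

```
from metaflow import FlowSpec, Parameter, step, trigger


# Hypothetical event name and payload field.
@trigger(event={"name": "data_updated", "parameters": {"table": "table_name"}})
class EventTriggeredFlow(FlowSpec):

    # Filled from the 'table_name' field of the triggering event when deployed.
    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("Triggered for table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventTriggeredFlow()
```

Locally the flow still runs with `python event_triggered_flow.py run`; the event mapping only applies when the deployed version is triggered.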
@@ -1328,44 +1390,48 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1328
1390
  ...
1329
1391
 
1330
1392
  @typing.overload
1331
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1393
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1332
1394
  """
1333
- Specifies the event(s) that this flow depends on.
1395
+ Specifies the flow(s) that this flow depends on.
1334
1396
 
1335
1397
  ```
1336
- @trigger(event='foo')
1398
+ @trigger_on_finish(flow='FooFlow')
1337
1399
  ```
1338
1400
  or
1339
1401
  ```
1340
- @trigger(events=['foo', 'bar'])
1402
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1341
1403
  ```
1404
+ This decorator respects the @project decorator and triggers the flow
1405
+ when upstream runs within the same namespace complete successfully.
1342
1406
 
1343
- Additionally, you can specify the parameter mappings
1344
- to map event payload to Metaflow parameters for the flow.
1407
+ Additionally, you can specify project-aware upstream flow dependencies
1408
+ by specifying the fully qualified project_flow_name.
1345
1409
  ```
1346
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1410
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1347
1411
  ```
1348
1412
  or
1349
1413
  ```
1350
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1351
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1414
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1352
1415
  ```
1353
1416
 
1354
- 'parameters' can also be a list of strings and tuples like so:
1355
- ```
1356
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1357
- ```
1358
- This is equivalent to:
1417
+ You can also specify just the project or project branch (other values will be
1418
+ inferred from the current project or project branch):
1359
1419
  ```
1360
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1420
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1361
1421
  ```
1362
1422
 
1423
+ Note that `branch` is typically one of:
1424
+ - `prod`
1425
+ - `user.bob`
1426
+ - `test.my_experiment`
1427
+ - `prod.staging`
1428
+
1363
1429
  Parameters
1364
1430
  ----------
1365
- event : Union[str, Dict[str, Any]], optional, default None
1366
- Event dependency for this flow.
1367
- events : List[Union[str, Dict[str, Any]]], default []
1368
- Events dependency for this flow.
1431
+ flow : Union[str, Dict[str, str]], optional, default None
1432
+ Upstream flow dependency for this flow.
1433
+ flows : List[Union[str, Dict[str, str]]], default []
1434
+ Upstream flow dependencies for this flow.
1369
1435
  options : Dict[str, Any], default {}
1370
1436
  Backend-specific configuration for tuning eventing behavior.
1371
1437
 
@@ -1374,47 +1440,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
1374
1440
  ...
1375
1441
 
1376
1442
  @typing.overload
1377
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1443
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1378
1444
  ...
1379
1445
 
1380
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1446
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1381
1447
  """
1382
- Specifies the event(s) that this flow depends on.
1448
+ Specifies the flow(s) that this flow depends on.
1383
1449
 
1384
1450
  ```
1385
- @trigger(event='foo')
1451
+ @trigger_on_finish(flow='FooFlow')
1386
1452
  ```
1387
1453
  or
1388
1454
  ```
1389
- @trigger(events=['foo', 'bar'])
1455
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1390
1456
  ```
1457
+ This decorator respects the @project decorator and triggers the flow
1458
+ when upstream runs within the same namespace complete successfully.
1391
1459
 
1392
- Additionally, you can specify the parameter mappings
1393
- to map event payload to Metaflow parameters for the flow.
1460
+ Additionally, you can specify project-aware upstream flow dependencies
1461
+ by specifying the fully qualified project_flow_name.
1394
1462
  ```
1395
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1463
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1396
1464
  ```
1397
1465
  or
1398
1466
  ```
1399
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1400
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1467
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1401
1468
  ```
1402
1469
 
1403
- 'parameters' can also be a list of strings and tuples like so:
1404
- ```
1405
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1406
- ```
1407
- This is equivalent to:
1470
+ You can also specify just the project or project branch (other values will be
1471
+ inferred from the current project or project branch):
1408
1472
  ```
1409
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1473
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1410
1474
  ```
1411
1475
 
1476
+ Note that `branch` is typically one of:
1477
+ - `prod`
1478
+ - `user.bob`
1479
+ - `test.my_experiment`
1480
+ - `prod.staging`
1481
+
1412
1482
  Parameters
1413
1483
  ----------
1414
- event : Union[str, Dict[str, Any]], optional, default None
1415
- Event dependency for this flow.
1416
- events : List[Union[str, Dict[str, Any]]], default []
1417
- Events dependency for this flow.
1484
+ flow : Union[str, Dict[str, str]], optional, default None
1485
+ Upstream flow dependency for this flow.
1486
+ flows : List[Union[str, Dict[str, str]]], default []
1487
+ Upstream flow dependencies for this flow.
1418
1488
  options : Dict[str, Any], default {}
1419
1489
  Backend-specific configuration for tuning eventing behavior.
1420
1490
 
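Analogously, a minimal sketch of the `@trigger_on_finish` decorator documented above; `FooFlow` is the hypothetical upstream flow name taken from the docstring, and as with `@trigger` the dependency is honored when the flows are deployed to a production orchestrator:

```
from metaflow import FlowSpec, step, trigger_on_finish


# Runs after the upstream flow 'FooFlow' completes successfully
# within the same (project) namespace.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```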
@@ -1422,73 +1492,21 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1422
1492
  """
1423
1493
  ...
1424
1494
 
1425
- @typing.overload
1426
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1495
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1427
1496
  """
1428
- Specifies the PyPI packages for all steps of the flow.
1497
+ Specifies what flows belong to the same project.
1429
1498
 
1430
- Use `@pypi_base` to set common packages required by all
1431
- steps and use `@pypi` to specify step-specific overrides.
1432
- Parameters
1433
- ----------
1434
- packages : Dict[str, str], default: {}
1435
- Packages to use for this flow. The key is the name of the package
1436
- and the value is the version to use.
1437
- python : str, optional, default: None
1438
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1439
- that the version used will correspond to the version of the Python interpreter used to start the run.
1440
- """
1441
- ...
1442
-
1443
- @typing.overload
1444
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1445
- ...
1446
-
1447
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1448
- """
1449
- Specifies the PyPI packages for all steps of the flow.
1499
+ A project-specific namespace is created for all flows that
1500
+ use the same `@project(name)`.
1450
1501
 
1451
- Use `@pypi_base` to set common packages required by all
1452
- steps and use `@pypi` to specify step-specific overrides.
1453
1502
  Parameters
1454
1503
  ----------
1455
- packages : Dict[str, str], default: {}
1456
- Packages to use for this flow. The key is the name of the package
1457
- and the value is the version to use.
1458
- python : str, optional, default: None
1459
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1460
- that the version used will correspond to the version of the Python interpreter used to start the run.
1461
- """
1462
- ...
1463
-
1464
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1465
- """
1466
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1467
-
1468
- User code call
1469
- -----------
1470
- @nim(
1471
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1472
- backend='managed'
1473
- )
1474
-
1475
- Valid backend options
1476
- ---------------------
1477
- - 'managed': Outerbounds selects a compute provider based on the model.
1478
- - 🚧 'dataplane': Run in your account.
1504
+ name : str
1505
+ Project name. Make sure that the name is unique amongst all
1506
+ projects that use the same production scheduler. The name may
1507
+ contain only lowercase alphanumeric characters and underscores.
1479
1508
 
1480
- Valid model options
1481
- ----------------
1482
- - 'meta/llama3-8b-instruct': 8B parameter model
1483
- - 'meta/llama3-70b-instruct': 70B parameter model
1484
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1485
1509
 
1486
- Parameters
1487
- ----------
1488
- models: list[NIM]
1489
- List of NIM containers running models in sidecars.
1490
- backend: str
1491
- Compute provider to run the NIM container.
1492
1510
  """
1493
1511
  ...
1494
1512
 
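A minimal sketch of the `@project` decorator documented above; the project name is an illustrative assumption that follows the lowercase-alphanumeric-and-underscore rule stated in the docstring:

```
from metaflow import FlowSpec, project, step


# All flows deployed with @project(name='fraud_detection') share one
# project-specific namespace on the production scheduler.
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```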
@@ -1534,52 +1552,34 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1534
1552
  """
1535
1553
  ...
1536
1554
 
1537
- @typing.overload
1538
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1555
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1539
1556
  """
1540
- Specifies the Conda environment for all steps of the flow.
1557
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1541
1558
 
1542
- Use `@conda_base` to set common libraries required by all
1543
- steps and use `@conda` to specify step-specific additions.
1559
+ User code call
1560
+ --------------
1561
+ @nim(
1562
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1563
+ backend='managed'
1564
+ )
1544
1565
 
1545
- Parameters
1546
- ----------
1547
- packages : Dict[str, str], default {}
1548
- Packages to use for this flow. The key is the name of the package
1549
- and the value is the version to use.
1550
- libraries : Dict[str, str], default {}
1551
- Supported for backward compatibility. When used with packages, packages will take precedence.
1552
- python : str, optional, default None
1553
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1554
- that the version used will correspond to the version of the Python interpreter used to start the run.
1555
- disabled : bool, default False
1556
- If set to True, disables Conda.
1557
- """
1558
- ...
1559
-
1560
- @typing.overload
1561
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1562
- ...
1563
-
1564
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1565
- """
1566
- Specifies the Conda environment for all steps of the flow.
1566
+ Valid backend options
1567
+ ---------------------
1568
+ - 'managed': Outerbounds selects a compute provider based on the model.
1569
+ - 🚧 'dataplane': Run in your account.
1567
1570
 
1568
- Use `@conda_base` to set common libraries required by all
1569
- steps and use `@conda` to specify step-specific additions.
1571
+ Valid model options
1572
+ -------------------
1573
+ - 'meta/llama3-8b-instruct': 8B parameter model
1574
+ - 'meta/llama3-70b-instruct': 70B parameter model
1575
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1570
1576
 
1571
1577
  Parameters
1572
1578
  ----------
1573
- packages : Dict[str, str], default {}
1574
- Packages to use for this flow. The key is the name of the package
1575
- and the value is the version to use.
1576
- libraries : Dict[str, str], default {}
1577
- Supported for backward compatibility. When used with packages, packages will take precedence.
1578
- python : str, optional, default None
1579
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1580
- that the version used will correspond to the version of the Python interpreter used to start the run.
1581
- disabled : bool, default False
1582
- If set to True, disables Conda.
1579
+ models: list[NIM]
1580
+ List of NIM containers running models in sidecars.
1581
+ backend: str
1582
+ Compute provider to run the NIM container.
1583
1583
  """
1584
1584
  ...
1585
1585
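Finally, a minimal sketch of the `@nim` decorator documented above, assuming the decorator is importable from the top-level `metaflow` package as this stub suggests; the model identifier and backend value come from the docstring, and since the stub does not document how the sidecar endpoint is addressed from step code, no model call is shown:

```
from metaflow import FlowSpec, step, nim  # @nim is an Outerbounds extension


# Runs a NIM container as a sidecar of each task in this flow.
@nim(models=["meta/llama3-8b-instruct"], backend="managed")
class NimSidecarFlow(FlowSpec):

    @step
    def start(self):
        # The sidecar serves the model listed above; querying it is omitted
        # because the client-side API is not specified in this stub.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NimSidecarFlow()
```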