ob-metaflow-stubs 6.0.3.103rc0__py2.py3-none-any.whl → 6.0.3.104__py2.py3-none-any.whl

Files changed (140)
  1. metaflow-stubs/__init__.pyi +579 -579
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +2 -2
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +90 -90
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  33. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
  34. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
  35. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  36. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  43. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  45. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  52. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  55. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  57. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  58. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  59. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  61. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  62. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  73. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  74. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  75. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  76. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  78. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  79. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  80. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  82. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  83. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  84. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  85. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  88. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  90. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  97. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  98. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  100. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  101. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  102. metaflow-stubs/plugins/package_cli.pyi +2 -2
  103. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/perimeters.pyi +2 -2
  105. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  112. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  116. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  118. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  121. metaflow-stubs/procpoll.pyi +2 -2
  122. metaflow-stubs/profilers/__init__.pyi +2 -2
  123. metaflow-stubs/pylint_wrapper.pyi +2 -2
  124. metaflow-stubs/runner/__init__.pyi +2 -2
  125. metaflow-stubs/runner/deployer.pyi +4 -4
  126. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +2 -2
  131. metaflow-stubs/system/__init__.pyi +3 -3
  132. metaflow-stubs/system/system_logger.pyi +2 -2
  133. metaflow-stubs/system/system_monitor.pyi +3 -3
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. {ob_metaflow_stubs-6.0.3.103rc0.dist-info → ob_metaflow_stubs-6.0.3.104.dist-info}/METADATA +1 -1
  137. ob_metaflow_stubs-6.0.3.104.dist-info/RECORD +140 -0
  138. ob_metaflow_stubs-6.0.3.103rc0.dist-info/RECORD +0 -140
  139. {ob_metaflow_stubs-6.0.3.103rc0.dist-info → ob_metaflow_stubs-6.0.3.104.dist-info}/WHEEL +0 -0
  140. {ob_metaflow_stubs-6.0.3.103rc0.dist-info → ob_metaflow_stubs-6.0.3.104.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.10);ob(v1) #
- # Generated on 2024-09-20T18:44:03.631910 #
+ # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-09-20T21:46:07.326886 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
- import metaflow.datastore.inputs
  import metaflow.client.core
- import metaflow.events
- import metaflow.parameters
- import metaflow.runner.metaflow_runner
  import metaflow._vendor.click.types
+ import metaflow.runner.metaflow_runner
+ import metaflow.parameters
+ import metaflow.events
+ import metaflow.metaflow_current
+ import datetime
+ import metaflow.datastore.inputs
  import metaflow_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures
  import metaflow.flowspec
  import typing
- import metaflow.metaflow_current
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -434,6 +434,80 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
  @typing.overload
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -483,63 +557,6 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
  @typing.overload
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -574,26 +591,26 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def checkpoint(*, load_policy: str = "fresh", temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables checkpointing for a step.
+ Enables loading / saving of models within a step.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.

  temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ The root directory under which `current.model.loaded` will store loaded models



@@ -601,33 +618,33 @@ def checkpoint(*, load_policy: str = "fresh", temp_dir_root: str = None) -> typi
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = "fresh", temp_dir_root: str = None):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Enables checkpointing for a step.
+ Enables loading / saving of models within a step.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.

  temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ The root directory under which `current.model.loaded` will store loaded models
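Editor's note: the two hunks above swap the @checkpoint stub for the @model stub at this position (both decorators remain defined elsewhere in the file). A hedged sketch of the flow the docstring describes: an artifact saved to self is named in load=, and the unpacked model then appears under current.model.loaded. The current.model.save(...) call is an assumption about the obcheckpoint extension and is not defined in this diff:

from metaflow import FlowSpec, step, model, current

class ModelExampleFlow(FlowSpec):

    @model
    @step
    def start(self):
        with open("weights.bin", "wb") as f:
            f.write(b"\x00" * 8)                         # stand-in for real model weights
        # Assumed API: persist the file and keep the returned reference as a flow artifact on self.
        self.trained_model = current.model.save("weights.bin")
        self.next(self.score)

    @model(load="trained_model")                         # load the artifact named above, per the docstring
    @step
    def score(self):
        # Per the docstring, loaded models are exposed under current.model.loaded.
        print("model available at", current.model.loaded["trained_model"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ModelExampleFlow()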
@@ -686,60 +703,113 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies the PyPI packages for the step.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies the PyPI packages for the step.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
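Editor's note: this hunk replaces the @retry stub at this position with @pypi and re-adds the @timeout stub removed earlier. For illustration only (the package and version names are placeholders), the two decorators compose on a single step; per the docstring, the effective timeout is the sum of the parts:

from metaflow import FlowSpec, step, pypi, timeout

class PypiTimeoutFlow(FlowSpec):

    @timeout(hours=1, minutes=30)                            # effective limit: 1 hour 30 minutes
    @pypi(python="3.11.5", packages={"requests": "2.32.3"})  # step-level PyPI environment (illustrative pins)
    @step
    def start(self):
        import requests                                      # provided by the step's PyPI environment
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiTimeoutFlow()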
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies environment variables to be set prior to the execution of a step.

@@ -770,51 +840,63 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def checkpoint(*, load_policy: str = "fresh", temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Enables checkpointing for a step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+
+
+
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = "fresh", temp_dir_root: str = None):
  """
- Specifies the PyPI packages for the step.
+ Enables checkpointing for a step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+
+
+
  """
  ...
902
 
@@ -837,126 +919,56 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- """
- ...
-
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
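Editor's note: the hunk above relocates the @kubernetes stub (re-added further down) and replaces @conda at this position with @retry. For illustration only, @retry combined with @catch as the docstrings describe: retries absorb transient errors, and @catch records the exception once retries are exhausted so the flow can continue:

import random
from metaflow import FlowSpec, step, retry, catch

class RetryExampleFlow(FlowSpec):

    @catch(var="fetch_error")                  # after retries are exhausted, store the exception instead of failing
    @retry(times=3, minutes_between_retries=1)
    @step
    def start(self):
        self.fetch_error = None
        if random.random() < 0.5:              # stand-in for a flaky external call
            raise RuntimeError("transient failure")
        self.next(self.end)

    @step
    def end(self):
        if self.fetch_error:
            print("step ultimately failed:", self.fetch_error)

if __name__ == "__main__":
    RetryExampleFlow()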
@@ -1037,261 +1049,146 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1037
1049
  """
1038
1050
  ...
1039
1051
 
1040
- @typing.overload
1041
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1052
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1042
1053
  """
1043
- Enables loading / saving of models within a step.
1044
-
1045
-
1046
- Parameters
1047
- ----------
1048
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1049
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1050
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
1051
- - `current.checkpoint`
1052
- - `current.model`
1053
- - `current.huggingface_hub`
1054
-
1055
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1056
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1057
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1058
-
1059
- temp_dir_root : str, default: None
1060
- The root directory under which `current.model.loaded` will store loaded models
1061
-
1062
-
1063
-
1064
- """
1065
- ...
1066
-
1067
- @typing.overload
1068
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1069
- ...
1070
-
1071
- @typing.overload
1072
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1073
- ...
1074
-
1075
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1076
- """
1077
- Enables loading / saving of models within a step.
1078
-
1079
-
1080
- Parameters
1081
- ----------
1082
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1083
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1084
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
1085
- - `current.checkpoint`
1086
- - `current.model`
1087
- - `current.huggingface_hub`
1088
-
1089
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1090
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1091
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1092
-
1093
- temp_dir_root : str, default: None
1094
- The root directory under which `current.model.loaded` will store loaded models
1095
-
1096
-
1097
-
1098
- """
1099
- ...
1100
-
1101
- @typing.overload
1102
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1103
- """
1104
- Internal decorator to support Fast bakery
1105
- """
1106
- ...
1107
-
1108
- @typing.overload
1109
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1110
- ...
1111
-
1112
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1113
- """
1114
- Internal decorator to support Fast bakery
1115
- """
1116
- ...
1117
-
1118
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1119
- """
1120
- Specifies what flows belong to the same project.
1121
-
1122
- A project-specific namespace is created for all flows that
1123
- use the same `@project(name)`.
1054
+ Specifies that this step should execute on Kubernetes.
1124
1055
 
1125
1056
  Parameters
1126
1057
  ----------
1127
- name : str
1128
- Project name. Make sure that the name is unique amongst all
1129
- projects that use the same production scheduler. The name may
1130
- contain only lowercase alphanumeric characters and underscores.
1131
-
1132
-
1058
+ cpu : int, default 1
1059
+ Number of CPUs required for this step. If `@resources` is
1060
+ also present, the maximum value from all decorators is used.
1061
+ memory : int, default 4096
1062
+ Memory size (in MB) required for this step. If
1063
+ `@resources` is also present, the maximum value from all decorators is
1064
+ used.
1065
+ disk : int, default 10240
1066
+ Disk size (in MB) required for this step. If
1067
+ `@resources` is also present, the maximum value from all decorators is
1068
+ used.
1069
+ image : str, optional, default None
1070
+ Docker image to use when launching on Kubernetes. If not specified, and
1071
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1072
+ not, a default Docker image mapping to the current version of Python is used.
1073
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1074
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1075
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1076
+ Kubernetes service account to use when launching pod in Kubernetes.
1077
+ secrets : List[str], optional, default None
1078
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1079
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1080
+ in Metaflow configuration.
1081
+ node_selector: Union[Dict[str,str], str], optional, default None
1082
+ Kubernetes node selector(s) to apply to the pod running the task.
1083
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
1084
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
1085
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1086
+ Kubernetes namespace to use when launching pod in Kubernetes.
1087
+ gpu : int, optional, default None
1088
+ Number of GPUs required for this step. A value of zero implies that
1089
+ the scheduled node should not have GPUs.
1090
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1091
+ The vendor of the GPUs to be used for this step.
1092
+ tolerations : List[str], default []
1093
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1094
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1095
+ use_tmpfs : bool, default False
1096
+ This enables an explicit tmpfs mount for this step.
1097
+ tmpfs_tempdir : bool, default True
1098
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1099
+ tmpfs_size : int, optional, default: None
1100
+ The value for the size (in MiB) of the tmpfs mount for this step.
1101
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1102
+ memory allocated for this step.
1103
+ tmpfs_path : str, optional, default /metaflow_temp
1104
+ Path to tmpfs mount for this step.
1105
+ persistent_volume_claims : Dict[str, str], optional, default None
1106
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1107
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1108
+ shared_memory: int, optional
1109
+ Shared memory size (in MiB) required for this step.
1110
+ port: int, optional
1111
+ Port number to specify in the Kubernetes job object.
1112
+ compute_pool : str, optional, default None
1113
+ Compute pool to be used for this step.
1114
+ If not specified, any accessible compute pool within the perimeter is used.
1133
1115
  """
1134
1116
  ...
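To make the parameter list above concrete, here is a minimal sketch of a flow whose `start` step runs on Kubernetes via this decorator. The flow name and the cpu/memory/disk values are illustrative assumptions, not values taken from this stub; as the docstring notes, if `@resources` is also present the larger request from the two decorators wins.

```python
from metaflow import FlowSpec, kubernetes, step


class KubernetesDemoFlow(FlowSpec):
    # Hypothetical flow; the resource requests below are illustrative only.

    @kubernetes(cpu=2, memory=8192, disk=20480)  # run this step in a Kubernetes pod
    @step
    def start(self):
        print("running inside a Kubernetes pod")
        self.next(self.end)

    @step
    def end(self):
        print("done")


if __name__ == "__main__":
    KubernetesDemoFlow()
```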
1135
1117
 
1136
1118
  @typing.overload
1137
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1119
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1138
1120
  """
1139
- Specifies the times when the flow should be run when running on a
1140
- production scheduler.
1121
+ Specifies the PyPI packages for all steps of the flow.
1141
1122
 
1123
+ Use `@pypi_base` to set common packages required by all
1124
+ steps and use `@pypi` to specify step-specific overrides.
1142
1125
  Parameters
1143
1126
  ----------
1144
- hourly : bool, default False
1145
- Run the workflow hourly.
1146
- daily : bool, default True
1147
- Run the workflow daily.
1148
- weekly : bool, default False
1149
- Run the workflow weekly.
1150
- cron : str, optional, default None
1151
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1152
- specified by this expression.
1153
- timezone : str, optional, default None
1154
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1155
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1127
+ packages : Dict[str, str], default: {}
1128
+ Packages to use for this flow. The key is the name of the package
1129
+ and the value is the version to use.
1130
+ python : str, optional, default: None
1131
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1132
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1156
1133
  """
1157
1134
  ...
1158
1135
 
1159
1136
  @typing.overload
1160
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1161
- ...
1162
-
1163
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1164
- """
1165
- Specifies the times when the flow should be run when running on a
1166
- production scheduler.
1167
-
1168
- Parameters
1169
- ----------
1170
- hourly : bool, default False
1171
- Run the workflow hourly.
1172
- daily : bool, default True
1173
- Run the workflow daily.
1174
- weekly : bool, default False
1175
- Run the workflow weekly.
1176
- cron : str, optional, default None
1177
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1178
- specified by this expression.
1179
- timezone : str, optional, default None
1180
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1181
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1182
- """
1137
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1183
1138
  ...
1184
1139
 
1185
- @typing.overload
1186
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1140
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1187
1141
  """
1188
- Specifies the flow(s) that this flow depends on.
1189
-
1190
- ```
1191
- @trigger_on_finish(flow='FooFlow')
1192
- ```
1193
- or
1194
- ```
1195
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1196
- ```
1197
- This decorator respects the @project decorator and triggers the flow
1198
- when upstream runs within the same namespace complete successfully
1199
-
1200
- Additionally, you can specify project aware upstream flow dependencies
1201
- by specifying the fully qualified project_flow_name.
1202
- ```
1203
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1204
- ```
1205
- or
1206
- ```
1207
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1208
- ```
1209
-
1210
- You can also specify just the project or project branch (other values will be
1211
- inferred from the current project or project branch):
1212
- ```
1213
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1214
- ```
1215
-
1216
- Note that `branch` is typically one of:
1217
- - `prod`
1218
- - `user.bob`
1219
- - `test.my_experiment`
1220
- - `prod.staging`
1142
+ Specifies the PyPI packages for all steps of the flow.
1221
1143
 
1144
+ Use `@pypi_base` to set common packages required by all
1145
+ steps and use `@pypi` to specify step-specific overrides.
1222
1146
  Parameters
1223
1147
  ----------
1224
- flow : Union[str, Dict[str, str]], optional, default None
1225
- Upstream flow dependency for this flow.
1226
- flows : List[Union[str, Dict[str, str]]], default []
1227
- Upstream flow dependencies for this flow.
1228
- options : Dict[str, Any], default {}
1229
- Backend-specific configuration for tuning eventing behavior.
1230
-
1231
-
1148
+ packages : Dict[str, str], default: {}
1149
+ Packages to use for this flow. The key is the name of the package
1150
+ and the value is the version to use.
1151
+ python : str, optional, default: None
1152
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1153
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1232
1154
  """
1233
1155
  ...
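A minimal sketch of `@pypi_base` at the flow level; the package pins and flow name are made-up assumptions. Step-level `@pypi` can then override or extend these packages, per the docstring above, and the resolved environment is typically activated by running the flow with Metaflow's `--environment=pypi` option.

```python
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={"requests": "2.31.0"}, python="3.10.12")  # pins are illustrative
class PypiBaseDemoFlow(FlowSpec):

    @step
    def start(self):
        import requests  # provided by the flow-level PyPI environment
        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseDemoFlow()
```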
1234
1156
 
1235
- @typing.overload
1236
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1237
- ...
1238
-
1239
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1157
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1240
1158
  """
1241
- Specifies the flow(s) that this flow depends on.
1242
-
1243
- ```
1244
- @trigger_on_finish(flow='FooFlow')
1245
- ```
1246
- or
1247
- ```
1248
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1249
- ```
1250
- This decorator respects the @project decorator and triggers the flow
1251
- when upstream runs within the same namespace complete successfully
1159
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1252
1160
 
1253
- Additionally, you can specify project aware upstream flow dependencies
1254
- by specifying the fully qualified project_flow_name.
1255
- ```
1256
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1257
- ```
1258
- or
1259
- ```
1260
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1261
- ```
1161
+ User code call
1162
+ --------------
1163
+ @nim(
1164
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1165
+ backend='managed'
1166
+ )
1262
1167
 
1263
- You can also specify just the project or project branch (other values will be
1264
- inferred from the current project or project branch):
1265
- ```
1266
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1267
- ```
1168
+ Valid backend options
1169
+ ---------------------
1170
+ - 'managed': Outerbounds selects a compute provider based on the model.
1171
+ - 🚧 'dataplane': Run in your account.
1268
1172
 
1269
- Note that `branch` is typically one of:
1270
- - `prod`
1271
- - `user.bob`
1272
- - `test.my_experiment`
1273
- - `prod.staging`
1173
+ Valid model options
1174
+ -------------------
1175
+ - 'meta/llama3-8b-instruct': 8B parameter model
1176
+ - 'meta/llama3-70b-instruct': 70B parameter model
1177
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1274
1178
 
1275
1179
  Parameters
1276
1180
  ----------
1277
- flow : Union[str, Dict[str, str]], optional, default None
1278
- Upstream flow dependency for this flow.
1279
- flows : List[Union[str, Dict[str, str]]], default []
1280
- Upstream flow dependencies for this flow.
1281
- options : Dict[str, Any], default {}
1282
- Backend-specific configuration for tuning eventing behavior.
1283
-
1284
-
1181
+ models: list[NIM]
1182
+ List of NIM containers running models in sidecars.
1183
+ backend: str
1184
+ Compute provider to run the NIM container.
1285
1185
  """
1286
1186
  ...
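A sketch of the decorator in context, reusing the model and backend values from the "User code call" example above; the flow name and step bodies are assumptions, and how the step communicates with the sidecar is deployment-specific and omitted here.

```python
from metaflow import FlowSpec, nim, step


@nim(models=["meta/llama3-8b-instruct"], backend="managed")  # values mirror the docstring example
class NimDemoFlow(FlowSpec):

    @step
    def start(self):
        # A NIM container serving the listed model runs as a sidecar next to this task.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NimDemoFlow()
```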
1287
1187
 
1288
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1188
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1289
1189
  """
1290
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1291
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1292
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1293
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1294
- starts only after all sensors finish.
1190
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1191
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` decorator can be added to a flow. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1295
1192
 
1296
1193
  Parameters
1297
1194
  ----------
@@ -1312,18 +1209,70 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1312
1209
  Name of the sensor on Airflow
1313
1210
  description : str
1314
1211
  Description of sensor in the Airflow UI
1315
- bucket_key : Union[str, List[str]]
1316
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1317
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1318
- bucket_name : str
1319
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1320
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1321
- wildcard_match : bool
1322
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1323
- aws_conn_id : str
1324
- a reference to the s3 connection on Airflow. (Default: None)
1325
- verify : bool
1326
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1212
+ external_dag_id : str
1213
+ The dag_id that contains the task you want to wait for.
1214
+ external_task_ids : List[str]
1215
+ The list of task_ids that you want to wait for.
1216
+ If None (default value) the sensor waits for the DAG. (Default: None)
1217
+ allowed_states : List[str]
1218
+ Iterable of allowed states. (Default: ['success'])
1219
+ failed_states : List[str]
1220
+ Iterable of failed or disallowed states. (Default: None)
1221
+ execution_delta : datetime.timedelta
1222
+ Time difference with the previous execution to look at;
1223
+ the default is the same logical date as the current task or DAG. (Default: None)
1224
+ check_existence: bool
1225
+ Set to True to check if the external task exists or check if
1226
+ the DAG to wait for exists. (Default: True)
1227
+ """
1228
+ ...
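A sketch of the sensor attached to a flow. Because the stub signature above does not show defaults, every keyword argument is passed explicitly; the DAG name is hypothetical, and the timing values are ordinary Airflow sensor settings rather than values mandated by this decorator. As the docstring says, the sensor only takes effect once the flow is compiled with `airflow create`.

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    timeout=3600,                    # give up after an hour
    poke_interval=60,                # re-check once a minute
    mode="reschedule",               # standard Airflow sensor modes are "poke" and "reschedule"
    exponential_backoff=False,
    pool="default_pool",             # Airflow's built-in default pool
    soft_fail=False,
    name="wait_for_upstream_dag",
    description="Block start until the upstream DAG run succeeds",
    external_dag_id="upstream_etl",  # hypothetical upstream DAG
    external_task_ids=None,          # None waits on the whole DAG, per the docstring
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,            # same logical date as the current DAG, per the docstring
    check_existence=True,
)
class WaitForDagFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    WaitForDagFlow()
```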
1229
+
1230
+ @typing.overload
1231
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1232
+ """
1233
+ Specifies the Conda environment for all steps of the flow.
1234
+
1235
+ Use `@conda_base` to set common libraries required by all
1236
+ steps and use `@conda` to specify step-specific additions.
1237
+
1238
+ Parameters
1239
+ ----------
1240
+ packages : Dict[str, str], default {}
1241
+ Packages to use for this flow. The key is the name of the package
1242
+ and the value is the version to use.
1243
+ libraries : Dict[str, str], default {}
1244
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1245
+ python : str, optional, default None
1246
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1247
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1248
+ disabled : bool, default False
1249
+ If set to True, disables Conda.
1250
+ """
1251
+ ...
1252
+
1253
+ @typing.overload
1254
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1255
+ ...
1256
+
1257
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1258
+ """
1259
+ Specifies the Conda environment for all steps of the flow.
1260
+
1261
+ Use `@conda_base` to set common libraries required by all
1262
+ steps and use `@conda` to specify step-specific additions.
1263
+
1264
+ Parameters
1265
+ ----------
1266
+ packages : Dict[str, str], default {}
1267
+ Packages to use for this flow. The key is the name of the package
1268
+ and the value is the version to use.
1269
+ libraries : Dict[str, str], default {}
1270
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1271
+ python : str, optional, default None
1272
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1273
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1274
+ disabled : bool, default False
1275
+ If set to True, disables Conda.
1327
1276
  """
1328
1277
  ...
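A minimal flow-level sketch of `@conda_base` with made-up pins; step-level `@conda` can add step-specific packages on top, and the environment is typically activated by running the flow with `--environment=conda`.

```python
from metaflow import FlowSpec, conda_base, step


@conda_base(python="3.10.12", packages={"pandas": "2.1.4"})  # pins are illustrative
class CondaBaseDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # provided by the flow-level Conda environment
        print("pandas", pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseDemoFlow()
```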
1329
1278
 
@@ -1422,59 +1371,80 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1422
1371
  """
1423
1372
  ...
1424
1373
 
1425
- @typing.overload
1426
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1374
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1427
1375
  """
1428
- Specifies the Conda environment for all steps of the flow.
1376
+ Specifies what flows belong to the same project.
1429
1377
 
1430
- Use `@conda_base` to set common libraries required by all
1431
- steps and use `@conda` to specify step-specific additions.
1378
+ A project-specific namespace is created for all flows that
1379
+ use the same `@project(name)`.
1432
1380
 
1433
1381
  Parameters
1434
1382
  ----------
1435
- packages : Dict[str, str], default {}
1436
- Packages to use for this flow. The key is the name of the package
1437
- and the value is the version to use.
1438
- libraries : Dict[str, str], default {}
1439
- Supported for backward compatibility. When used with packages, packages will take precedence.
1440
- python : str, optional, default None
1441
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1442
- that the version used will correspond to the version of the Python interpreter used to start the run.
1443
- disabled : bool, default False
1444
- If set to True, disables Conda.
1383
+ name : str
1384
+ Project name. Make sure that the name is unique amongst all
1385
+ projects that use the same production scheduler. The name may
1386
+ contain only lowercase alphanumeric characters and underscores.
1387
+
1388
+
1445
1389
  """
1446
1390
  ...
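A sketch of `@project` on a flow; the project and flow names are assumptions. Per the docstring, every flow deployed with the same project name shares a project-specific namespace on the production scheduler.

```python
from metaflow import FlowSpec, project, step


@project(name="fraud_detection")  # lowercase alphanumerics and underscores only
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```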
1447
1391
 
1448
1392
  @typing.overload
1449
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1393
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1394
+ """
1395
+ Specifies the times when the flow should be run when running on a
1396
+ production scheduler.
1397
+
1398
+ Parameters
1399
+ ----------
1400
+ hourly : bool, default False
1401
+ Run the workflow hourly.
1402
+ daily : bool, default True
1403
+ Run the workflow daily.
1404
+ weekly : bool, default False
1405
+ Run the workflow weekly.
1406
+ cron : str, optional, default None
1407
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1408
+ specified by this expression.
1409
+ timezone : str, optional, default None
1410
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1411
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1412
+ """
1450
1413
  ...
1451
1414
 
1452
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1415
+ @typing.overload
1416
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1417
+ ...
1418
+
1419
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1453
1420
  """
1454
- Specifies the Conda environment for all steps of the flow.
1455
-
1456
- Use `@conda_base` to set common libraries required by all
1457
- steps and use `@conda` to specify step-specific additions.
1421
+ Specifies the times when the flow should be run when running on a
1422
+ production scheduler.
1458
1423
 
1459
1424
  Parameters
1460
1425
  ----------
1461
- packages : Dict[str, str], default {}
1462
- Packages to use for this flow. The key is the name of the package
1463
- and the value is the version to use.
1464
- libraries : Dict[str, str], default {}
1465
- Supported for backward compatibility. When used with packages, packages will take precedence.
1466
- python : str, optional, default None
1467
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1468
- that the version used will correspond to the version of the Python interpreter used to start the run.
1469
- disabled : bool, default False
1470
- If set to True, disables Conda.
1426
+ hourly : bool, default False
1427
+ Run the workflow hourly.
1428
+ daily : bool, default True
1429
+ Run the workflow daily.
1430
+ weekly : bool, default False
1431
+ Run the workflow weekly.
1432
+ cron : str, optional, default None
1433
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1434
+ specified by this expression.
1435
+ timezone : str, optional, default None
1436
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1437
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1471
1438
  """
1472
1439
  ...
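A sketch of `@schedule`; the flow name is an assumption, and `daily=True` is just one of the documented options (a `cron` expression plus `timezone` is the more precise alternative). The schedule only takes effect when the flow is deployed to a production scheduler.

```python
from metaflow import FlowSpec, schedule, step


@schedule(daily=True)  # hourly, weekly, and cron are the other documented options
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyReportFlow()
```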
1473
1440
 
1474
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1441
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1475
1442
  """
1476
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1477
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1443
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1444
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1445
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` decorator can be
1446
+ added to a flow. Adding more than one decorator will ensure that the `start` step
1447
+ starts only after all sensors finish.
1478
1448
 
1479
1449
  Parameters
1480
1450
  ----------
@@ -1495,91 +1465,121 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1495
1465
  Name of the sensor on Airflow
1496
1466
  description : str
1497
1467
  Description of sensor in the Airflow UI
1498
- external_dag_id : str
1499
- The dag_id that contains the task you want to wait for.
1500
- external_task_ids : List[str]
1501
- The list of task_ids that you want to wait for.
1502
- If None (default value) the sensor waits for the DAG. (Default: None)
1503
- allowed_states : List[str]
1504
- Iterable of allowed states, (Default: ['success'])
1505
- failed_states : List[str]
1506
- Iterable of failed or dis-allowed states. (Default: None)
1507
- execution_delta : datetime.timedelta
1508
- time difference with the previous execution to look at,
1509
- the default is the same logical date as the current task or DAG. (Default: None)
1510
- check_existence: bool
1511
- Set to True to check if the external task exists or check if
1512
- the DAG to wait for exists. (Default: True)
1468
+ bucket_key : Union[str, List[str]]
1469
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1470
+ When it's specified as a full s3:// url, please leave `bucket_name` as None.
1471
+ bucket_name : str
1472
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1473
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1474
+ wildcard_match : bool
1475
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1476
+ aws_conn_id : str
1477
+ A reference to the S3 connection on Airflow. (Default: None)
1478
+ verify : bool
1479
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1513
1480
  """
1514
1481
  ...
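As with the external-task sensor, here is a sketch that passes every keyword argument from the stub signature explicitly, since no defaults are shown; the bucket, key, and timing values are made up. Because `bucket_key` is given as a full s3:// URL, `bucket_name` is left as None, per the docstring.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=False,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_raw_data",
    description="Wait for today's raw data drop in S3",
    bucket_key="s3://example-bucket/raw/latest.csv",  # full s3:// URL, so bucket_name stays None
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,  # per the docstring, None is the default connection reference
    verify=None,
)
class WaitForS3KeyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    WaitForS3KeyFlow()
```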
1515
1482
 
1516
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1483
+ @typing.overload
1484
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1517
1485
  """
1518
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1486
+ Specifies the flow(s) that this flow depends on.
1519
1487
 
1520
- User code call
1521
- -----------
1522
- @nim(
1523
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1524
- backend='managed'
1525
- )
1488
+ ```
1489
+ @trigger_on_finish(flow='FooFlow')
1490
+ ```
1491
+ or
1492
+ ```
1493
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1494
+ ```
1495
+ This decorator respects the @project decorator and triggers the flow
1496
+ when upstream runs within the same namespace complete successfully.
1526
1497
 
1527
- Valid backend options
1528
- ---------------------
1529
- - 'managed': Outerbounds selects a compute provider based on the model.
1530
- - 🚧 'dataplane': Run in your account.
1498
+ Additionally, you can specify project-aware upstream flow dependencies
1499
+ by providing the fully qualified project_flow_name.
1500
+ ```
1501
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1502
+ ```
1503
+ or
1504
+ ```
1505
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1506
+ ```
1531
1507
 
1532
- Valid model options
1533
- ----------------
1534
- - 'meta/llama3-8b-instruct': 8B parameter model
1535
- - 'meta/llama3-70b-instruct': 70B parameter model
1536
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1508
+ You can also specify just the project or project branch (other values will be
1509
+ inferred from the current project or project branch):
1510
+ ```
1511
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1512
+ ```
1537
1513
 
1538
- Parameters
1539
- ----------
1540
- models: list[NIM]
1541
- List of NIM containers running models in sidecars.
1542
- backend: str
1543
- Compute provider to run the NIM container.
1544
- """
1545
- ...
1546
-
1547
- @typing.overload
1548
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1549
- """
1550
- Specifies the PyPI packages for all steps of the flow.
1514
+ Note that `branch` is typically one of:
1515
+ - `prod`
1516
+ - `user.bob`
1517
+ - `test.my_experiment`
1518
+ - `prod.staging`
1551
1519
 
1552
- Use `@pypi_base` to set common packages required by all
1553
- steps and use `@pypi` to specify step-specific overrides.
1554
1520
  Parameters
1555
1521
  ----------
1556
- packages : Dict[str, str], default: {}
1557
- Packages to use for this flow. The key is the name of the package
1558
- and the value is the version to use.
1559
- python : str, optional, default: None
1560
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1561
- that the version used will correspond to the version of the Python interpreter used to start the run.
1522
+ flow : Union[str, Dict[str, str]], optional, default None
1523
+ Upstream flow dependency for this flow.
1524
+ flows : List[Union[str, Dict[str, str]]], default []
1525
+ Upstream flow dependencies for this flow.
1526
+ options : Dict[str, Any], default {}
1527
+ Backend-specific configuration for tuning eventing behavior.
1528
+
1529
+
1562
1530
  """
1563
1531
  ...
1564
1532
 
1565
1533
  @typing.overload
1566
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1534
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1567
1535
  ...
1568
1536
 
1569
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1537
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1570
1538
  """
1571
- Specifies the PyPI packages for all steps of the flow.
1539
+ Specifies the flow(s) that this flow depends on.
1540
+
1541
+ ```
1542
+ @trigger_on_finish(flow='FooFlow')
1543
+ ```
1544
+ or
1545
+ ```
1546
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1547
+ ```
1548
+ This decorator respects the @project decorator and triggers the flow
1549
+ when upstream runs within the same namespace complete successfully.
1550
+
1551
+ Additionally, you can specify project-aware upstream flow dependencies
1552
+ by providing the fully qualified project_flow_name.
1553
+ ```
1554
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1555
+ ```
1556
+ or
1557
+ ```
1558
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1559
+ ```
1560
+
1561
+ You can also specify just the project or project branch (other values will be
1562
+ inferred from the current project or project branch):
1563
+ ```
1564
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1565
+ ```
1566
+
1567
+ Note that `branch` is typically one of:
1568
+ - `prod`
1569
+ - `user.bob`
1570
+ - `test.my_experiment`
1571
+ - `prod.staging`
1572
1572
 
1573
- Use `@pypi_base` to set common packages required by all
1574
- steps and use `@pypi` to specify step-specific overrides.
1575
1573
  Parameters
1576
1574
  ----------
1577
- packages : Dict[str, str], default: {}
1578
- Packages to use for this flow. The key is the name of the package
1579
- and the value is the version to use.
1580
- python : str, optional, default: None
1581
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1582
- that the version used will correspond to the version of the Python interpreter used to start the run.
1575
+ flow : Union[str, Dict[str, str]], optional, default None
1576
+ Upstream flow dependency for this flow.
1577
+ flows : List[Union[str, Dict[str, str]]], default []
1578
+ Upstream flow dependencies for this flow.
1579
+ options : Dict[str, Any], default {}
1580
+ Backend-specific configuration for tuning eventing behavior.
1581
+
1582
+
1583
1583
  """
1584
1584
  ...
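Finally, a sketch of the simplest form shown in the docstring; `DownstreamFlow` is a hypothetical name, and the trigger fires only once the flow is deployed to a production orchestrator such as Argo Workflows.

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")  # FooFlow is the upstream flow from the docstring examples
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Once deployed, this flow starts automatically whenever a FooFlow run
        # in the same (project-aware) namespace completes successfully.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```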
1585
1585