metaflow-stubs 2.12.28__py2.py3-none-any.whl → 2.12.30__py2.py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- metaflow-stubs/__init__.pyi +279 -2983
- metaflow-stubs/cards.pyi +19 -473
- metaflow-stubs/cli.pyi +17 -81
- metaflow-stubs/client/__init__.pyi +19 -1113
- metaflow-stubs/client/core.pyi +19 -159
- metaflow-stubs/client/filecache.pyi +7 -11
- metaflow-stubs/clone_util.pyi +6 -26
- metaflow-stubs/events.pyi +7 -6
- metaflow-stubs/exception.pyi +8 -6
- metaflow-stubs/flowspec.pyi +21 -105
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +17 -565
- metaflow-stubs/info_file.pyi +6 -5
- metaflow-stubs/metadata_provider/__init__.pyi +16 -0
- metaflow-stubs/metadata_provider/heartbeat.pyi +34 -0
- metaflow-stubs/{metadata → metadata_provider}/metadata.pyi +10 -22
- metaflow-stubs/metadata_provider/util.pyi +19 -0
- metaflow-stubs/metaflow_config.pyi +8 -11
- metaflow-stubs/metaflow_current.pyi +32 -31
- metaflow-stubs/mflog/__init__.pyi +6 -0
- metaflow-stubs/mflog/mflog.pyi +52 -5
- metaflow-stubs/multicore_utils.pyi +6 -5
- metaflow-stubs/parameters.pyi +12 -22
- metaflow-stubs/plugins/__init__.pyi +51 -163
- metaflow-stubs/plugins/airflow/__init__.pyi +12 -5
- metaflow-stubs/plugins/airflow/airflow.pyi +19 -130
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +17 -136
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +7 -26
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +7 -6
- metaflow-stubs/plugins/airflow/exception.pyi +7 -11
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +10 -97
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +9 -30
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +9 -40
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +9 -40
- metaflow-stubs/plugins/argo/__init__.pyi +12 -5
- metaflow-stubs/plugins/argo/argo_client.pyi +8 -26
- metaflow-stubs/plugins/argo/argo_events.pyi +7 -11
- metaflow-stubs/plugins/argo/argo_workflows.pyi +17 -121
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +22 -460
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +12 -404
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +65 -322
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +165 -0
- metaflow-stubs/plugins/aws/__init__.pyi +11 -5
- metaflow-stubs/plugins/aws/aws_client.pyi +6 -5
- metaflow-stubs/plugins/aws/aws_utils.pyi +6 -11
- metaflow-stubs/plugins/aws/batch/__init__.pyi +10 -5
- metaflow-stubs/plugins/aws/batch/batch.pyi +10 -55
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +10 -31
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +7 -11
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +15 -140
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +7 -5
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +10 -21
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +15 -5
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +6 -5
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +6 -5
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +6 -5
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +7 -5
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +11 -65
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +19 -175
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +6 -5
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +8 -37
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +53 -290
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +127 -0
- metaflow-stubs/plugins/azure/__init__.pyi +12 -7
- metaflow-stubs/plugins/azure/azure_credential.pyi +6 -5
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +7 -11
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +11 -24
- metaflow-stubs/plugins/azure/azure_utils.pyi +11 -29
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +8 -23
- metaflow-stubs/plugins/azure/includefile_support.pyi +7 -17
- metaflow-stubs/plugins/cards/__init__.pyi +15 -5
- metaflow-stubs/plugins/cards/card_cli.pyi +22 -491
- metaflow-stubs/plugins/cards/card_client.pyi +14 -76
- metaflow-stubs/plugins/cards/card_creator.pyi +7 -10
- metaflow-stubs/plugins/cards/card_datastore.pyi +10 -18
- metaflow-stubs/plugins/cards/card_decorator.pyi +10 -126
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +14 -81
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +13 -96
- metaflow-stubs/plugins/cards/card_modules/card.pyi +6 -5
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +12 -73
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +6 -61
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +6 -5
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +8 -45
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +7 -6
- metaflow-stubs/plugins/cards/card_modules/components.pyi +24 -107
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +6 -5
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +6 -12
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +11 -88
- metaflow-stubs/plugins/cards/card_resolver.pyi +6 -49
- metaflow-stubs/plugins/cards/component_serializer.pyi +13 -63
- metaflow-stubs/plugins/cards/exception.pyi +7 -11
- metaflow-stubs/plugins/catch_decorator.pyi +9 -29
- metaflow-stubs/plugins/datatools/__init__.pyi +13 -392
- metaflow-stubs/plugins/datatools/local.pyi +7 -11
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +19 -653
- metaflow-stubs/plugins/datatools/s3/s3.pyi +15 -263
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +7 -10
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +6 -11
- metaflow-stubs/plugins/debug_logger.pyi +7 -5
- metaflow-stubs/plugins/debug_monitor.pyi +7 -5
- metaflow-stubs/plugins/environment_decorator.pyi +7 -5
- metaflow-stubs/plugins/events_decorator.pyi +8 -14
- metaflow-stubs/plugins/frameworks/__init__.pyi +7 -5
- metaflow-stubs/plugins/frameworks/pytorch.pyi +8 -45
- metaflow-stubs/plugins/gcp/__init__.pyi +11 -7
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +11 -24
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +7 -11
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +6 -5
- metaflow-stubs/plugins/gcp/gs_utils.pyi +8 -20
- metaflow-stubs/plugins/gcp/includefile_support.pyi +7 -17
- metaflow-stubs/plugins/kubernetes/__init__.pyi +13 -5
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +6 -10
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +9 -29
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +16 -155
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +9 -72
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +19 -142
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +8 -41
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +7 -11
- metaflow-stubs/plugins/logs_cli.pyi +10 -9
- metaflow-stubs/plugins/package_cli.pyi +7 -5
- metaflow-stubs/plugins/parallel_decorator.pyi +11 -59
- metaflow-stubs/plugins/project_decorator.pyi +8 -14
- metaflow-stubs/plugins/pypi/__init__.pyi +12 -11
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +8 -27
- metaflow-stubs/plugins/pypi/conda_environment.pyi +13 -19
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +7 -5
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +7 -39
- metaflow-stubs/plugins/pypi/utils.pyi +7 -11
- metaflow-stubs/plugins/resources_decorator.pyi +7 -5
- metaflow-stubs/plugins/retry_decorator.pyi +7 -11
- metaflow-stubs/plugins/secrets/__init__.pyi +9 -5
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +9 -14
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +7 -11
- metaflow-stubs/plugins/storage_executor.pyi +6 -11
- metaflow-stubs/plugins/tag_cli.pyi +14 -396
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +9 -34
- metaflow-stubs/plugins/timeout_decorator.pyi +7 -11
- metaflow-stubs/procpoll.pyi +7 -5
- metaflow-stubs/pylint_wrapper.pyi +7 -11
- metaflow-stubs/runner/__init__.pyi +13 -5
- metaflow-stubs/runner/deployer.pyi +102 -210
- metaflow-stubs/runner/deployer_impl.pyi +87 -0
- metaflow-stubs/runner/metaflow_runner.pyi +23 -507
- metaflow-stubs/runner/nbdeploy.pyi +16 -60
- metaflow-stubs/runner/nbrun.pyi +11 -148
- metaflow-stubs/runner/subprocess_manager.pyi +9 -10
- metaflow-stubs/runner/utils.pyi +44 -9
- metaflow-stubs/system/__init__.pyi +9 -87
- metaflow-stubs/system/system_logger.pyi +7 -6
- metaflow-stubs/system/system_monitor.pyi +6 -5
- metaflow-stubs/tagging_util.pyi +6 -10
- metaflow-stubs/tuple_util.pyi +6 -5
- metaflow-stubs/version.pyi +6 -5
- {metaflow_stubs-2.12.28.dist-info → metaflow_stubs-2.12.30.dist-info}/METADATA +2 -2
- metaflow_stubs-2.12.30.dist-info/RECORD +158 -0
- {metaflow_stubs-2.12.28.dist-info → metaflow_stubs-2.12.30.dist-info}/WHEEL +1 -1
- metaflow-stubs/metadata/util.pyi +0 -18
- metaflow_stubs-2.12.28.dist-info/RECORD +0 -152
- {metaflow_stubs-2.12.28.dist-info → metaflow_stubs-2.12.30.dist-info}/top_level.txt +0 -0
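The most visible layout changes in this release are the rename of the `metaflow-stubs/metadata` package to `metaflow-stubs/metadata_provider` (with new `heartbeat.pyi` and `util.pyi` stubs) and the split of the deployer stubs into `runner/deployer_impl.pyi` plus per-orchestrator `*_deployer_objects.pyi` modules. A minimal sketch of what the rename means for type-checked user code follows; the import path mirrors the stub layout above, and the constructed values are purely illustrative:

```python
# Illustrative only: with metaflow-stubs 2.12.30 installed, a type checker resolves
# MetaDatum through the renamed metadata_provider package (previously metaflow.metadata).
from metaflow.metadata_provider.metadata import MetaDatum

# MetaDatum is a (field, value, type, tags) record, per the stub shown further below;
# the values here are placeholders.
datum = MetaDatum(field="example-field", value="42", type="example-type", tags=[])
```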
@@ -1,51 +1,29 @@
-
-#
-# MF version: 2.12.
-# Generated on 2024-11-
-
+######################################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.12.30 #
+# Generated on 2024-11-13T13:50:31.368039 #
+######################################################################################################
 
 from __future__ import annotations
 
+import metaflow
 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.decorators
-    import metaflow.metaflow_current
-    import metaflow.parameters
     import metaflow.exception
 
-JSONType
-
-
-
-
-
-
-
-
-
-
-
-
-    selected through the `METAFLOW_PROFILE` environment variable.
-
-    Returns
-    -------
-    str
-        Information about the Metadata provider currently selected. This information typically
-        returns provider specific information (like URL for remote providers or local paths for
-        local providers).
-    """
-    ...
-
-class MetaflowException(Exception, metaclass=type):
-    def __init__(self, msg = "", lineno = None):
-        ...
-    def __str__(self):
-        ...
-    ...
-
-class MetaflowInternalError(metaflow.exception.MetaflowException, metaclass=type):
-    ...
+from ....parameters import JSONType as JSONType
+from ....metaflow_current import current as current
+from .... import parameters as parameters
+from ....client.core import get_metadata as get_metadata
+from ...._vendor import click as click
+from ....exception import MetaflowException as MetaflowException
+from ....exception import MetaflowInternalError as MetaflowInternalError
+from ..batch.batch_decorator import BatchDecorator as BatchDecorator
+from ....tagging_util import validate_tags as validate_tags
+from .production_token import load_token as load_token
+from .production_token import new_token as new_token
+from .production_token import store_token as store_token
+from .step_functions import StepFunctions as StepFunctions
 
 SERVICE_VERSION_CHECK: bool
 
@@ -53,140 +31,6 @@ SFN_STATE_MACHINE_PREFIX: None
 
 UI_URL: None
 
-class BatchDecorator(metaflow.decorators.StepDecorator, metaclass=type):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
-    """
-    def __init__(self, attributes = None, statically_defined = False):
-        ...
-    def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
-        ...
-    def runtime_init(self, flow, graph, package, run_id):
-        ...
-    def runtime_task_created(self, task_datastore, task_id, split_index, input_paths, is_cloned, ubf_context):
-        ...
-    def runtime_step_cli(self, cli_args, retry_count, max_user_code_retries, ubf_context):
-        ...
-    def task_pre_step(self, step_name, task_datastore, metadata, run_id, task_id, flow, graph, retry_count, max_retries, ubf_context, inputs):
-        ...
-    def task_finished(self, step_name, flow, graph, is_task_ok, retry_count, max_retries):
-        ...
-    ...
-
-def validate_tags(tags, existing_tags = None):
-    """
-    Raises MetaflowTaggingError if invalid based on these rules:
-
-    Tag set size is too large. But it's OK if tag set is not larger
-    than an existing tag set (if provided).
-
-    Then, we validate each tag. See validate_tag()
-    """
-    ...
-
-def load_token(token_prefix):
-    ...
-
-def new_token(token_prefix, prev_token = None):
-    ...
-
-def store_token(token_prefix, token):
-    ...
-
-class StepFunctions(object, metaclass=type):
-    def __init__(self, name, graph, flow, code_package_sha, code_package_url, production_token, metadata, flow_datastore, environment, event_logger, monitor, tags = None, namespace = None, username = None, max_workers = None, workflow_timeout = None, is_project = False, use_distributed_map = False):
-        ...
-    def to_json(self):
-        ...
-    def trigger_explanation(self):
-        ...
-    def deploy(self, log_execution_history):
-        ...
-    def schedule(self):
-        ...
-    @classmethod
-    def delete(cls, name):
-        ...
-    @classmethod
-    def terminate(cls, flow_name, name):
-        ...
-    @classmethod
-    def trigger(cls, name, parameters):
-        ...
-    @classmethod
-    def list(cls, name, states):
-        ...
-    @classmethod
-    def get_existing_deployment(cls, name):
-        ...
-    @classmethod
-    def get_execution(cls, state_machine_name, name):
-        ...
-    ...
-
 class IncorrectProductionToken(metaflow.exception.MetaflowException, metaclass=type):
     ...
 
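The hunk above (which, judging by the re-exported names and the +19 / -175 counts in the file list, appears to belong to `plugins/aws/step_functions/step_functions_cli.pyi`) illustrates the pattern used throughout 2.12.30: instead of inlining duplicate stub bodies for `BatchDecorator`, `StepFunctions`, and the production-token helpers, the stub re-exports them. In `.pyi` files the `from mod import name as name` form marks an explicit re-export, so the names keep resolving from the same module for type checkers; a hedged sketch:

```python
# Illustrative only: these names remain importable from the CLI stub module because
# the `from X import Y as Y` lines mark them as explicit re-exports.
from metaflow.plugins.aws.step_functions.step_functions_cli import (
    StepFunctions,  # re-exported from .step_functions
    load_token,     # re-exported from .production_token
)
```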
@@ -1,12 +1,13 @@
-
-#
-# MF version: 2.12.
-# Generated on 2024-11-
-
+######################################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.12.30 #
+# Generated on 2024-11-13T13:50:31.365708 #
+######################################################################################################
 
 from __future__ import annotations
 
 
+
 AWS_SANDBOX_ENABLED: bool
 
 AWS_SANDBOX_REGION: None
@@ -1,47 +1,18 @@
-
-#
-# MF version: 2.12.
-# Generated on 2024-11-
-
+######################################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.12.30 #
+# Generated on 2024-11-13T13:50:31.364021 #
+######################################################################################################
 
 from __future__ import annotations
 
+import metaflow
 import typing
 if typing.TYPE_CHECKING:
     import metaflow.decorators
 
-
-
-    MetaDatum(field, value, type, tags)
-    """
-    @staticmethod
-    def __new__(_cls, field, value, type, tags):
-        """
-        Create new instance of MetaDatum(field, value, type, tags)
-        """
-        ...
-    def __repr__(self):
-        """
-        Return a nicely formatted representation string
-        """
-        ...
-    def __getnewargs__(self):
-        """
-        Return self as a plain tuple. Used by copy and pickle.
-        """
-        ...
-    ...
-
-class DynamoDbClient(object, metaclass=type):
-    def __init__(self):
-        ...
-    def save_foreach_cardinality(self, foreach_split_task_id, foreach_cardinality, ttl):
-        ...
-    def save_parent_task_id_for_foreach_join(self, foreach_split_task_id, foreach_join_parent_task_id):
-        ...
-    def get_parent_task_ids_for_foreach_join(self, foreach_split_task_id):
-        ...
-    ...
+from ....metadata_provider.metadata import MetaDatum as MetaDatum
+from .dynamo_db_client import DynamoDbClient as DynamoDbClient
 
 class StepFunctionsInternalDecorator(metaflow.decorators.StepDecorator, metaclass=type):
     def task_pre_step(self, step_name, task_datastore, metadata, run_id, task_id, flow, graph, retry_count, max_user_code_retries, ubf_context, inputs):
@@ -1,321 +1,84 @@
-
-#
-# MF version: 2.12.
-# Generated on 2024-11-
-
+######################################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.12.30 #
+# Generated on 2024-11-13T13:50:31.364717 #
+######################################################################################################
 
 from __future__ import annotations
 
+import metaflow
 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.runner.
+    import metaflow.runner.deployer_impl
+    import metaflow.plugins.aws.step_functions.step_functions_deployer_objects
+    import typing
 
-
-    def __init__(self, name, graph, flow, code_package_sha, code_package_url, production_token, metadata, flow_datastore, environment, event_logger, monitor, tags = None, namespace = None, username = None, max_workers = None, workflow_timeout = None, is_project = False, use_distributed_map = False):
-        ...
-    def to_json(self):
-        ...
-    def trigger_explanation(self):
-        ...
-    def deploy(self, log_execution_history):
-        ...
-    def schedule(self):
-        ...
-    @classmethod
-    def delete(cls, name):
-        ...
-    @classmethod
-    def terminate(cls, flow_name, name):
-        ...
-    @classmethod
-    def trigger(cls, name, parameters):
-        ...
-    @classmethod
-    def list(cls, name, states):
-        ...
-    @classmethod
-    def get_existing_deployment(cls, name):
-        ...
-    @classmethod
-    def get_execution(cls, state_machine_name, name):
-        ...
-    ...
+from ....runner.deployer_impl import DeployerImpl as DeployerImpl
 
-
+TYPE_CHECKING: bool
+
+class StepFunctionsDeployer(metaflow.runner.deployer_impl.DeployerImpl, metaclass=type):
     """
-
-    class variable that matches the name of the CLI group.
+    Deployer implementation for AWS Step Functions.
 
     Parameters
     ----------
-
-
-    show_output : bool, default True
-        Show the 'stdout' and 'stderr' to the console by default.
-    profile : Optional[str], default None
-        Metaflow profile to use for the deployment. If not specified, the default
-        profile is used.
-    env : Optional[Dict], default None
-        Additional environment variables to set for the deployment.
-    cwd : Optional[str], default None
-        The directory to run the subprocess in; if not specified, the current
-        directory is used.
-    file_read_timeout : int, default 3600
-        The timeout until which we try to read the deployer attribute file.
-    **kwargs : Any
-        Additional arguments that you would pass to `python myflow.py` before
-        the deployment command.
+    name : str, optional, default None
+        State Machine name. The flow name is used instead if this option is not specified.
     """
-    def __init__(self,
-        ...
-    def __enter__(self) -> metaflow.runner.deployer.DeployerImpl:
-        ...
-    def create(self, **kwargs) -> metaflow.runner.deployer.DeployedFlow:
+    def __init__(self, deployer_kwargs: typing.Dict[str, str], **kwargs):
         """
-
+        Initialize the StepFunctionsDeployer.
 
         Parameters
         ----------
+        deployer_kwargs : Dict[str, str]
+            The deployer-specific keyword arguments.
        **kwargs : Any
-            Additional arguments to pass to
-            command line arguments of `create`
-
-        Returns
-        -------
-        DeployedFlow
-            DeployedFlow object representing the deployed flow.
-
-        Raises
-        ------
-        Exception
-            If there is an error during deployment.
-        """
-        ...
-    def __exit__(self, exc_type, exc_value, traceback):
-        """
-        Cleanup resources on exit.
-        """
-        ...
-    def cleanup(self):
-        """
-        Cleanup resources.
+            Additional arguments to pass to the superclass constructor.
        """
        ...
-
-
-class DeployedFlow(object, metaclass=type):
-    """
-    DeployedFlow class represents a flow that has been deployed.
-
-    Parameters
-    ----------
-    deployer : DeployerImpl
-        Instance of the deployer implementation.
-    """
-    def __init__(self, deployer: metaflow.runner.deployer.DeployerImpl):
+    @property
+    def deployer_kwargs(self) -> typing.Dict[str, typing.Any]:
        ...
-
-
-class TriggeredRun(object, metaclass=type):
-    """
-    TriggeredRun class represents a run that has been triggered on a production orchestrator.
-
-    Only when the `start` task starts running, the `run` object corresponding to the run
-    becomes available.
-    """
-    def __init__(self, deployer: metaflow.runner.deployer.DeployerImpl, content: str):
+    @staticmethod
+    def deployed_flow_type() -> typing.Type["metaflow.plugins.aws.step_functions.step_functions_deployer_objects.StepFunctionsDeployedFlow"]:
        ...
-    def
+    def create(self, **kwargs) -> "metaflow.plugins.aws.step_functions.step_functions_deployer_objects.StepFunctionsDeployedFlow":
        """
-
+        Create a new AWS Step Functions State Machine deployment.

        Parameters
        ----------
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        authorize : str, optional, default None
+            Authorize using this production token. Required when re-deploying an existing flow
+            for the first time. The token is cached in METAFLOW_HOME.
+        generate_new_token : bool, optional, default False
+            Generate a new production token for this flow. Moves the production flow to a new namespace.
+        given_token : str, optional, default None
+            Use the given production token for this flow. Moves the production flow to the given namespace.
+        tags : List[str], optional, default None
+            Annotate all objects produced by AWS Step Functions runs with these tags.
+        user_namespace : str, optional, default None
+            Change the namespace from the default (production token) to the given tag.
+        only_json : bool, optional, default False
+            Only print out JSON sent to AWS Step Functions without deploying anything.
+        max_workers : int, optional, default 100
+            Maximum number of parallel processes.
+        workflow_timeout : int, optional, default None
+            Workflow timeout in seconds.
+        log_execution_history : bool, optional, default False
+            Log AWS Step Functions execution history to AWS CloudWatch Logs log group.
+        use_distributed_map : bool, optional, default False
+            Use AWS Step Functions Distributed Map instead of Inline Map for defining foreach
+            tasks in Amazon State Language.
+        deployer_attribute_file : str, optional, default None
+            Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.

        Returns
        -------
-
-
-        """
-        ...
-    ...
-
-def get_lower_level_group(api, top_level_kwargs: typing.Dict, _type: typing.Optional[str], deployer_kwargs: typing.Dict):
-    """
-    Retrieve a lower-level group from the API based on the type and provided arguments.
-
-    Parameters
-    ----------
-    api : MetaflowAPI
-        Metaflow API instance.
-    top_level_kwargs : Dict
-        Top-level keyword arguments to pass to the API.
-    _type : str
-        Type of the deployer implementation to target.
-    deployer_kwargs : Dict
-        Keyword arguments specific to the deployer.
-
-    Returns
-    -------
-    Any
-        The lower-level group object retrieved from the API.
-
-    Raises
-    ------
-    ValueError
-        If the `_type` is None.
-    """
-    ...
-
-def handle_timeout(tfp_runner_attribute, command_obj: "CommandManager", file_read_timeout: int):
-    """
-    Handle the timeout for a running subprocess command that reads a file
-    and raises an error with appropriate logs if a TimeoutError occurs.
-
-    Parameters
-    ----------
-    tfp_runner_attribute : NamedTemporaryFile
-        Temporary file that stores runner attribute data.
-    command_obj : CommandManager
-        Command manager object that encapsulates the running command details.
-    file_read_timeout : int
-        Timeout for reading the file.
-
-    Returns
-    -------
-    str
-        Content read from the temporary file.
-
-    Raises
-    ------
-    RuntimeError
-        If a TimeoutError occurs, it raises a RuntimeError with the command's
-        stdout and stderr logs.
-    """
-    ...
-
-def terminate(instance: metaflow.runner.deployer.TriggeredRun, **kwargs):
-    """
-    Terminate the running workflow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the terminate command.
-
-    Returns
-    -------
-    bool
-        True if the command was successful, False otherwise.
-    """
-    ...
-
-def production_token(instance: metaflow.runner.deployer.DeployedFlow):
-    """
-    Get the production token for the deployed flow.
-
-    Returns
-    -------
-    str, optional
-        The production token, None if it cannot be retrieved.
-    """
-    ...
-
-def list_runs(instance: metaflow.runner.deployer.DeployedFlow, states: typing.Optional[typing.List[str]] = None):
-    """
-    List runs of the deployed flow.
-
-    Parameters
-    ----------
-    states : Optional[List[str]], optional
-        A list of states to filter the runs by. Allowed values are:
-        RUNNING, SUCCEEDED, FAILED, TIMED_OUT, ABORTED.
-        If not provided, all states will be considered.
-
-    Returns
-    -------
-    List[TriggeredRun]
-        A list of TriggeredRun objects representing the runs of the deployed flow.
-
-    Raises
-    ------
-    ValueError
-        If any of the provided states are invalid or if there are duplicate states.
-    """
-    ...
-
-def delete(instance: metaflow.runner.deployer.DeployedFlow, **kwargs):
-    """
-    Delete the deployed flow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the delete command.
-
-    Returns
-    -------
-    bool
-        True if the command was successful, False otherwise.
-    """
-    ...
-
-def trigger(instance: metaflow.runner.deployer.DeployedFlow, **kwargs):
-    """
-    Trigger a new run for the deployed flow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the trigger command, `Parameters` in particular
-
-    Returns
-    -------
-    StepFunctionsTriggeredRun
-        The triggered run instance.
-
-    Raises
-    ------
-    Exception
-        If there is an error during the trigger process.
-    """
-    ...
-
-class StepFunctionsDeployer(metaflow.runner.deployer.DeployerImpl, metaclass=type):
-    """
-    Deployer implementation for AWS Step Functions.
-
-    Attributes
-    ----------
-    TYPE : ClassVar[Optional[str]]
-        The type of the deployer, which is "step-functions".
-    """
-    def __init__(self, deployer_kwargs, **kwargs):
-        """
-        Initialize the StepFunctionsDeployer.
-
-        Parameters
-        ----------
-        deployer_kwargs : dict
-            The deployer-specific keyword arguments.
-        **kwargs : Any
-            Additional arguments to pass to the superclass constructor.
+        StepFunctionsDeployedFlow
+            The Flow deployed to AWS Step Functions.
        """
        ...
    ...