ob-metaflow-stubs 2.11.4.1 (py2.py3-none-any.whl)
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- metaflow-stubs/__init__.pyi +2753 -0
- metaflow-stubs/cards.pyi +266 -0
- metaflow-stubs/cli.pyi +137 -0
- metaflow-stubs/client/__init__.pyi +993 -0
- metaflow-stubs/client/core.pyi +1425 -0
- metaflow-stubs/client/filecache.pyi +87 -0
- metaflow-stubs/events.pyi +107 -0
- metaflow-stubs/exception.pyi +98 -0
- metaflow-stubs/flowspec.pyi +297 -0
- metaflow-stubs/generated_for.txt +1 -0
- metaflow-stubs/includefile.pyi +524 -0
- metaflow-stubs/metadata/metadata.pyi +377 -0
- metaflow-stubs/metadata/util.pyi +18 -0
- metaflow-stubs/metaflow_config.pyi +263 -0
- metaflow-stubs/metaflow_current.pyi +327 -0
- metaflow-stubs/mflog/mflog.pyi +22 -0
- metaflow-stubs/multicore_utils.pyi +62 -0
- metaflow-stubs/parameters.pyi +114 -0
- metaflow-stubs/plugins/__init__.pyi +209 -0
- metaflow-stubs/plugins/airflow/__init__.pyi +9 -0
- metaflow-stubs/plugins/airflow/airflow.pyi +179 -0
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +90 -0
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +50 -0
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +137 -0
- metaflow-stubs/plugins/airflow/exception.pyi +27 -0
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +26 -0
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +60 -0
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +54 -0
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +50 -0
- metaflow-stubs/plugins/argo/__init__.pyi +9 -0
- metaflow-stubs/plugins/argo/argo_client.pyi +77 -0
- metaflow-stubs/plugins/argo/argo_events.pyi +79 -0
- metaflow-stubs/plugins/argo/argo_workflows.pyi +604 -0
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +180 -0
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +169 -0
- metaflow-stubs/plugins/aws/__init__.pyi +9 -0
- metaflow-stubs/plugins/aws/aws_client.pyi +22 -0
- metaflow-stubs/plugins/aws/aws_utils.pyi +93 -0
- metaflow-stubs/plugins/aws/batch/__init__.pyi +9 -0
- metaflow-stubs/plugins/aws/batch/batch.pyi +120 -0
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +42 -0
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +159 -0
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +145 -0
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +9 -0
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +73 -0
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +9 -0
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +22 -0
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +27 -0
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +18 -0
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +17 -0
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +220 -0
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +139 -0
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +36 -0
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +49 -0
- metaflow-stubs/plugins/azure/__init__.pyi +9 -0
- metaflow-stubs/plugins/azure/azure_credential.pyi +28 -0
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +28 -0
- metaflow-stubs/plugins/azure/azure_utils.pyi +76 -0
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +52 -0
- metaflow-stubs/plugins/azure/includefile_support.pyi +63 -0
- metaflow-stubs/plugins/cards/__init__.pyi +9 -0
- metaflow-stubs/plugins/cards/card_cli.pyi +557 -0
- metaflow-stubs/plugins/cards/card_client.pyi +178 -0
- metaflow-stubs/plugins/cards/card_creator.pyi +26 -0
- metaflow-stubs/plugins/cards/card_datastore.pyi +111 -0
- metaflow-stubs/plugins/cards/card_decorator.pyi +133 -0
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +67 -0
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +263 -0
- metaflow-stubs/plugins/cards/card_modules/card.pyi +62 -0
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +78 -0
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +77 -0
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +11 -0
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +114 -0
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +75 -0
- metaflow-stubs/plugins/cards/card_modules/components.pyi +251 -0
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +45 -0
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +24 -0
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +155 -0
- metaflow-stubs/plugins/cards/card_resolver.pyi +60 -0
- metaflow-stubs/plugins/cards/component_serializer.pyi +227 -0
- metaflow-stubs/plugins/cards/exception.pyi +71 -0
- metaflow-stubs/plugins/catch_decorator.pyi +58 -0
- metaflow-stubs/plugins/datatools/__init__.pyi +339 -0
- metaflow-stubs/plugins/datatools/local.pyi +82 -0
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +589 -0
- metaflow-stubs/plugins/datatools/s3/s3.pyi +875 -0
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +34 -0
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +45 -0
- metaflow-stubs/plugins/debug_logger.pyi +25 -0
- metaflow-stubs/plugins/debug_monitor.pyi +25 -0
- metaflow-stubs/plugins/environment_decorator.pyi +17 -0
- metaflow-stubs/plugins/events_decorator.pyi +34 -0
- metaflow-stubs/plugins/frameworks/__init__.pyi +9 -0
- metaflow-stubs/plugins/frameworks/pytorch.pyi +42 -0
- metaflow-stubs/plugins/gcp/__init__.pyi +9 -0
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +22 -0
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +26 -0
- metaflow-stubs/plugins/gcp/gs_utils.pyi +38 -0
- metaflow-stubs/plugins/gcp/includefile_support.pyi +63 -0
- metaflow-stubs/plugins/kubernetes/__init__.pyi +9 -0
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +127 -0
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +73 -0
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +62 -0
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +165 -0
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +100 -0
- metaflow-stubs/plugins/package_cli.pyi +9 -0
- metaflow-stubs/plugins/parallel_decorator.pyi +34 -0
- metaflow-stubs/plugins/project_decorator.pyi +36 -0
- metaflow-stubs/plugins/pypi/__init__.pyi +18 -0
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +59 -0
- metaflow-stubs/plugins/pypi/conda_environment.pyi +86 -0
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +22 -0
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +50 -0
- metaflow-stubs/plugins/pypi/utils.pyi +28 -0
- metaflow-stubs/plugins/resources_decorator.pyi +15 -0
- metaflow-stubs/plugins/retry_decorator.pyi +28 -0
- metaflow-stubs/plugins/secrets/__init__.pyi +21 -0
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +30 -0
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +75 -0
- metaflow-stubs/plugins/storage_executor.pyi +33 -0
- metaflow-stubs/plugins/tag_cli.pyi +370 -0
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +54 -0
- metaflow-stubs/plugins/timeout_decorator.pyi +39 -0
- metaflow-stubs/procpoll.pyi +51 -0
- metaflow-stubs/py.typed +0 -0
- metaflow-stubs/pylint_wrapper.pyi +31 -0
- metaflow-stubs/tagging_util.pyi +52 -0
- ob_metaflow_stubs-2.11.4.1.dist-info/METADATA +22 -0
- ob_metaflow_stubs-2.11.4.1.dist-info/RECORD +131 -0
- ob_metaflow_stubs-2.11.4.1.dist-info/WHEEL +6 -0
- ob_metaflow_stubs-2.11.4.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,180 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:43.971565 #
+##################################################################################
+
+from __future__ import annotations
+
+import typing
+if typing.TYPE_CHECKING:
+    import metaflow.metaflow_current
+    import metaflow.exception
+    import metaflow.decorators
+    import metaflow.parameters
+
+JSONType: metaflow.parameters.JSONTypeClass
+
+current: metaflow.metaflow_current.Current
+
+class MetaflowException(Exception, metaclass=type):
+    def __init__(self, msg = "", lineno = None):
+        ...
+    def __str__(self):
+        ...
+    ...
+
+class MetaflowInternalError(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+ARGO_WORKFLOWS_UI_URL: None
+
+KUBERNETES_NAMESPACE: str
+
+SERVICE_VERSION_CHECK: bool
+
+UI_URL: None
+
+def load_token(token_prefix):
+    ...
+
+def new_token(token_prefix, prev_token = None):
+    ...
+
+def store_token(token_prefix, token):
+    ...
+
+class EnvironmentDecorator(metaflow.decorators.StepDecorator, metaclass=type):
+    def runtime_step_cli(self, cli_args, retry_count, max_user_code_retries, ubf_context):
+        ...
+    ...
+
+class KubernetesDecorator(metaflow.decorators.StepDecorator, metaclass=type):
+    def __init__(self, attributes = None, statically_defined = False):
+        ...
+    def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
+        ...
+    def package_init(self, flow, step_name, environment):
+        ...
+    def runtime_init(self, flow, graph, package, run_id):
+        ...
+    def runtime_task_created(self, task_datastore, task_id, split_index, input_paths, is_cloned, ubf_context):
+        ...
+    def runtime_step_cli(self, cli_args, retry_count, max_user_code_retries, ubf_context):
+        ...
+    def task_pre_step(self, step_name, task_datastore, metadata, run_id, task_id, flow, graph, retry_count, max_retries, ubf_context, inputs):
+        ...
+    def task_finished(self, step_name, flow, graph, is_task_ok, retry_count, max_retries):
+        ...
+    ...
+
+def validate_tags(tags, existing_tags = None):
+    """
+    Raises MetaflowTaggingError if invalid based on these rules:
+
+    Tag set size is too large. But it's OK if tag set is not larger
+    than an existing tag set (if provided).
+
+    Then, we validate each tag. See validate_tag()
+    """
+    ...
+
+class ArgoWorkflows(object, metaclass=type):
+    def __init__(self, name, graph, flow, code_package_sha, code_package_url, production_token, metadata, flow_datastore, environment, event_logger, monitor, tags = None, namespace = None, username = None, max_workers = None, workflow_timeout = None, workflow_priority = None, auto_emit_argo_events = False, notify_on_error = False, notify_on_success = False, notify_slack_webhook_url = None, notify_pager_duty_integration_key = None):
+        ...
+    def __str__(self):
+        ...
+    def deploy(self):
+        ...
+    @staticmethod
+    def list_templates(flow_name, all = False):
+        ...
+    @staticmethod
+    def delete(name):
+        ...
+    @classmethod
+    def terminate(cls, flow_name, name):
+        ...
+    @staticmethod
+    def get_workflow_status(flow_name, name):
+        ...
+    @staticmethod
+    def suspend(name):
+        ...
+    @staticmethod
+    def unsuspend(name):
+        ...
+    @classmethod
+    def trigger(cls, name, parameters = None):
+        ...
+    def schedule(self):
+        ...
+    def trigger_explanation(self):
+        ...
+    @classmethod
+    def get_existing_deployment(cls, name):
+        ...
+    @classmethod
+    def get_execution(cls, name):
+        ...
+    def list_to_prose(self, items, singular):
+        ...
+    ...
+
+class IncorrectProductionToken(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+class RunIdMismatch(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+class IncorrectMetadataServiceVersion(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+class ArgoWorkflowsNameTooLong(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+class UnsupportedPythonVersion(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+def check_python_version(obj):
+    ...
+
+def check_metadata_service_version(obj):
+    ...
+
+def resolve_workflow_name(obj, name):
+    ...
+
+def make_flow(obj, token, name, tags, namespace, max_workers, workflow_timeout, workflow_priority, auto_emit_argo_events, notify_on_error, notify_on_success, notify_slack_webhook_url, notify_pager_duty_integration_key):
+    ...
+
+def resolve_token(name, token_prefix, obj, authorize, given_token, generate_new_token, is_project):
+    ...
+
+def validate_token(name, token_prefix, authorize, instructions_fn = None):
+    """
+    Validate that the production token matches that of the deployed flow.
+    In case both the user and token do not match, raises an error.
+    Optionally outputs instructions on token usage via the provided instruction_fn(flow_name, prev_user)
+    """
+    ...
+
+def validate_run_id(workflow_name, token_prefix, authorize, run_id, instructions_fn = None):
+    """
+    Validates that a run_id adheres to the Argo Workflows naming rules, and
+    that it belongs to the current flow (accounting for project branch as well).
+    """
+    ...
+
+def sanitize_for_argo(text):
+    """
+    Sanitizes a string so it does not contain characters that are not permitted in Argo Workflow resource names.
+    """
+    ...
+
+def remap_status(status):
+    """
+    Group similar Argo Workflow statuses together in order to have similar output to step functions statuses.
+    """
+    ...
+
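The helpers at the end of this hunk only carry one-line docstrings, so here is a minimal illustrative sketch of what Argo-style name sanitization typically involves (Kubernetes/Argo resource names are restricted to lowercase alphanumerics and "-"). The function below is an assumption for illustration only, not the `sanitize_for_argo` implementation shipped in this wheel.

```python
import re

def sanitize_for_argo_demo(text: str) -> str:
    """Illustrative only: keep RFC 1123-safe characters for an Argo resource name.

    The packaged metaflow.plugins.argo.argo_workflows_cli.sanitize_for_argo
    may differ in detail; this sketch just mirrors the documented intent.
    """
    # Argo/Kubernetes resource names allow lowercase alphanumerics and '-'.
    sanitized = re.sub(r"[^a-z0-9-]", "", text.lower())
    # Names must start and end with an alphanumeric character.
    return sanitized.strip("-")

print(sanitize_for_argo_demo("MyFlow_v2"))  # -> "myflowv2"
```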
@@ -0,0 +1,169 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:43.961868 #
+##################################################################################
+
+from __future__ import annotations
+
+import typing
+if typing.TYPE_CHECKING:
+    import metaflow.metaflow_current
+    import metaflow
+    import metaflow.events
+    import metaflow.decorators
+
+current: metaflow.metaflow_current.Current
+
+class Trigger(object, metaclass=type):
+    def __init__(self, _meta = None):
+        ...
+    @classmethod
+    def from_runs(cls, run_objs: typing.List["metaflow.Run"]):
+        ...
+    @property
+    def event(self) -> typing.Optional[metaflow.events.MetaflowEvent]:
+        """
+        The `MetaflowEvent` object corresponding to the triggering event.
+
+        If multiple events triggered the run, this property is the latest event.
+
+        Returns
+        -------
+        MetaflowEvent, optional
+            The latest event that triggered the run, if applicable.
+        """
+        ...
+    @property
+    def events(self) -> typing.Optional[typing.List[metaflow.events.MetaflowEvent]]:
+        """
+        The list of `MetaflowEvent` objects correspondings to all the triggering events.
+
+        Returns
+        -------
+        List[MetaflowEvent], optional
+            List of all events that triggered the run
+        """
+        ...
+    @property
+    def run(self) -> typing.Optional["metaflow.Run"]:
+        """
+        The corresponding `Run` object if the triggering event is a Metaflow run.
+
+        In case multiple runs triggered the run, this property is the latest run.
+        Returns `None` if none of the triggering events are a `Run`.
+
+        Returns
+        -------
+        Run, optional
+            Latest Run that triggered this run, if applicable.
+        """
+        ...
+    @property
+    def runs(self) -> typing.Optional[typing.List["metaflow.Run"]]:
+        """
+        The list of `Run` objects in the triggering events.
+        Returns `None` if none of the triggering events are `Run` objects.
+
+        Returns
+        -------
+        List[Run], optional
+            List of runs that triggered this run, if applicable.
+        """
+        ...
+    def __getitem__(self, key: str) -> typing.Union["metaflow.Run", metaflow.events.MetaflowEvent]:
+        """
+        If triggering events are runs, `key` corresponds to the flow name of the triggering run.
+        Otherwise, `key` corresponds to the event name and a `MetaflowEvent` object is returned.
+
+        Returns
+        -------
+        Union[Run, MetaflowEvent]
+            `Run` object if triggered by a run. Otherwise returns a `MetaflowEvent`.
+        """
+        ...
+    def __iter__(self):
+        ...
+    def __contains__(self, ident: str) -> bool:
+        ...
+    ...
+
+class MetaDatum(tuple, metaclass=type):
+    @staticmethod
+    def __new__(_cls, field, value, type, tags):
+        """
+        Create new instance of MetaDatum(field, value, type, tags)
+        """
+        ...
+    def __repr__(self):
+        """
+        Return a nicely formatted representation string
+        """
+        ...
+    def __getnewargs__(self):
+        """
+        Return self as a plain tuple. Used by copy and pickle.
+        """
+        ...
+    ...
+
+ARGO_EVENTS_WEBHOOK_URL: None
+
+class ArgoEvent(object, metaclass=type):
+    def __init__(self, name, url = None, payload = None, access_token = None):
+        ...
+    def add_to_payload(self, key, value):
+        """
+        Add a key-value pair in the payload. This is typically used to set parameters
+        of triggered flows. Often, `key` is the parameter name you want to set to
+        `value`. Overrides any existing value of `key`.
+
+        Parameters
+        ----------
+        key : str
+            Key
+        value : str
+            Value
+        """
+        ...
+    def safe_publish(self, payload = None, ignore_errors = True):
+        """
+        Publishes an event when called inside a deployed workflow. Outside a deployed workflow
+        this function does nothing.
+
+        Use this function inside flows to create events safely. As this function is a no-op
+        for local runs, you can safely call it during local development without causing unintended
+        side-effects. It takes effect only when deployed on Argo Workflows.
+
+        Parameters
+        ----------
+        payload : dict
+            Additional key-value pairs to add to the payload.
+        ignore_errors : bool, default True
+            If True, events are created on a best effort basis - errors are silently ignored.
+        """
+        ...
+    def publish(self, payload = None, force = True, ignore_errors = True):
+        """
+        Publishes an event.
+
+        Note that the function returns immediately after the event has been sent. It
+        does not wait for flows to start, nor it guarantees that any flows will start.
+
+        Parameters
+        ----------
+        payload : dict
+            Additional key-value pairs to add to the payload.
+        ignore_errors : bool, default True
+            If True, events are created on a best effort basis - errors are silently ignored.
+        """
+        ...
+    ...
+
+class ArgoWorkflowsInternalDecorator(metaflow.decorators.StepDecorator, metaclass=type):
+    def task_pre_step(self, step_name, task_datastore, metadata, run_id, task_id, flow, graph, retry_count, max_user_code_retries, ubf_context, inputs):
+        ...
+    def task_finished(self, step_name, flow, graph, is_task_ok, retry_count, max_user_code_retries):
+        ...
+    ...
+
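This hunk stubs the event-triggering surface: `Trigger` (exposed to user code as `current.trigger`) and `ArgoEvent`. A minimal sketch of how a flow typically uses that surface is shown below; the event names are hypothetical, and the `metaflow.integrations` import path is the one documented by Metaflow rather than something defined in this stub package.

```python
# Illustrative sketch: assumes some upstream deployment emits an event
# named "data_updated"; all names here are hypothetical.
from metaflow import FlowSpec, step, trigger, current
from metaflow.integrations import ArgoEvent  # documented Metaflow import path

@trigger(event="data_updated")
class EventDrivenFlow(FlowSpec):

    @step
    def start(self):
        # current.trigger is populated when the run was started by an event.
        if current.trigger and current.trigger.event:
            print("triggered by:", current.trigger.event.name)
        self.next(self.end)

    @step
    def end(self):
        # safe_publish is a no-op outside a deployed Argo Workflows run,
        # so it is safe to keep during local development.
        ArgoEvent(name="downstream_ready").safe_publish(
            payload={"source": "EventDrivenFlow"}, ignore_errors=True
        )

if __name__ == "__main__":
    EventDrivenFlow()
```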
@@ -0,0 +1,9 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:43.928423 #
+##################################################################################
+
+from __future__ import annotations
+
+
@@ -0,0 +1,22 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:43.956839 #
+##################################################################################
+
+from __future__ import annotations
+
+
+cached_aws_sandbox_creds: None
+
+cached_provider_class: None
+
+class Boto3ClientProvider(object, metaclass=type):
+    @staticmethod
+    def get_client(module, with_error = False, role_arn = None, session_vars = None, client_params = None):
+        ...
+    ...
+
+def get_aws_client(module, with_error = False, role_arn = None, session_vars = None, client_params = None):
+    ...
+
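`get_aws_client` is the internal helper the AWS plugins use to build boto3 clients with Metaflow's configured credentials. A minimal sketch, assuming boto3 and valid AWS credentials are available; the signature is taken from the stub above, and this is an internal helper rather than a stability-guaranteed public API.

```python
# Sketch only: get_aws_client is a Metaflow-internal helper.
from metaflow.plugins.aws.aws_client import get_aws_client

# Returns a boto3 client built with Metaflow's configured credentials;
# role_arn / session_vars / client_params are optional, per the stubbed signature.
s3 = get_aws_client("s3")
print(type(s3))  # a botocore S3 client, assuming valid AWS credentials
```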
@@ -0,0 +1,93 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:43.956196 #
+##################################################################################
+
+from __future__ import annotations
+
+
+class MetaflowException(Exception, metaclass=type):
+    def __init__(self, msg = "", lineno = None):
+        ...
+    def __str__(self):
+        ...
+    ...
+
+MAX_MEMORY_PER_TASK: None
+
+MAX_CPU_PER_TASK: None
+
+def get_ec2_instance_metadata():
+    """
+    Fetches the EC2 instance metadata through AWS instance metadata service
+
+    Returns either an empty dictionary, or one with the keys
+        - ec2-instance-id
+        - ec2-instance-type
+        - ec2-region
+        - ec2-availability-zone
+    """
+    ...
+
+def get_docker_registry(image_uri):
+    """
+    Explanation:
+        (.+?(?:[:.].+?)\/)? - [GROUP 0] REGISTRY
+            .+? - A registry must start with at least one character
+            (?:[:.].+?)\/ - A registry must have ":" or "." and end with "/"
+            ? - Make a registry optional
+        (.*?) - [GROUP 1] REPOSITORY
+            .*? - Get repository name until separator
+        (?:[@:])? - SEPARATOR
+            ?: - Don't capture separator
+            [@:] - The separator must be either "@" or ":"
+            ? - The separator is optional
+        ((?<=[@:]).*)? - [GROUP 2] TAG / DIGEST
+            (?<=[@:]) - A tag / digest must be preceded by "@" or ":"
+            .* - Capture rest of tag / digest
+            ? - A tag / digest is optional
+    Examples:
+        image
+            - None
+            - image
+            - None
+        example/image
+            - None
+            - example/image
+            - None
+        example/image:tag
+            - None
+            - example/image
+            - tag
+        example.domain.com/example/image:tag
+            - example.domain.com/
+            - example/image
+            - tag
+        123.123.123.123:123/example/image:tag
+            - 123.123.123.123:123/
+            - example/image
+            - tag
+        example.domain.com/example/image@sha256:45b23dee0
+            - example.domain.com/
+            - example/image
+            - sha256:45b23dee0
+    """
+    ...
+
+def compute_resource_attributes(decos, compute_deco, step_name, resource_defaults):
+    """
+    Compute resource values taking into account defaults, the values specified
+    in the compute decorator (like @batch or @kubernetes) directly, and
+    resources specified via @resources decorator.
+
+    Returns a dictionary of resource attr -> value (str).
+    """
+    ...
+
+def sanitize_batch_tag(key, value):
+    """
+    Sanitize a key and value for use as a Batch tag.
+    """
+    ...
+
@@ -0,0 +1,9 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:43.956310 #
+##################################################################################
+
+from __future__ import annotations
+
+
@@ -0,0 +1,120 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:44.002020 #
+##################################################################################
+
+from __future__ import annotations
+
+import typing
+if typing.TYPE_CHECKING:
+    import metaflow.exception
+
+class S3Tail(object, metaclass=type):
+    def __init__(self, s3url):
+        ...
+    def reset_client(self, hard_reset = False):
+        ...
+    def clone(self, s3url):
+        ...
+    @property
+    def bytes_read(self):
+        ...
+    @property
+    def tail(self):
+        ...
+    def __iter__(self):
+        ...
+    def _make_range_request(self, *args, **kwargs):
+        ...
+    ...
+
+def sanitize_batch_tag(key, value):
+    """
+    Sanitize a key and value for use as a Batch tag.
+    """
+    ...
+
+class MetaflowException(Exception, metaclass=type):
+    def __init__(self, msg = "", lineno = None):
+        ...
+    def __str__(self):
+        ...
+    ...
+
+OTEL_ENDPOINT: None
+
+SERVICE_INTERNAL_URL: None
+
+DATATOOLS_S3ROOT: None
+
+DATASTORE_SYSROOT_S3: None
+
+DEFAULT_METADATA: str
+
+SERVICE_HEADERS: dict
+
+BATCH_EMIT_TAGS: bool
+
+CARD_S3ROOT: None
+
+S3_ENDPOINT_URL: None
+
+DEFAULT_SECRETS_BACKEND_TYPE: None
+
+AWS_SECRETS_MANAGER_DEFAULT_REGION: None
+
+S3_SERVER_SIDE_ENCRYPTION: None
+
+BASH_SAVE_LOGS: str
+
+class BatchClient(object, metaclass=type):
+    def __init__(self):
+        ...
+    def active_job_queues(self):
+        ...
+    def unfinished_jobs(self):
+        ...
+    def describe_jobs(self, job_ids):
+        ...
+    def describe_job_queue(self, job_queue):
+        ...
+    def job(self):
+        ...
+    def attach_job(self, job_id):
+        ...
+    def region(self):
+        ...
+    ...
+
+LOGS_DIR: str
+
+STDOUT_FILE: str
+
+STDERR_FILE: str
+
+STDOUT_PATH: str
+
+STDERR_PATH: str
+
+class BatchException(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+class BatchKilledException(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+class Batch(object, metaclass=type):
+    def __init__(self, metadata, environment):
+        ...
+    def list_jobs(self, flow_name, run_id, user, echo):
+        ...
+    def kill_jobs(self, flow_name, run_id, user, echo):
+        ...
+    def create_job(self, step_name, step_cli, task_spec, code_package_sha, code_package_url, code_package_ds, image, queue, iam_role = None, execution_role = None, cpu = None, gpu = None, memory = None, run_time_limit = None, shared_memory = None, max_swap = None, swappiness = None, inferentia = None, efa = None, env = {}, attrs = {}, host_volumes = None, efs_volumes = None, use_tmpfs = None, tmpfs_tempdir = None, tmpfs_size = None, tmpfs_path = None, num_parallel = 0):
+        ...
+    def launch_job(self, step_name, step_cli, task_spec, code_package_sha, code_package_url, code_package_ds, image, queue, iam_role = None, execution_role = None, cpu = None, gpu = None, memory = None, run_time_limit = None, shared_memory = None, max_swap = None, swappiness = None, inferentia = None, efa = None, host_volumes = None, efs_volumes = None, use_tmpfs = None, tmpfs_tempdir = None, tmpfs_size = None, tmpfs_path = None, num_parallel = 0, env = {}, attrs = {}):
+        ...
+    def wait(self, stdout_location, stderr_location, echo = None):
+        ...
+    ...
+
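This hunk stubs the machinery behind Metaflow's `@batch` step decorator. A user-level sketch of how that machinery is normally reached is shown below; the queue and image values are placeholders, not values taken from this package.

```python
# Sketch of the user-facing entry point into these Batch internals; the queue
# and image below are hypothetical placeholders.
from metaflow import FlowSpec, batch, resources, step

class BatchDemoFlow(FlowSpec):

    @resources(cpu=2, memory=8000)  # reconciled with @batch values, cf. compute_resource_attributes above
    @batch(queue="my-batch-queue", image="python:3.11")
    @step
    def start(self):
        print("running on AWS Batch")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```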
@@ -0,0 +1,42 @@
+##################################################################################
+# Auto-generated Metaflow stub file #
+# MF version: 2.11.4.1 #
+# Generated on 2024-02-28T05:30:44.003931 #
+##################################################################################
+
+from __future__ import annotations
+
+import typing
+if typing.TYPE_CHECKING:
+    import metaflow.exception
+
+class CommandException(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+
+METAFLOW_EXIT_DISALLOW_RETRY: int
+
+def sync_local_metadata_from_datastore(metadata_local_dir, task_ds):
+    ...
+
+DATASTORE_LOCAL_DIR: str
+
+TASK_LOG_SOURCE: str
+
+class Batch(object, metaclass=type):
+    def __init__(self, metadata, environment):
+        ...
+    def list_jobs(self, flow_name, run_id, user, echo):
+        ...
+    def kill_jobs(self, flow_name, run_id, user, echo):
+        ...
+    def create_job(self, step_name, step_cli, task_spec, code_package_sha, code_package_url, code_package_ds, image, queue, iam_role = None, execution_role = None, cpu = None, gpu = None, memory = None, run_time_limit = None, shared_memory = None, max_swap = None, swappiness = None, inferentia = None, efa = None, env = {}, attrs = {}, host_volumes = None, efs_volumes = None, use_tmpfs = None, tmpfs_tempdir = None, tmpfs_size = None, tmpfs_path = None, num_parallel = 0):
+        ...
+    def launch_job(self, step_name, step_cli, task_spec, code_package_sha, code_package_url, code_package_ds, image, queue, iam_role = None, execution_role = None, cpu = None, gpu = None, memory = None, run_time_limit = None, shared_memory = None, max_swap = None, swappiness = None, inferentia = None, efa = None, host_volumes = None, efs_volumes = None, use_tmpfs = None, tmpfs_tempdir = None, tmpfs_size = None, tmpfs_path = None, num_parallel = 0, env = {}, attrs = {}):
+        ...
+    def wait(self, stdout_location, stderr_location, echo = None):
+        ...
+    ...
+
+class BatchKilledException(metaflow.exception.MetaflowException, metaclass=type):
+    ...
+