ob-metaflow 2.11.13.1__py2.py3-none-any.whl → 2.19.7.1rc0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow/R.py +10 -7
- metaflow/__init__.py +40 -25
- metaflow/_vendor/imghdr/__init__.py +186 -0
- metaflow/_vendor/importlib_metadata/__init__.py +1063 -0
- metaflow/_vendor/importlib_metadata/_adapters.py +68 -0
- metaflow/_vendor/importlib_metadata/_collections.py +30 -0
- metaflow/_vendor/importlib_metadata/_compat.py +71 -0
- metaflow/_vendor/importlib_metadata/_functools.py +104 -0
- metaflow/_vendor/importlib_metadata/_itertools.py +73 -0
- metaflow/_vendor/importlib_metadata/_meta.py +48 -0
- metaflow/_vendor/importlib_metadata/_text.py +99 -0
- metaflow/_vendor/importlib_metadata/py.typed +0 -0
- metaflow/_vendor/typeguard/__init__.py +48 -0
- metaflow/_vendor/typeguard/_checkers.py +1070 -0
- metaflow/_vendor/typeguard/_config.py +108 -0
- metaflow/_vendor/typeguard/_decorators.py +233 -0
- metaflow/_vendor/typeguard/_exceptions.py +42 -0
- metaflow/_vendor/typeguard/_functions.py +308 -0
- metaflow/_vendor/typeguard/_importhook.py +213 -0
- metaflow/_vendor/typeguard/_memo.py +48 -0
- metaflow/_vendor/typeguard/_pytest_plugin.py +127 -0
- metaflow/_vendor/typeguard/_suppression.py +86 -0
- metaflow/_vendor/typeguard/_transformer.py +1229 -0
- metaflow/_vendor/typeguard/_union_transformer.py +55 -0
- metaflow/_vendor/typeguard/_utils.py +173 -0
- metaflow/_vendor/typeguard/py.typed +0 -0
- metaflow/_vendor/typing_extensions.py +3641 -0
- metaflow/_vendor/v3_7/importlib_metadata/__init__.py +1063 -0
- metaflow/_vendor/v3_7/importlib_metadata/_adapters.py +68 -0
- metaflow/_vendor/v3_7/importlib_metadata/_collections.py +30 -0
- metaflow/_vendor/v3_7/importlib_metadata/_compat.py +71 -0
- metaflow/_vendor/v3_7/importlib_metadata/_functools.py +104 -0
- metaflow/_vendor/v3_7/importlib_metadata/_itertools.py +73 -0
- metaflow/_vendor/v3_7/importlib_metadata/_meta.py +48 -0
- metaflow/_vendor/v3_7/importlib_metadata/_text.py +99 -0
- metaflow/_vendor/v3_7/importlib_metadata/py.typed +0 -0
- metaflow/_vendor/v3_7/typeguard/__init__.py +48 -0
- metaflow/_vendor/v3_7/typeguard/_checkers.py +906 -0
- metaflow/_vendor/v3_7/typeguard/_config.py +108 -0
- metaflow/_vendor/v3_7/typeguard/_decorators.py +237 -0
- metaflow/_vendor/v3_7/typeguard/_exceptions.py +42 -0
- metaflow/_vendor/v3_7/typeguard/_functions.py +310 -0
- metaflow/_vendor/v3_7/typeguard/_importhook.py +213 -0
- metaflow/_vendor/v3_7/typeguard/_memo.py +48 -0
- metaflow/_vendor/v3_7/typeguard/_pytest_plugin.py +100 -0
- metaflow/_vendor/v3_7/typeguard/_suppression.py +88 -0
- metaflow/_vendor/v3_7/typeguard/_transformer.py +1207 -0
- metaflow/_vendor/v3_7/typeguard/_union_transformer.py +54 -0
- metaflow/_vendor/v3_7/typeguard/_utils.py +169 -0
- metaflow/_vendor/v3_7/typeguard/py.typed +0 -0
- metaflow/_vendor/v3_7/typing_extensions.py +3072 -0
- metaflow/_vendor/yaml/__init__.py +427 -0
- metaflow/_vendor/yaml/composer.py +139 -0
- metaflow/_vendor/yaml/constructor.py +748 -0
- metaflow/_vendor/yaml/cyaml.py +101 -0
- metaflow/_vendor/yaml/dumper.py +62 -0
- metaflow/_vendor/yaml/emitter.py +1137 -0
- metaflow/_vendor/yaml/error.py +75 -0
- metaflow/_vendor/yaml/events.py +86 -0
- metaflow/_vendor/yaml/loader.py +63 -0
- metaflow/_vendor/yaml/nodes.py +49 -0
- metaflow/_vendor/yaml/parser.py +589 -0
- metaflow/_vendor/yaml/reader.py +185 -0
- metaflow/_vendor/yaml/representer.py +389 -0
- metaflow/_vendor/yaml/resolver.py +227 -0
- metaflow/_vendor/yaml/scanner.py +1435 -0
- metaflow/_vendor/yaml/serializer.py +111 -0
- metaflow/_vendor/yaml/tokens.py +104 -0
- metaflow/cards.py +5 -0
- metaflow/cli.py +331 -785
- metaflow/cli_args.py +17 -0
- metaflow/cli_components/__init__.py +0 -0
- metaflow/cli_components/dump_cmd.py +96 -0
- metaflow/cli_components/init_cmd.py +52 -0
- metaflow/cli_components/run_cmds.py +546 -0
- metaflow/cli_components/step_cmd.py +334 -0
- metaflow/cli_components/utils.py +140 -0
- metaflow/client/__init__.py +1 -0
- metaflow/client/core.py +467 -73
- metaflow/client/filecache.py +75 -35
- metaflow/clone_util.py +7 -1
- metaflow/cmd/code/__init__.py +231 -0
- metaflow/cmd/develop/stub_generator.py +756 -288
- metaflow/cmd/develop/stubs.py +12 -28
- metaflow/cmd/main_cli.py +6 -4
- metaflow/cmd/make_wrapper.py +78 -0
- metaflow/datastore/__init__.py +1 -0
- metaflow/datastore/content_addressed_store.py +41 -10
- metaflow/datastore/datastore_set.py +11 -2
- metaflow/datastore/flow_datastore.py +156 -10
- metaflow/datastore/spin_datastore.py +91 -0
- metaflow/datastore/task_datastore.py +154 -39
- metaflow/debug.py +5 -0
- metaflow/decorators.py +404 -78
- metaflow/exception.py +8 -2
- metaflow/extension_support/__init__.py +527 -376
- metaflow/extension_support/_empty_file.py +2 -2
- metaflow/extension_support/plugins.py +49 -31
- metaflow/flowspec.py +482 -33
- metaflow/graph.py +210 -42
- metaflow/includefile.py +84 -40
- metaflow/lint.py +141 -22
- metaflow/meta_files.py +13 -0
- metaflow/{metadata → metadata_provider}/heartbeat.py +24 -8
- metaflow/{metadata → metadata_provider}/metadata.py +86 -1
- metaflow/metaflow_config.py +175 -28
- metaflow/metaflow_config_funcs.py +51 -3
- metaflow/metaflow_current.py +4 -10
- metaflow/metaflow_environment.py +139 -53
- metaflow/metaflow_git.py +115 -0
- metaflow/metaflow_profile.py +18 -0
- metaflow/metaflow_version.py +150 -66
- metaflow/mflog/__init__.py +4 -3
- metaflow/mflog/save_logs.py +2 -2
- metaflow/multicore_utils.py +31 -14
- metaflow/package/__init__.py +673 -0
- metaflow/packaging_sys/__init__.py +880 -0
- metaflow/packaging_sys/backend.py +128 -0
- metaflow/packaging_sys/distribution_support.py +153 -0
- metaflow/packaging_sys/tar_backend.py +99 -0
- metaflow/packaging_sys/utils.py +54 -0
- metaflow/packaging_sys/v1.py +527 -0
- metaflow/parameters.py +149 -28
- metaflow/plugins/__init__.py +74 -5
- metaflow/plugins/airflow/airflow.py +40 -25
- metaflow/plugins/airflow/airflow_cli.py +22 -5
- metaflow/plugins/airflow/airflow_decorator.py +1 -1
- metaflow/plugins/airflow/airflow_utils.py +5 -3
- metaflow/plugins/airflow/sensors/base_sensor.py +4 -4
- metaflow/plugins/airflow/sensors/external_task_sensor.py +2 -2
- metaflow/plugins/airflow/sensors/s3_sensor.py +2 -2
- metaflow/plugins/argo/argo_client.py +78 -33
- metaflow/plugins/argo/argo_events.py +6 -6
- metaflow/plugins/argo/argo_workflows.py +2410 -527
- metaflow/plugins/argo/argo_workflows_cli.py +571 -121
- metaflow/plugins/argo/argo_workflows_decorator.py +43 -12
- metaflow/plugins/argo/argo_workflows_deployer.py +106 -0
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +453 -0
- metaflow/plugins/argo/capture_error.py +73 -0
- metaflow/plugins/argo/conditional_input_paths.py +35 -0
- metaflow/plugins/argo/exit_hooks.py +209 -0
- metaflow/plugins/argo/jobset_input_paths.py +15 -0
- metaflow/plugins/argo/param_val.py +19 -0
- metaflow/plugins/aws/aws_client.py +10 -3
- metaflow/plugins/aws/aws_utils.py +55 -2
- metaflow/plugins/aws/batch/batch.py +72 -5
- metaflow/plugins/aws/batch/batch_cli.py +33 -10
- metaflow/plugins/aws/batch/batch_client.py +4 -3
- metaflow/plugins/aws/batch/batch_decorator.py +102 -35
- metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py +13 -10
- metaflow/plugins/aws/step_functions/dynamo_db_client.py +0 -3
- metaflow/plugins/aws/step_functions/production_token.py +1 -1
- metaflow/plugins/aws/step_functions/step_functions.py +65 -8
- metaflow/plugins/aws/step_functions/step_functions_cli.py +101 -7
- metaflow/plugins/aws/step_functions/step_functions_decorator.py +1 -2
- metaflow/plugins/aws/step_functions/step_functions_deployer.py +97 -0
- metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +264 -0
- metaflow/plugins/azure/azure_exceptions.py +1 -1
- metaflow/plugins/azure/azure_secret_manager_secrets_provider.py +240 -0
- metaflow/plugins/azure/azure_tail.py +1 -1
- metaflow/plugins/azure/includefile_support.py +2 -0
- metaflow/plugins/cards/card_cli.py +66 -30
- metaflow/plugins/cards/card_creator.py +25 -1
- metaflow/plugins/cards/card_datastore.py +21 -49
- metaflow/plugins/cards/card_decorator.py +132 -8
- metaflow/plugins/cards/card_modules/basic.py +112 -17
- metaflow/plugins/cards/card_modules/bundle.css +1 -1
- metaflow/plugins/cards/card_modules/card.py +16 -1
- metaflow/plugins/cards/card_modules/chevron/renderer.py +1 -1
- metaflow/plugins/cards/card_modules/components.py +665 -28
- metaflow/plugins/cards/card_modules/convert_to_native_type.py +36 -7
- metaflow/plugins/cards/card_modules/json_viewer.py +232 -0
- metaflow/plugins/cards/card_modules/main.css +1 -0
- metaflow/plugins/cards/card_modules/main.js +68 -49
- metaflow/plugins/cards/card_modules/renderer_tools.py +1 -0
- metaflow/plugins/cards/card_modules/test_cards.py +26 -12
- metaflow/plugins/cards/card_server.py +39 -14
- metaflow/plugins/cards/component_serializer.py +2 -9
- metaflow/plugins/cards/metadata.py +22 -0
- metaflow/plugins/catch_decorator.py +9 -0
- metaflow/plugins/datastores/azure_storage.py +10 -1
- metaflow/plugins/datastores/gs_storage.py +6 -2
- metaflow/plugins/datastores/local_storage.py +12 -6
- metaflow/plugins/datastores/spin_storage.py +12 -0
- metaflow/plugins/datatools/local.py +2 -0
- metaflow/plugins/datatools/s3/s3.py +126 -75
- metaflow/plugins/datatools/s3/s3op.py +254 -121
- metaflow/plugins/env_escape/__init__.py +3 -3
- metaflow/plugins/env_escape/client_modules.py +102 -72
- metaflow/plugins/env_escape/server.py +7 -0
- metaflow/plugins/env_escape/stub.py +24 -5
- metaflow/plugins/events_decorator.py +343 -185
- metaflow/plugins/exit_hook/__init__.py +0 -0
- metaflow/plugins/exit_hook/exit_hook_decorator.py +46 -0
- metaflow/plugins/exit_hook/exit_hook_script.py +52 -0
- metaflow/plugins/gcp/__init__.py +1 -1
- metaflow/plugins/gcp/gcp_secret_manager_secrets_provider.py +11 -6
- metaflow/plugins/gcp/gs_tail.py +10 -6
- metaflow/plugins/gcp/includefile_support.py +3 -0
- metaflow/plugins/kubernetes/kube_utils.py +108 -0
- metaflow/plugins/kubernetes/kubernetes.py +411 -130
- metaflow/plugins/kubernetes/kubernetes_cli.py +168 -36
- metaflow/plugins/kubernetes/kubernetes_client.py +104 -2
- metaflow/plugins/kubernetes/kubernetes_decorator.py +246 -88
- metaflow/plugins/kubernetes/kubernetes_job.py +253 -581
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +1071 -0
- metaflow/plugins/kubernetes/spot_metadata_cli.py +69 -0
- metaflow/plugins/kubernetes/spot_monitor_sidecar.py +109 -0
- metaflow/plugins/logs_cli.py +359 -0
- metaflow/plugins/{metadata → metadata_providers}/local.py +144 -84
- metaflow/plugins/{metadata → metadata_providers}/service.py +103 -26
- metaflow/plugins/metadata_providers/spin.py +16 -0
- metaflow/plugins/package_cli.py +36 -24
- metaflow/plugins/parallel_decorator.py +128 -11
- metaflow/plugins/parsers.py +16 -0
- metaflow/plugins/project_decorator.py +51 -5
- metaflow/plugins/pypi/bootstrap.py +357 -105
- metaflow/plugins/pypi/conda_decorator.py +82 -81
- metaflow/plugins/pypi/conda_environment.py +187 -52
- metaflow/plugins/pypi/micromamba.py +157 -47
- metaflow/plugins/pypi/parsers.py +268 -0
- metaflow/plugins/pypi/pip.py +88 -13
- metaflow/plugins/pypi/pypi_decorator.py +37 -1
- metaflow/plugins/pypi/utils.py +48 -2
- metaflow/plugins/resources_decorator.py +2 -2
- metaflow/plugins/secrets/__init__.py +3 -0
- metaflow/plugins/secrets/secrets_decorator.py +26 -181
- metaflow/plugins/secrets/secrets_func.py +49 -0
- metaflow/plugins/secrets/secrets_spec.py +101 -0
- metaflow/plugins/secrets/utils.py +74 -0
- metaflow/plugins/tag_cli.py +4 -7
- metaflow/plugins/test_unbounded_foreach_decorator.py +41 -6
- metaflow/plugins/timeout_decorator.py +3 -3
- metaflow/plugins/uv/__init__.py +0 -0
- metaflow/plugins/uv/bootstrap.py +128 -0
- metaflow/plugins/uv/uv_environment.py +72 -0
- metaflow/procpoll.py +1 -1
- metaflow/pylint_wrapper.py +5 -1
- metaflow/runner/__init__.py +0 -0
- metaflow/runner/click_api.py +717 -0
- metaflow/runner/deployer.py +470 -0
- metaflow/runner/deployer_impl.py +201 -0
- metaflow/runner/metaflow_runner.py +714 -0
- metaflow/runner/nbdeploy.py +132 -0
- metaflow/runner/nbrun.py +225 -0
- metaflow/runner/subprocess_manager.py +650 -0
- metaflow/runner/utils.py +335 -0
- metaflow/runtime.py +1078 -260
- metaflow/sidecar/sidecar_worker.py +1 -1
- metaflow/system/__init__.py +5 -0
- metaflow/system/system_logger.py +85 -0
- metaflow/system/system_monitor.py +108 -0
- metaflow/system/system_utils.py +19 -0
- metaflow/task.py +521 -225
- metaflow/tracing/__init__.py +7 -7
- metaflow/tracing/span_exporter.py +31 -38
- metaflow/tracing/tracing_modules.py +38 -43
- metaflow/tuple_util.py +27 -0
- metaflow/user_configs/__init__.py +0 -0
- metaflow/user_configs/config_options.py +563 -0
- metaflow/user_configs/config_parameters.py +598 -0
- metaflow/user_decorators/__init__.py +0 -0
- metaflow/user_decorators/common.py +144 -0
- metaflow/user_decorators/mutable_flow.py +512 -0
- metaflow/user_decorators/mutable_step.py +424 -0
- metaflow/user_decorators/user_flow_decorator.py +264 -0
- metaflow/user_decorators/user_step_decorator.py +749 -0
- metaflow/util.py +243 -27
- metaflow/vendor.py +23 -7
- metaflow/version.py +1 -1
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/Makefile +355 -0
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/Tiltfile +726 -0
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/pick_services.sh +105 -0
- ob_metaflow-2.19.7.1rc0.dist-info/METADATA +87 -0
- ob_metaflow-2.19.7.1rc0.dist-info/RECORD +445 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/WHEEL +1 -1
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/entry_points.txt +1 -0
- metaflow/_vendor/v3_5/__init__.py +0 -1
- metaflow/_vendor/v3_5/importlib_metadata/__init__.py +0 -644
- metaflow/_vendor/v3_5/importlib_metadata/_compat.py +0 -152
- metaflow/package.py +0 -188
- ob_metaflow-2.11.13.1.dist-info/METADATA +0 -85
- ob_metaflow-2.11.13.1.dist-info/RECORD +0 -308
- /metaflow/_vendor/{v3_5/zipp.py → zipp.py} +0 -0
- /metaflow/{metadata → metadata_provider}/__init__.py +0 -0
- /metaflow/{metadata → metadata_provider}/util.py +0 -0
- /metaflow/plugins/{metadata → metadata_providers}/__init__.py +0 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info/licenses}/LICENSE +0 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/top_level.txt +0 -0
metaflow/plugins/argo/argo_workflows_cli.py

```diff
@@ -4,12 +4,18 @@ import platform
 import re
 import sys
 from hashlib import sha1
+from time import sleep
 
-from metaflow import JSONType, current, decorators, parameters
+from metaflow import JSONType, Run, current, decorators, parameters
 from metaflow._vendor import click
-from metaflow.exception import
+from metaflow.exception import (
+    MetaflowException,
+    MetaflowInternalError,
+    MetaflowNotFound,
+)
 from metaflow.metaflow_config import (
     ARGO_WORKFLOWS_UI_URL,
+    FEAT_ALWAYS_UPLOAD_CODE_PACKAGE,
     KUBERNETES_NAMESPACE,
     SERVICE_VERSION_CHECK,
     UI_URL,
@@ -27,9 +33,18 @@ from metaflow.plugins.kubernetes.kubernetes_decorator import KubernetesDecorator
 from metaflow.tagging_util import validate_tags
 from metaflow.util import get_username, to_bytes, to_unicode, version_parse
 
-from .argo_workflows import ArgoWorkflows
+from .argo_workflows import ArgoWorkflows, ArgoWorkflowsException
+
+NEW_ARGO_NAMELENGTH_METAFLOW_VERSION = "2.17"
+
+VALID_NAME = re.compile(r"^[a-z]([a-z0-9\.\-]*[a-z0-9])?$")
 
-
+unsupported_decorators = {
+    "snowpark": "Step *%s* is marked for execution on Snowpark with Argo Workflows which isn't currently supported.",
+    "slurm": "Step *%s* is marked for execution on Slurm with Argo Workflows which isn't currently supported.",
+    "nvidia": "Step *%s* is marked for execution on Nvidia with Argo Workflows which isn't currently supported.",
+    "nvct": "Step *%s* is marked for execution on Nvct with Argo Workflows which isn't currently supported.",
+}
 
 
 class IncorrectProductionToken(MetaflowException):
@@ -73,7 +88,16 @@ def argo_workflows(obj, name=None):
         obj.workflow_name,
         obj.token_prefix,
         obj.is_project,
-    ) = resolve_workflow_name(obj, name)
+        obj._is_workflow_name_modified,
+        obj._exception_on_create,  # exception_on_create is used to prevent deploying new flows with too long names via --name
+    ) = resolve_workflow_name_v2(obj, name)
+    # Backward compatibility for Metaflow versions <=2.16 because of
+    # change in name length restrictions in Argo Workflows from 253 to 52
+    # characters.
+    (
+        obj._v1_workflow_name,
+        obj._v1_is_workflow_name_modified,
+    ) = resolve_workflow_name_v1(obj, name)
 
 
 @argo_workflows.command(help="Deploy a new version of this workflow to Argo Workflows.")
@@ -119,6 +143,7 @@ def argo_workflows(obj, name=None):
     is_flag=True,
     default=False,
     help="Only print out JSON sent to Argo Workflows. Do not deploy anything.",
+    hidden=True,
 )
 @click.option(
     "--max-workers",
@@ -157,14 +182,63 @@ def argo_workflows(obj, name=None):
 )
 @click.option(
     "--notify-slack-webhook-url",
-    default=
+    default=None,
     help="Slack incoming webhook url for workflow success/failure notifications.",
 )
 @click.option(
     "--notify-pager-duty-integration-key",
-    default=
+    default=None,
     help="PagerDuty Events API V2 Integration key for workflow success/failure notifications.",
 )
+@click.option(
+    "--notify-incident-io-api-key",
+    default=None,
+    help="Incident.io API V2 key for workflow success/failure notifications.",
+)
+@click.option(
+    "--incident-io-alert-source-config-id",
+    default=None,
+    help="Incident.io Alert source config ID. Example '01GW2G3V0S59R238FAHPDS1R66'",
+)
+@click.option(
+    "--incident-io-metadata",
+    default=None,
+    type=str,
+    multiple=True,
+    help="Incident.io Alert Custom Metadata field in the form of Key=Value",
+)
+@click.option(
+    "--enable-heartbeat-daemon/--no-enable-heartbeat-daemon",
+    default=False,
+    show_default=True,
+    help="Use a daemon container to broadcast heartbeats.",
+)
+@click.option(
+    "--deployer-attribute-file",
+    default=None,
+    show_default=True,
+    type=str,
+    help="Write the workflow name to the file specified. Used internally for Metaflow's Deployer API.",
+    hidden=True,
+)
+@click.option(
+    "--enable-error-msg-capture/--no-enable-error-msg-capture",
+    default=True,
+    show_default=True,
+    help="Capture stack trace of first failed task in exit hook.",
+)
+@click.option(
+    "--workflow-title",
+    default=None,
+    type=str,
+    help="Custom title for the workflow displayed in Argo Workflows UI. Defaults to `project_flow_name`. Supports markdown formatting.",
+)
+@click.option(
+    "--workflow-description",
+    default=None,
+    type=str,
+    help="Custom description for the workflow displayed in Argo Workflows UI. Defaults to the flow's docstring if available. Supports markdown formatting and multi-line text.",
+)
 @click.pass_obj
 def create(
     obj,
@@ -182,10 +256,39 @@ def create(
     notify_on_success=False,
     notify_slack_webhook_url=None,
     notify_pager_duty_integration_key=None,
+    notify_incident_io_api_key=None,
+    incident_io_alert_source_config_id=None,
+    incident_io_metadata=None,
+    enable_heartbeat_daemon=True,
+    workflow_title=None,
+    workflow_description=None,
+    deployer_attribute_file=None,
+    enable_error_msg_capture=False,
 ):
+    # check if we are supposed to block deploying the flow due to name length constraints.
+    if obj._exception_on_create is not None:
+        raise obj._exception_on_create
+
+    # TODO: Remove this once we have a proper validator system in place
+    for node in obj.graph:
+        for decorator, error_message in unsupported_decorators.items():
+            if any([d.name == decorator for d in node.decorators]):
+                raise MetaflowException(error_message % node.name)
+
     validate_tags(tags)
 
-
+    if deployer_attribute_file:
+        with open(deployer_attribute_file, "w", encoding="utf-8") as f:
+            json.dump(
+                {
+                    "name": obj.workflow_name,
+                    "flow_name": obj.flow.name,
+                    "metadata": obj.metadata.metadata_str(),
+                },
+                f,
+            )
+
+    obj.echo("Deploying *%s* to Argo Workflows..." % obj.flow.name, bold=True)
 
     if SERVICE_VERSION_CHECK:
         # TODO: Consider dispelling with this check since it's been 2 years since the
@@ -218,6 +321,13 @@ def create(
         notify_on_success,
         notify_slack_webhook_url,
         notify_pager_duty_integration_key,
+        notify_incident_io_api_key,
+        incident_io_alert_source_config_id,
+        incident_io_metadata,
+        enable_heartbeat_daemon,
+        enable_error_msg_capture,
+        workflow_title,
+        workflow_description,
     )
 
     if only_json:
@@ -227,7 +337,7 @@ def create(
         flow.deploy()
         obj.echo(
             "Workflow *{workflow_name}* "
-            "for flow *{name}*
+            "for flow *{name}* deployed to "
             "Argo Workflows successfully.\n".format(
                 workflow_name=obj.workflow_name, name=current.flow_name
             ),
@@ -236,8 +346,40 @@ def create(
         if obj._is_workflow_name_modified:
             obj.echo(
                 "Note that the flow was deployed with a modified name "
-                "due to Kubernetes naming conventions
-                "original flow name is stored in the workflow
+                "due to Kubernetes naming conventions on Argo Workflows. The "
+                "original flow name is stored in the workflow annotations.\n",
+                wrap=True,
+            )
+
+        if obj.workflow_name != obj._v1_workflow_name:
+            # Delete the old workflow if it exists
+            try:
+                ArgoWorkflows.delete(obj._v1_workflow_name)
+                obj.echo("Important!", bold=True, nl=False)
+                obj.echo(
+                    " To comply with new naming restrictions on Argo "
+                    "Workflows, this deployment replaced the previously "
+                    "deployed workflow {v1_workflow_name}.\n".format(
+                        v1_workflow_name=obj._v1_workflow_name
+                    ),
+                    wrap=True,
+                )
+            except ArgoWorkflowsException as e:
+                # TODO: Catch a more specific exception
+                pass
+
+            obj.echo("Warning! ", bold=True, nl=False)
+            obj.echo(
+                "Due to new naming restrictions on Argo Workflows, "
+                "re-deploying this flow with older versions of Metaflow (<{version}) "
+                "will result in the flow being deployed with a different name -\n"
+                "*{v1_workflow_name}* without replacing the version you just deployed. "
+                "This may result in duplicate executions of this flow. To avoid this issue, "
+                "always deploy this flow using Metaflow ≥{version} or specify the flow name with --name.".format(
+                    v1_workflow_name=obj._v1_workflow_name,
+                    version=NEW_ARGO_NAMELENGTH_METAFLOW_VERSION,
+                ),
+                wrap=True,
             )
 
         if ARGO_WORKFLOWS_UI_URL:
@@ -267,20 +409,20 @@ def create(
 
 
 def check_python_version(obj):
-    # argo-workflows integration for Metaflow isn't supported for Py versions below 3.
+    # argo-workflows integration for Metaflow isn't supported for Py versions below 3.6.
     # This constraint can very well be lifted if desired.
-    if sys.version_info < (3,
+    if sys.version_info < (3, 6):
         obj.echo("")
         obj.echo(
            "Metaflow doesn't support Argo Workflows for Python %s right now."
            % platform.python_version()
        )
        obj.echo(
-            "Please upgrade your Python interpreter to version 3.
+            "Please upgrade your Python interpreter to version 3.6 (or higher) or "
             "reach out to us at slack.outerbounds.co for more help."
         )
         raise UnsupportedPythonVersion(
-            "Try again with a more recent version of Python (>=3.
+            "Try again with a more recent version of Python (>=3.6)."
         )
 
 
@@ -317,9 +459,108 @@ def check_metadata_service_version(obj):
     )
 
 
-def resolve_workflow_name(obj, name):
+# Argo Workflows has a few restrictions on workflow names:
+# - Argo Workflow Template names can't be longer than 253 characters since
+#   they follow DNS Subdomain name restrictions.
+# - Argo Workflows stores workflow template names as a label in the workflow
+#   template metadata - workflows.argoproj.io/workflow-template, which follows
+#   RFC 1123, which is a strict subset of DNS Subdomain names and allows for
+#   63 characters.
+# - Argo Workflows appends a unix timestamp to the workflow name when the workflow
+#   is created (-1243856725) from a workflow template deployed as a cron workflow template
+#   reducing the number of characters available to 52.
+# - TODO: Check naming restrictions for Argo Events.
+
+# In summary -
+# - We truncate the workflow name to 45 characters to leave enough room for future
+#   enhancements to the Argo Workflows integration.
+# - We remove any underscores since Argo Workflows doesn't allow them.
+# - We convert the name to lower case.
+# - We remove + and @ as not allowed characters, which can be part of the
+#   project branch due to using email addresses as user names.
+# - We append a hash of the workflow name to the end to make it unique.
+
+# A complication here is that in previous versions of Metaflow (=<2.16), the limit was a
+# rather lax 253 characters - so we have two issues to contend with:
+# 1. Replacing any equivalent flows deployed using previous versions of Metaflow which
+#    adds a bit of complexity to the business logic.
+# 2. Breaking Metaflow users who have multiple versions of Metaflow floating in their
+#    organization. Imagine a scenario, where metaflow-v1 (253 chars) deploys the same
+#    flow which was previously deployed using the new metaflow-v2 (45 chars) - the user
+#    will end up with two workflows templates instead of one since metaflow-v1 has no
+#    awareness of the new name truncation logic introduced by metaflow-v2. Unfortunately,
+#    there is no way to avoid this scenario - so we will do our best to message to the
+#    user to not use an older version of Metaflow to redeploy affected flows.
+# ------------------------------------------------------------------------------------------
+# | metaflow-v1 (253 chars)        | metaflow-v2 (45 chars)         | Result            |
+# ------------------------------------------------------------------------------------------
+# | workflow_name_modified = True  | workflow_name_modified = False | Not possible      |
+# ------------------------------------------------------------------------------------------
+# | workflow_name_modified = False | workflow_name_modified = True  | Messaging needed  |
+# ------------------------------------------------------------------------------------------
+# | workflow_name_modified = False | workflow_name_modified = False | No message needed |
+# ------------------------------------------------------------------------------------------
+# | workflow_name_modified = True  | workflow_name_modified = True  | Messaging needed  |
+# ------------------------------------------------------------------------------------------
+
+
+def resolve_workflow_name_v1(obj, name):
+    # models the workflow_name calculation logic in Metaflow versions =<2.16
+    # important!! - should stay static including any future bugs
+    project = current.get("project_name")
+    is_workflow_name_modified = False
+    if project:
+        if name:
+            return None, False  # not possible in versions =<2.16
+        workflow_name = current.project_flow_name
+        if len(workflow_name) > 253:
+            name_hash = to_unicode(
+                base64.b32encode(sha1(to_bytes(workflow_name)).digest())
+            )[:8].lower()
+            workflow_name = "%s-%s" % (workflow_name[:242], name_hash)
+            is_workflow_name_modified = True
+        if not VALID_NAME.search(workflow_name):
+            workflow_name = (
+                re.compile(r"^[^A-Za-z0-9]+")
+                .sub("", workflow_name)
+                .replace("_", "")
+                .replace("@", "")
+                .replace("+", "")
+                .lower()
+            )
+            is_workflow_name_modified = True
+    else:
+        if name and not VALID_NAME.search(name):
+            return None, False  # not possible in versions =<2.16
+        workflow_name = name if name else current.flow_name
+        if len(workflow_name) > 253:
+            return None, False  # not possible in versions =<2.16
+        if not VALID_NAME.search(workflow_name):
+            # Note - since the original name sanitization was a surjective
+            # mapping, using it here is a bug, but we leave this in
+            # place since the usage of v1_workflow_name is to generate
+            # historical workflow names, so we need to replicate all
+            # the bugs too :'(
+
+            workflow_name = (
+                re.compile(r"^[^A-Za-z0-9]+")
+                .sub("", workflow_name)
+                .replace("_", "")
+                .replace("@", "")
+                .replace("+", "")
+                .lower()
+            )
+            is_workflow_name_modified = True
+    return workflow_name, is_workflow_name_modified
+
+
+def resolve_workflow_name_v2(obj, name):
+    # current logic for imputing workflow_name
+    limit = 45
     project = current.get("project_name")
-
+    is_workflow_name_modified = False
+    exception_on_create = None
+
     if project:
         if name:
             raise MetaflowException(
```
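For reference, the eight-character suffix that `resolve_workflow_name_v1` appends to over-long names is simply the head of a base32-encoded SHA-1 digest. A minimal standalone sketch of just that step (the 260-character flow name and the `v1_suffix` helper are invented for illustration):

```python
import base64
from hashlib import sha1


def v1_suffix(workflow_name: str) -> str:
    # First 8 characters of the base32-encoded SHA-1 digest, lower-cased,
    # mirroring resolve_workflow_name_v1 in the hunk above.
    digest = base64.b32encode(sha1(workflow_name.encode("utf-8")).digest())
    return digest[:8].decode("ascii").lower()


name = "x" * 260  # hypothetical project_flow_name longer than 253 characters
shortened = "%s-%s" % (name[:242], v1_suffix(name))
print(len(shortened))  # 251 = 242 + 1 + 8, back under the 253-character cap
```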
```diff
@@ -332,48 +573,86 @@ def resolve_workflow_name(obj, name):
             % to_unicode(base64.b32encode(sha1(project_branch).digest()))[:16]
         )
         is_project = True
-
-
-        # Workflows doesn't (DNS Subdomain names as defined in RFC 1123) - so we will
-        # remove any underscores as well as convert the name to lower case.
-        # Also remove + and @ as not allowed characters, which can be part of the
-        # project branch due to using email addresses as user names.
-        if len(workflow_name) > 253:
+
+        if len(workflow_name) > limit:
             name_hash = to_unicode(
                 base64.b32encode(sha1(to_bytes(workflow_name)).digest())
-            )[:
-
-
-
-
-
+            )[:5].lower()
+
+            # Generate a meaningful short name
+            project_name = project
+            branch_name = current.branch_name
+            flow_name = current.flow_name
+            parts = [project_name, branch_name, flow_name]
+            max_name_len = limit - 6
+            min_each = 7
+            total_len = sum(len(p) for p in parts)
+            remaining = max_name_len - 3 * min_each
+            extras = [int(remaining * len(p) / total_len) for p in parts]
+            while sum(extras) < remaining:
+                extras[extras.index(min(extras))] += 1
+            budgets = [min_each + e for e in extras]
+            proj_budget = budgets[0]
+            if len(project_name) <= proj_budget:
+                proj_str = project_name
+            else:
+                h = proj_budget // 2
+                t = proj_budget - h
+                proj_str = project_name[:h] + project_name[-t:]
+            branch_budget = budgets[1]
+            branch_str = branch_name[:branch_budget]
+            flow_budget = budgets[2]
+            if len(flow_name) <= flow_budget:
+                flow_str = flow_name
+            else:
+                h = flow_budget // 2
+                t = flow_budget - h
+                flow_str = flow_name[:h] + flow_name[-t:]
+            descriptive_name = sanitize_for_argo(
+                "%s.%s.%s" % (proj_str, branch_str, flow_str)
+            )
+            workflow_name = "%s-%s" % (descriptive_name, name_hash)
+            is_workflow_name_modified = True
     else:
         if name and not VALID_NAME.search(name):
             raise MetaflowException(
                 "Name '%s' contains invalid characters. The "
                 "name must consist of lower case alphanumeric characters, '-' or '.'"
-                ", and must start
+                ", and must start with an alphabetic character, "
+                "and end with an alphanumeric character." % name
             )
-
         workflow_name = name if name else current.flow_name
         token_prefix = workflow_name
         is_project = False
 
-        if len(workflow_name) >
-
-
+        if len(workflow_name) > limit:
+            # NOTE: We could have opted for truncating names specified by --name and flow_name
+            # as well, but chose to error instead due to the expectation that users would
+            # be intentionally explicit in their naming, and truncating these would lose
+            # information they intended to encode in the deployment.
+            exception_on_create = ArgoWorkflowsNameTooLong(
+                "The full name of the workflow:\n*%s*\nis longer than %s "
                 "characters.\n\n"
                 "To deploy this workflow to Argo Workflows, please "
                 "assign a shorter name\nusing the option\n"
-                "*argo-workflows --name <name> create*." %
+                "*argo-workflows --name <name> create*." % (name, limit)
             )
-            raise ArgoWorkflowsNameTooLong(msg)
 
-
-
-
+    if not VALID_NAME.search(workflow_name):
+        # NOTE: Even though sanitize_for_argo is surjective which can result in collisions,
+        # we still use it here since production tokens guard against name collisions
+        # and if we made it injective, metaflow 2.17 will result in every deployed
+        # flow's name changing, significantly increasing the blast radius of the change.
+        workflow_name = sanitize_for_argo(workflow_name)
+        is_workflow_name_modified = True
 
-    return
+    return (
+        workflow_name,
+        token_prefix.lower(),
+        is_project,
+        is_workflow_name_modified,
+        exception_on_create,
+    )
 
 
 def make_flow(
```
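The proportional budget split that `resolve_workflow_name_v2` uses to shorten `project.branch.flow` down to the 45-character limit can be read in isolation. A minimal sketch of just the allocation step, with invented names (`split_budgets` is not a function in the package):

```python
def split_budgets(parts, limit=45, hash_len=6, min_each=7):
    # limit - hash_len characters remain for "project.branch.flow" once the
    # "-" separator plus the 5-character hash are reserved. Every part is
    # guaranteed min_each characters; the remainder is split proportionally
    # to the parts' lengths, as in resolve_workflow_name_v2 above.
    max_name_len = limit - hash_len
    total_len = sum(len(p) for p in parts)
    remaining = max_name_len - 3 * min_each
    extras = [int(remaining * len(p) / total_len) for p in parts]
    while sum(extras) < remaining:  # hand out rounding leftovers
        extras[extras.index(min(extras))] += 1
    return [min_each + e for e in extras]


print(split_budgets(["forecasting", "user.jane", "DailySalesFlow"]))
# [13, 12, 14] -- sums to 39 = 45 - 6
```

Parts that exceed their budget are then clipped from both ends (head plus tail), which keeps the prefix and the distinctive ending of a long name visible in the final workflow name.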
```diff
@@ -390,6 +669,13 @@ def make_flow(
     notify_on_success,
     notify_slack_webhook_url,
     notify_pager_duty_integration_key,
+    notify_incident_io_api_key,
+    incident_io_alert_source_config_id,
+    incident_io_metadata,
+    enable_heartbeat_daemon,
+    enable_error_msg_capture,
+    workflow_title,
+    workflow_description,
 ):
     # TODO: Make this check less specific to Amazon S3 as we introduce
     # support for more cloud object stores.
@@ -399,15 +685,27 @@ def make_flow(
         )
 
     if (notify_on_error or notify_on_success) and not (
-        notify_slack_webhook_url
+        notify_slack_webhook_url
+        or notify_pager_duty_integration_key
+        or notify_incident_io_api_key
     ):
         raise MetaflowException(
-            "Notifications require specifying an incoming Slack webhook url via --notify-slack-webhook-url or
-            "
-            "notifications for your Slack workspace, follow the instructions at "
-            "https://api.slack.com/messaging/webhooks to generate a webhook url.\n
-            "generate an integration key by following the instructions at "
-            "https://support.pagerduty.com/docs/services-and-integrations#create-a-generic-events-api-integration"
+            "Notifications require specifying an incoming Slack webhook url via --notify-slack-webhook-url, PagerDuty events v2 integration key via --notify-pager-duty-integration-key or\n"
+            "Incident.io integration API key via --notify-incident-io-api-key.\n"
+            " If you would like to set up notifications for your Slack workspace, follow the instructions at "
+            "https://api.slack.com/messaging/webhooks to generate a webhook url.\n"
+            " For notifications through PagerDuty, generate an integration key by following the instructions at "
+            "https://support.pagerduty.com/docs/services-and-integrations#create-a-generic-events-api-integration\n"
+            " For notifications through Incident.io, generate an alert source config."
+        )
+
+    if (
+        (notify_on_error or notify_on_success)
+        and notify_incident_io_api_key
+        and incident_io_alert_source_config_id is None
+    ):
+        raise MetaflowException(
+            "Incident.io alerts require an alert source configuration ID. Please set one with --incident-io-alert-source-config-id"
         )
 
     # Attach @kubernetes and @environment decorator to the flow to
@@ -415,24 +713,37 @@ def make_flow(
     decorators._attach_decorators(
         obj.flow, [KubernetesDecorator.name, EnvironmentDecorator.name]
     )
+    decorators._init(obj.flow)
 
     decorators._init_step_decorators(
         obj.flow, obj.graph, obj.environment, obj.flow_datastore, obj.logger
     )
+    obj.graph = obj.flow._graph
 
     # Save the code package in the flow datastore so that both user code and
     # metaflow package can be retrieved during workflow execution.
     obj.package = MetaflowPackage(
-        obj.flow,
+        obj.flow,
+        obj.environment,
+        obj.echo,
+        suffixes=obj.package_suffixes,
+        flow_datastore=obj.flow_datastore if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE else None,
     )
-
-
-
+
+    # This blocks until the package is created
+    if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
+        package_url = obj.package.package_url()
+        package_sha = obj.package.package_sha()
+    else:
+        package_url, package_sha = obj.flow_datastore.save_data(
+            [obj.package.blob], len_hint=1
+        )[0]
 
     return ArgoWorkflows(
         name,
         obj.graph,
         obj.flow,
+        obj.package.package_metadata,
         package_sha,
         package_url,
         token,
@@ -452,6 +763,13 @@ def make_flow(
         notify_on_success=notify_on_success,
         notify_slack_webhook_url=notify_slack_webhook_url,
         notify_pager_duty_integration_key=notify_pager_duty_integration_key,
+        notify_incident_io_api_key=notify_incident_io_api_key,
+        incident_io_alert_source_config_id=incident_io_alert_source_config_id,
+        incident_io_metadata=incident_io_metadata,
+        enable_heartbeat_daemon=enable_heartbeat_daemon,
+        enable_error_msg_capture=enable_error_msg_capture,
+        workflow_title=workflow_title,
+        workflow_description=workflow_description,
     )
 
 
@@ -563,8 +881,16 @@ def resolve_token(
     type=str,
     help="Write the ID of this run to the file specified.",
 )
+@click.option(
+    "--deployer-attribute-file",
+    default=None,
+    show_default=True,
+    type=str,
+    help="Write the metadata and pathspec of this run to the file specified.\nUsed internally for Metaflow's Deployer API.",
+    hidden=True,
+)
 @click.pass_obj
-def trigger(obj, run_id_file=None, **kwargs):
+def trigger(obj, run_id_file=None, deployer_attribute_file=None, **kwargs):
     def _convert_value(param):
         # Swap `-` with `_` in parameter name to match click's behavior
         val = kwargs.get(param.name.replace("-", "_").lower())
@@ -580,16 +906,48 @@ def trigger(obj, run_id_file=None, **kwargs):
         if kwargs.get(param.name.replace("-", "_").lower()) is not None
     }
 
-    response = ArgoWorkflows.trigger(obj.workflow_name, params)
+    workflow_name_to_deploy = obj.workflow_name
+    # For users that upgraded the client but did not redeploy their flow,
+    # we fallback to old workflow names in case of a conflict.
+    if obj.workflow_name != obj._v1_workflow_name:
+        # use the old name only if there exists a deployment.
+        if ArgoWorkflows.get_existing_deployment(obj._v1_workflow_name):
+            obj.echo("Warning! ", bold=True, nl=False)
+            obj.echo(
+                "Found a deployment of this flow with an old style name, defaulted to triggering *%s*."
+                % obj._v1_workflow_name,
+                wrap=True,
+            )
+            obj.echo(
+                "Due to new naming restrictions on Argo Workflows, "
+                "this flow will have a shorter name with newer versions of Metaflow (>=%s) "
+                "which will allow it to be triggered through Argo UI as well. "
+                % NEW_ARGO_NAMELENGTH_METAFLOW_VERSION,
+                wrap=True,
+            )
+            obj.echo("re-deploy your flow in order to get rid of this message.")
+            workflow_name_to_deploy = obj._v1_workflow_name
+    response = ArgoWorkflows.trigger(workflow_name_to_deploy, params)
     run_id = "argo-" + response["metadata"]["name"]
 
     if run_id_file:
         with open(run_id_file, "w") as f:
             f.write(str(run_id))
 
+    if deployer_attribute_file:
+        with open(deployer_attribute_file, "w") as f:
+            json.dump(
+                {
+                    "name": workflow_name_to_deploy,
+                    "metadata": obj.metadata.metadata_str(),
+                    "pathspec": "/".join((obj.flow.name, run_id)),
+                },
+                f,
+            )
+
     obj.echo(
         "Workflow *{name}* triggered on Argo Workflows "
-        "(run-id *{run_id}*).".format(name=
+        "(run-id *{run_id}*).".format(name=workflow_name_to_deploy, run_id=run_id),
         bold=True,
     )
 
@@ -631,26 +989,57 @@ def delete(obj, authorize=None):
             "about production tokens."
         )
 
-
-
-
-
-
-    )
-
-
-
-
-
+    # Cases and expected behaviours:
+    # old name exists, new name does not exist -> delete old and do not fail on missing new
+    # old name exists, new name exists -> delete both
+    # old name does not exist, new name exists -> only try to delete new
+    # old name does not exist, new name does not exist -> keep previous behaviour where missing deployment raises error for the new name.
+    def _delete(workflow_name):
+        validate_token(workflow_name, obj.token_prefix, authorize, _token_instructions)
+        obj.echo("Deleting workflow *{name}*...".format(name=workflow_name), bold=True)
+
+        schedule_deleted, sensor_deleted, workflow_deleted = ArgoWorkflows.delete(
+            workflow_name
         )
 
-
-
-
-
-
+        if schedule_deleted:
+            obj.echo(
+                "Deleting cronworkflow *{name}*...".format(name=workflow_name),
+                bold=True,
+            )
 
-
+        if sensor_deleted:
+            obj.echo(
+                "Deleting sensor *{name}*...".format(name=workflow_name),
+                bold=True,
+            )
+        return workflow_deleted
+
+    workflows_deleted = False
+    cleanup_old_name = False
+    if obj.workflow_name != obj._v1_workflow_name:
+        # Only add the old name if there exists a deployment with such name.
+        # This is due to the way validate_token is tied to an existing deployment.
+        if ArgoWorkflows.get_existing_deployment(obj._v1_workflow_name) is not None:
+            cleanup_old_name = True
+            obj.echo(
+                "This flow has been deployed with another name in the past due to a limitation with Argo Workflows. "
+                "Will also delete the older deployment.",
+                wrap=True,
+            )
+            _delete(obj._v1_workflow_name)
+            workflows_deleted = True
+
+    # Always try to delete the current name.
+    # Do not raise exception if we deleted old name before this.
+    try:
+        _delete(obj.workflow_name)
+        workflows_deleted = True
+    except ArgoWorkflowsException:
+        if not cleanup_old_name:
+            raise
+
+    if workflows_deleted:
         obj.echo(
             "Deleting Kubernetes resources may take a while. "
             "Deploying the flow again to Argo Workflows while the delete is in-flight will fail."
@@ -689,17 +1078,21 @@ def suspend(obj, run_id, authorize=None):
             "about production tokens."
         )
 
-    validate_run_id(
-        obj.workflow_name, obj.token_prefix, authorize, run_id, _token_instructions
-    )
+    workflows = _get_existing_workflow_names(obj)
 
-    # Trim prefix from run_id
-    name = run_id[5:]
+    for workflow_name in workflows:
+        validate_run_id(
+            workflow_name, obj.token_prefix, authorize, run_id, _token_instructions
+        )
+
+        # Trim prefix from run_id
+        name = run_id[5:]
 
-    workflow_suspended = ArgoWorkflows.suspend(name)
+        workflow_suspended = ArgoWorkflows.suspend(name)
 
-    if workflow_suspended:
-        obj.echo("Suspended execution of *%s*" % run_id)
+        if workflow_suspended:
+            obj.echo("Suspended execution of *%s*" % run_id)
+            break  # no need to try out all workflow_names if we found the running one.
 
 
 @argo_workflows.command(help="Unsuspend flow execution on Argo Workflows.")
@@ -733,17 +1126,21 @@ def unsuspend(obj, run_id, authorize=None):
             "about production tokens."
         )
 
-    validate_run_id(
-        obj.workflow_name, obj.token_prefix, authorize, run_id, _token_instructions
-    )
+    workflows = _get_existing_workflow_names(obj)
 
-    # Trim prefix from run_id
-    name = run_id[5:]
+    for workflow_name in workflows:
+        validate_run_id(
+            workflow_name, obj.token_prefix, authorize, run_id, _token_instructions
+        )
+
+        # Trim prefix from run_id
+        name = run_id[5:]
 
-    workflow_suspended = ArgoWorkflows.unsuspend(name)
+        workflow_suspended = ArgoWorkflows.unsuspend(name)
 
-    if workflow_suspended:
-        obj.echo("Unsuspended execution of *%s*" % run_id)
+        if workflow_suspended:
+            obj.echo("Unsuspended execution of *%s*" % run_id)
+            break  # no need to try all workflow_names if we found one.
 
 
 def validate_token(name, token_prefix, authorize, instructions_fn=None):
@@ -786,6 +1183,20 @@ def validate_token(name, token_prefix, authorize, instructions_fn=None):
     return True
 
 
+def get_run_object(pathspec: str):
+    try:
+        return Run(pathspec, _namespace_check=False)
+    except MetaflowNotFound:
+        return None
+
+
+def get_status_considering_run_object(status, run_obj):
+    remapped_status = remap_status(status)
+    if remapped_status == "Running" and run_obj is None:
+        return "Pending"
+    return remapped_status
+
+
 @argo_workflows.command(help="Fetch flow execution status on Argo Workflows.")
 @click.argument("run-id", required=True, type=str)
 @click.pass_obj
@@ -803,8 +1214,10 @@ def status(obj, run_id):
     # Trim prefix from run_id
     name = run_id[5:]
     status = ArgoWorkflows.get_workflow_status(obj.flow.name, name)
+    run_obj = get_run_object("/".join((obj.flow.name, run_id)))
    if status is not None:
-        obj.echo_always(remap_status(status))
+        status = get_status_considering_run_object(status, run_obj)
+        obj.echo_always(status)
 
 
 @argo_workflows.command(help="Terminate flow execution on Argo Workflows.")
@@ -835,22 +1248,26 @@ def terminate(obj, run_id, authorize=None):
             "about production tokens."
         )
 
-    validate_run_id(
-        obj.workflow_name, obj.token_prefix, authorize, run_id, _token_instructions
-    )
+    workflows = _get_existing_workflow_names(obj)
 
-    # Trim prefix from run_id
-    name = run_id[5:]
-    obj.echo(
-        "Terminating run *{run_id}* for {flow_name} ...".format(
-            run_id=run_id, flow_name=obj.flow.name
-        ),
-        bold=True,
-    )
+    for workflow_name in workflows:
+        validate_run_id(
+            workflow_name, obj.token_prefix, authorize, run_id, _token_instructions
+        )
+
+        # Trim prefix from run_id
+        name = run_id[5:]
+        obj.echo(
+            "Terminating run *{run_id}* for {flow_name} ...".format(
+                run_id=run_id, flow_name=obj.flow.name
+            ),
+            bold=True,
+        )
 
-    terminated = ArgoWorkflows.terminate(obj.flow.name, name)
-    if terminated:
-        obj.echo("\nRun terminated.")
+        terminated = ArgoWorkflows.terminate(obj.flow.name, name)
+        if terminated:
+            obj.echo("\nRun terminated.")
+            break  # no need to try all workflow_names if we found the running one.
 
 
 @argo_workflows.command(help="List Argo Workflow templates for the flow.")
@@ -863,11 +1280,35 @@ def terminate(obj, run_id, authorize=None):
 )
 @click.pass_obj
 def list_workflow_templates(obj, all=None):
-    templates = ArgoWorkflows.list_templates(obj.flow.name, all)
-    for template_name in templates:
+    for template_name in ArgoWorkflows.list_templates(obj.flow.name, all):
         obj.echo_always(template_name)
 
 
+# Internal CLI command to run a heartbeat daemon in an Argo Workflows Daemon container.
+@argo_workflows.command(hidden=True, help="start heartbeat process for a run")
+@click.option("--run_id", required=True)
+@click.option(
+    "--tag",
+    "tags",
+    multiple=True,
+    default=None,
+    help="Annotate all objects produced by Argo Workflows runs "
+    "with the given tag. You can specify this option multiple "
+    "times to attach multiple tags.",
+)
+@click.pass_obj
+def heartbeat(obj, run_id, tags=None):
+    # Try to register a run in case the start task has not taken care of it yet.
+    obj.metadata.register_run_id(run_id, tags)
+    # Start run heartbeat
+    obj.metadata.start_run_heartbeat(obj.flow.name, run_id)
+    # Keepalive loop
+    while True:
+        # Do not pollute daemon logs with anything unnecessary,
+        # as they might be extremely long running.
+        sleep(10)
+
+
 def validate_run_id(
     workflow_name, token_prefix, authorize, run_id, instructions_fn=None
 ):
@@ -899,13 +1340,7 @@ def validate_run_id(
 
     if project_name is not None:
         # Verify we are operating on the correct project.
-
-        # e.g. 'test_proj' and 'test_project' should count as a mismatch.
-        project_part = "%s." % sanitize_for_argo(project_name)
-        if (
-            current.get("project_name") != project_name
-            and project_part not in workflow_name
-        ):
+        if current.get("project_name") != project_name:
             raise RunIdMismatch(
                 "The workflow belongs to the project *%s*. "
                 "Please use the project decorator or --name to target the correct project"
@@ -913,13 +1348,7 @@ def validate_run_id(
             )
 
         # Verify we are operating on the correct branch.
-
-        # e.g. 'user.tes' and 'user.test' should count as a mismatch.
-        branch_part = ".%s." % sanitize_for_argo(branch_name)
-        if (
-            current.get("branch_name") != branch_name
-            and branch_part not in workflow_name
-        ):
+        if current.get("branch_name") != branch_name:
             raise RunIdMismatch(
                 "The workflow belongs to the branch *%s*. "
                 "Please use --branch, --production or --name to target the correct branch"
@@ -941,11 +1370,26 @@ def validate_run_id(
     return True
 
 
+def _get_existing_workflow_names(obj):
+    """
+    Construct a list of the current workflow name and possible existing deployments of old workflow names
+    """
+    workflows = [obj.workflow_name]
+    if obj.workflow_name != obj._v1_workflow_name:
+        # Only add the old name if there exists a deployment with such name.
+        # This is due to the way validate_token is tied to an existing deployment.
+        if ArgoWorkflows.get_existing_deployment(obj._v1_workflow_name) is not None:
+            workflows.append(obj._v1_workflow_name)
+
+    return workflows
+
+
 def sanitize_for_argo(text):
     """
-    Sanitizes a string so it does not contain characters that are not permitted in
+    Sanitizes a string so it does not contain characters that are not permitted in
+    Argo Workflow resource names.
     """
-    return (
+    sanitized = (
         re.compile(r"^[^A-Za-z0-9]+")
         .sub("", text)
         .replace("_", "")
@@ -953,6 +1397,12 @@ def sanitize_for_argo(text):
         .replace("+", "")
         .lower()
     )
+    # This is added in order to get sanitized and truncated project branch names to adhere to RFC 1123 subdomain requirements
+    # f.ex. after truncation a project flow name might be project.branch-cut-short-.flowname
+    # sanitize around the . separators by removing any non-alphanumeric characters
+    sanitized = re.compile(r"[^a-z0-9]*\.[^a-z0-9]*").sub(".", sanitized)
+
+    return sanitized
 
 
 def remap_status(status):
```