ob-metaflow 2.11.13.1__py2.py3-none-any.whl → 2.19.7.1rc0__py2.py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- metaflow/R.py +10 -7
- metaflow/__init__.py +40 -25
- metaflow/_vendor/imghdr/__init__.py +186 -0
- metaflow/_vendor/importlib_metadata/__init__.py +1063 -0
- metaflow/_vendor/importlib_metadata/_adapters.py +68 -0
- metaflow/_vendor/importlib_metadata/_collections.py +30 -0
- metaflow/_vendor/importlib_metadata/_compat.py +71 -0
- metaflow/_vendor/importlib_metadata/_functools.py +104 -0
- metaflow/_vendor/importlib_metadata/_itertools.py +73 -0
- metaflow/_vendor/importlib_metadata/_meta.py +48 -0
- metaflow/_vendor/importlib_metadata/_text.py +99 -0
- metaflow/_vendor/importlib_metadata/py.typed +0 -0
- metaflow/_vendor/typeguard/__init__.py +48 -0
- metaflow/_vendor/typeguard/_checkers.py +1070 -0
- metaflow/_vendor/typeguard/_config.py +108 -0
- metaflow/_vendor/typeguard/_decorators.py +233 -0
- metaflow/_vendor/typeguard/_exceptions.py +42 -0
- metaflow/_vendor/typeguard/_functions.py +308 -0
- metaflow/_vendor/typeguard/_importhook.py +213 -0
- metaflow/_vendor/typeguard/_memo.py +48 -0
- metaflow/_vendor/typeguard/_pytest_plugin.py +127 -0
- metaflow/_vendor/typeguard/_suppression.py +86 -0
- metaflow/_vendor/typeguard/_transformer.py +1229 -0
- metaflow/_vendor/typeguard/_union_transformer.py +55 -0
- metaflow/_vendor/typeguard/_utils.py +173 -0
- metaflow/_vendor/typeguard/py.typed +0 -0
- metaflow/_vendor/typing_extensions.py +3641 -0
- metaflow/_vendor/v3_7/importlib_metadata/__init__.py +1063 -0
- metaflow/_vendor/v3_7/importlib_metadata/_adapters.py +68 -0
- metaflow/_vendor/v3_7/importlib_metadata/_collections.py +30 -0
- metaflow/_vendor/v3_7/importlib_metadata/_compat.py +71 -0
- metaflow/_vendor/v3_7/importlib_metadata/_functools.py +104 -0
- metaflow/_vendor/v3_7/importlib_metadata/_itertools.py +73 -0
- metaflow/_vendor/v3_7/importlib_metadata/_meta.py +48 -0
- metaflow/_vendor/v3_7/importlib_metadata/_text.py +99 -0
- metaflow/_vendor/v3_7/importlib_metadata/py.typed +0 -0
- metaflow/_vendor/v3_7/typeguard/__init__.py +48 -0
- metaflow/_vendor/v3_7/typeguard/_checkers.py +906 -0
- metaflow/_vendor/v3_7/typeguard/_config.py +108 -0
- metaflow/_vendor/v3_7/typeguard/_decorators.py +237 -0
- metaflow/_vendor/v3_7/typeguard/_exceptions.py +42 -0
- metaflow/_vendor/v3_7/typeguard/_functions.py +310 -0
- metaflow/_vendor/v3_7/typeguard/_importhook.py +213 -0
- metaflow/_vendor/v3_7/typeguard/_memo.py +48 -0
- metaflow/_vendor/v3_7/typeguard/_pytest_plugin.py +100 -0
- metaflow/_vendor/v3_7/typeguard/_suppression.py +88 -0
- metaflow/_vendor/v3_7/typeguard/_transformer.py +1207 -0
- metaflow/_vendor/v3_7/typeguard/_union_transformer.py +54 -0
- metaflow/_vendor/v3_7/typeguard/_utils.py +169 -0
- metaflow/_vendor/v3_7/typeguard/py.typed +0 -0
- metaflow/_vendor/v3_7/typing_extensions.py +3072 -0
- metaflow/_vendor/yaml/__init__.py +427 -0
- metaflow/_vendor/yaml/composer.py +139 -0
- metaflow/_vendor/yaml/constructor.py +748 -0
- metaflow/_vendor/yaml/cyaml.py +101 -0
- metaflow/_vendor/yaml/dumper.py +62 -0
- metaflow/_vendor/yaml/emitter.py +1137 -0
- metaflow/_vendor/yaml/error.py +75 -0
- metaflow/_vendor/yaml/events.py +86 -0
- metaflow/_vendor/yaml/loader.py +63 -0
- metaflow/_vendor/yaml/nodes.py +49 -0
- metaflow/_vendor/yaml/parser.py +589 -0
- metaflow/_vendor/yaml/reader.py +185 -0
- metaflow/_vendor/yaml/representer.py +389 -0
- metaflow/_vendor/yaml/resolver.py +227 -0
- metaflow/_vendor/yaml/scanner.py +1435 -0
- metaflow/_vendor/yaml/serializer.py +111 -0
- metaflow/_vendor/yaml/tokens.py +104 -0
- metaflow/cards.py +5 -0
- metaflow/cli.py +331 -785
- metaflow/cli_args.py +17 -0
- metaflow/cli_components/__init__.py +0 -0
- metaflow/cli_components/dump_cmd.py +96 -0
- metaflow/cli_components/init_cmd.py +52 -0
- metaflow/cli_components/run_cmds.py +546 -0
- metaflow/cli_components/step_cmd.py +334 -0
- metaflow/cli_components/utils.py +140 -0
- metaflow/client/__init__.py +1 -0
- metaflow/client/core.py +467 -73
- metaflow/client/filecache.py +75 -35
- metaflow/clone_util.py +7 -1
- metaflow/cmd/code/__init__.py +231 -0
- metaflow/cmd/develop/stub_generator.py +756 -288
- metaflow/cmd/develop/stubs.py +12 -28
- metaflow/cmd/main_cli.py +6 -4
- metaflow/cmd/make_wrapper.py +78 -0
- metaflow/datastore/__init__.py +1 -0
- metaflow/datastore/content_addressed_store.py +41 -10
- metaflow/datastore/datastore_set.py +11 -2
- metaflow/datastore/flow_datastore.py +156 -10
- metaflow/datastore/spin_datastore.py +91 -0
- metaflow/datastore/task_datastore.py +154 -39
- metaflow/debug.py +5 -0
- metaflow/decorators.py +404 -78
- metaflow/exception.py +8 -2
- metaflow/extension_support/__init__.py +527 -376
- metaflow/extension_support/_empty_file.py +2 -2
- metaflow/extension_support/plugins.py +49 -31
- metaflow/flowspec.py +482 -33
- metaflow/graph.py +210 -42
- metaflow/includefile.py +84 -40
- metaflow/lint.py +141 -22
- metaflow/meta_files.py +13 -0
- metaflow/{metadata → metadata_provider}/heartbeat.py +24 -8
- metaflow/{metadata → metadata_provider}/metadata.py +86 -1
- metaflow/metaflow_config.py +175 -28
- metaflow/metaflow_config_funcs.py +51 -3
- metaflow/metaflow_current.py +4 -10
- metaflow/metaflow_environment.py +139 -53
- metaflow/metaflow_git.py +115 -0
- metaflow/metaflow_profile.py +18 -0
- metaflow/metaflow_version.py +150 -66
- metaflow/mflog/__init__.py +4 -3
- metaflow/mflog/save_logs.py +2 -2
- metaflow/multicore_utils.py +31 -14
- metaflow/package/__init__.py +673 -0
- metaflow/packaging_sys/__init__.py +880 -0
- metaflow/packaging_sys/backend.py +128 -0
- metaflow/packaging_sys/distribution_support.py +153 -0
- metaflow/packaging_sys/tar_backend.py +99 -0
- metaflow/packaging_sys/utils.py +54 -0
- metaflow/packaging_sys/v1.py +527 -0
- metaflow/parameters.py +149 -28
- metaflow/plugins/__init__.py +74 -5
- metaflow/plugins/airflow/airflow.py +40 -25
- metaflow/plugins/airflow/airflow_cli.py +22 -5
- metaflow/plugins/airflow/airflow_decorator.py +1 -1
- metaflow/plugins/airflow/airflow_utils.py +5 -3
- metaflow/plugins/airflow/sensors/base_sensor.py +4 -4
- metaflow/plugins/airflow/sensors/external_task_sensor.py +2 -2
- metaflow/plugins/airflow/sensors/s3_sensor.py +2 -2
- metaflow/plugins/argo/argo_client.py +78 -33
- metaflow/plugins/argo/argo_events.py +6 -6
- metaflow/plugins/argo/argo_workflows.py +2410 -527
- metaflow/plugins/argo/argo_workflows_cli.py +571 -121
- metaflow/plugins/argo/argo_workflows_decorator.py +43 -12
- metaflow/plugins/argo/argo_workflows_deployer.py +106 -0
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +453 -0
- metaflow/plugins/argo/capture_error.py +73 -0
- metaflow/plugins/argo/conditional_input_paths.py +35 -0
- metaflow/plugins/argo/exit_hooks.py +209 -0
- metaflow/plugins/argo/jobset_input_paths.py +15 -0
- metaflow/plugins/argo/param_val.py +19 -0
- metaflow/plugins/aws/aws_client.py +10 -3
- metaflow/plugins/aws/aws_utils.py +55 -2
- metaflow/plugins/aws/batch/batch.py +72 -5
- metaflow/plugins/aws/batch/batch_cli.py +33 -10
- metaflow/plugins/aws/batch/batch_client.py +4 -3
- metaflow/plugins/aws/batch/batch_decorator.py +102 -35
- metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py +13 -10
- metaflow/plugins/aws/step_functions/dynamo_db_client.py +0 -3
- metaflow/plugins/aws/step_functions/production_token.py +1 -1
- metaflow/plugins/aws/step_functions/step_functions.py +65 -8
- metaflow/plugins/aws/step_functions/step_functions_cli.py +101 -7
- metaflow/plugins/aws/step_functions/step_functions_decorator.py +1 -2
- metaflow/plugins/aws/step_functions/step_functions_deployer.py +97 -0
- metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +264 -0
- metaflow/plugins/azure/azure_exceptions.py +1 -1
- metaflow/plugins/azure/azure_secret_manager_secrets_provider.py +240 -0
- metaflow/plugins/azure/azure_tail.py +1 -1
- metaflow/plugins/azure/includefile_support.py +2 -0
- metaflow/plugins/cards/card_cli.py +66 -30
- metaflow/plugins/cards/card_creator.py +25 -1
- metaflow/plugins/cards/card_datastore.py +21 -49
- metaflow/plugins/cards/card_decorator.py +132 -8
- metaflow/plugins/cards/card_modules/basic.py +112 -17
- metaflow/plugins/cards/card_modules/bundle.css +1 -1
- metaflow/plugins/cards/card_modules/card.py +16 -1
- metaflow/plugins/cards/card_modules/chevron/renderer.py +1 -1
- metaflow/plugins/cards/card_modules/components.py +665 -28
- metaflow/plugins/cards/card_modules/convert_to_native_type.py +36 -7
- metaflow/plugins/cards/card_modules/json_viewer.py +232 -0
- metaflow/plugins/cards/card_modules/main.css +1 -0
- metaflow/plugins/cards/card_modules/main.js +68 -49
- metaflow/plugins/cards/card_modules/renderer_tools.py +1 -0
- metaflow/plugins/cards/card_modules/test_cards.py +26 -12
- metaflow/plugins/cards/card_server.py +39 -14
- metaflow/plugins/cards/component_serializer.py +2 -9
- metaflow/plugins/cards/metadata.py +22 -0
- metaflow/plugins/catch_decorator.py +9 -0
- metaflow/plugins/datastores/azure_storage.py +10 -1
- metaflow/plugins/datastores/gs_storage.py +6 -2
- metaflow/plugins/datastores/local_storage.py +12 -6
- metaflow/plugins/datastores/spin_storage.py +12 -0
- metaflow/plugins/datatools/local.py +2 -0
- metaflow/plugins/datatools/s3/s3.py +126 -75
- metaflow/plugins/datatools/s3/s3op.py +254 -121
- metaflow/plugins/env_escape/__init__.py +3 -3
- metaflow/plugins/env_escape/client_modules.py +102 -72
- metaflow/plugins/env_escape/server.py +7 -0
- metaflow/plugins/env_escape/stub.py +24 -5
- metaflow/plugins/events_decorator.py +343 -185
- metaflow/plugins/exit_hook/__init__.py +0 -0
- metaflow/plugins/exit_hook/exit_hook_decorator.py +46 -0
- metaflow/plugins/exit_hook/exit_hook_script.py +52 -0
- metaflow/plugins/gcp/__init__.py +1 -1
- metaflow/plugins/gcp/gcp_secret_manager_secrets_provider.py +11 -6
- metaflow/plugins/gcp/gs_tail.py +10 -6
- metaflow/plugins/gcp/includefile_support.py +3 -0
- metaflow/plugins/kubernetes/kube_utils.py +108 -0
- metaflow/plugins/kubernetes/kubernetes.py +411 -130
- metaflow/plugins/kubernetes/kubernetes_cli.py +168 -36
- metaflow/plugins/kubernetes/kubernetes_client.py +104 -2
- metaflow/plugins/kubernetes/kubernetes_decorator.py +246 -88
- metaflow/plugins/kubernetes/kubernetes_job.py +253 -581
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +1071 -0
- metaflow/plugins/kubernetes/spot_metadata_cli.py +69 -0
- metaflow/plugins/kubernetes/spot_monitor_sidecar.py +109 -0
- metaflow/plugins/logs_cli.py +359 -0
- metaflow/plugins/{metadata → metadata_providers}/local.py +144 -84
- metaflow/plugins/{metadata → metadata_providers}/service.py +103 -26
- metaflow/plugins/metadata_providers/spin.py +16 -0
- metaflow/plugins/package_cli.py +36 -24
- metaflow/plugins/parallel_decorator.py +128 -11
- metaflow/plugins/parsers.py +16 -0
- metaflow/plugins/project_decorator.py +51 -5
- metaflow/plugins/pypi/bootstrap.py +357 -105
- metaflow/plugins/pypi/conda_decorator.py +82 -81
- metaflow/plugins/pypi/conda_environment.py +187 -52
- metaflow/plugins/pypi/micromamba.py +157 -47
- metaflow/plugins/pypi/parsers.py +268 -0
- metaflow/plugins/pypi/pip.py +88 -13
- metaflow/plugins/pypi/pypi_decorator.py +37 -1
- metaflow/plugins/pypi/utils.py +48 -2
- metaflow/plugins/resources_decorator.py +2 -2
- metaflow/plugins/secrets/__init__.py +3 -0
- metaflow/plugins/secrets/secrets_decorator.py +26 -181
- metaflow/plugins/secrets/secrets_func.py +49 -0
- metaflow/plugins/secrets/secrets_spec.py +101 -0
- metaflow/plugins/secrets/utils.py +74 -0
- metaflow/plugins/tag_cli.py +4 -7
- metaflow/plugins/test_unbounded_foreach_decorator.py +41 -6
- metaflow/plugins/timeout_decorator.py +3 -3
- metaflow/plugins/uv/__init__.py +0 -0
- metaflow/plugins/uv/bootstrap.py +128 -0
- metaflow/plugins/uv/uv_environment.py +72 -0
- metaflow/procpoll.py +1 -1
- metaflow/pylint_wrapper.py +5 -1
- metaflow/runner/__init__.py +0 -0
- metaflow/runner/click_api.py +717 -0
- metaflow/runner/deployer.py +470 -0
- metaflow/runner/deployer_impl.py +201 -0
- metaflow/runner/metaflow_runner.py +714 -0
- metaflow/runner/nbdeploy.py +132 -0
- metaflow/runner/nbrun.py +225 -0
- metaflow/runner/subprocess_manager.py +650 -0
- metaflow/runner/utils.py +335 -0
- metaflow/runtime.py +1078 -260
- metaflow/sidecar/sidecar_worker.py +1 -1
- metaflow/system/__init__.py +5 -0
- metaflow/system/system_logger.py +85 -0
- metaflow/system/system_monitor.py +108 -0
- metaflow/system/system_utils.py +19 -0
- metaflow/task.py +521 -225
- metaflow/tracing/__init__.py +7 -7
- metaflow/tracing/span_exporter.py +31 -38
- metaflow/tracing/tracing_modules.py +38 -43
- metaflow/tuple_util.py +27 -0
- metaflow/user_configs/__init__.py +0 -0
- metaflow/user_configs/config_options.py +563 -0
- metaflow/user_configs/config_parameters.py +598 -0
- metaflow/user_decorators/__init__.py +0 -0
- metaflow/user_decorators/common.py +144 -0
- metaflow/user_decorators/mutable_flow.py +512 -0
- metaflow/user_decorators/mutable_step.py +424 -0
- metaflow/user_decorators/user_flow_decorator.py +264 -0
- metaflow/user_decorators/user_step_decorator.py +749 -0
- metaflow/util.py +243 -27
- metaflow/vendor.py +23 -7
- metaflow/version.py +1 -1
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/Makefile +355 -0
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/Tiltfile +726 -0
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/pick_services.sh +105 -0
- ob_metaflow-2.19.7.1rc0.dist-info/METADATA +87 -0
- ob_metaflow-2.19.7.1rc0.dist-info/RECORD +445 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/WHEEL +1 -1
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/entry_points.txt +1 -0
- metaflow/_vendor/v3_5/__init__.py +0 -1
- metaflow/_vendor/v3_5/importlib_metadata/__init__.py +0 -644
- metaflow/_vendor/v3_5/importlib_metadata/_compat.py +0 -152
- metaflow/package.py +0 -188
- ob_metaflow-2.11.13.1.dist-info/METADATA +0 -85
- ob_metaflow-2.11.13.1.dist-info/RECORD +0 -308
- /metaflow/_vendor/{v3_5/zipp.py → zipp.py} +0 -0
- /metaflow/{metadata → metadata_provider}/__init__.py +0 -0
- /metaflow/{metadata → metadata_provider}/util.py +0 -0
- /metaflow/plugins/{metadata → metadata_providers}/__init__.py +0 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info/licenses}/LICENSE +0 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/top_level.txt +0 -0
metaflow/plugins/argo/argo_workflows_decorator.py

@@ -1,13 +1,13 @@
 import json
 import os
-
+
 
 from metaflow import current
 from metaflow.decorators import StepDecorator
 from metaflow.events import Trigger
-from metaflow.
-from metaflow.
-
+from metaflow.metadata_provider import MetaDatum
+from metaflow.graph import FlowGraph
+from metaflow.flowspec import FlowSpec
 from .argo_events import ArgoEvent
 
 
@@ -40,7 +40,7 @@ class ArgoWorkflowsInternalDecorator(StepDecorator):
         if payload != "null":  # Argo-Workflow's None
             try:
                 payload = json.loads(payload)
-            except (TypeError, ValueError)
+            except (TypeError, ValueError):
                 # There could be arbitrary events that Metaflow doesn't know of
                 payload = {}
             triggers.append(
@@ -52,7 +52,7 @@ class ArgoWorkflowsInternalDecorator(StepDecorator):
                     "_", 1
                 )[
                     0
-                ]  # infer type from env var key
+                ],  # infer type from env var key
                 # Add more event metadata here in the future
             }
         )
@@ -83,7 +83,13 @@ class ArgoWorkflowsInternalDecorator(StepDecorator):
         metadata.register_metadata(run_id, step_name, task_id, entries)
 
     def task_finished(
-        self,
+        self,
+        step_name,
+        flow: FlowSpec,
+        graph: FlowGraph,
+        is_task_ok,
+        retry_count,
+        max_user_code_retries,
     ):
         if not is_task_ok:
             # The task finished with an exception - execution won't
@@ -100,16 +106,41 @@ class ArgoWorkflowsInternalDecorator(StepDecorator):
        # we run pods with a security context. We work around this constraint by
        # mounting an emptyDir volume.
        if graph[step_name].type == "foreach":
+           if graph[step_name].parallel_foreach:
+               # If a node is marked as a `parallel_foreach`, pass down the value of
+               # `num_parallel` to the subsequent steps.
+               with open("/mnt/out/num_parallel", "w") as f:
+                   json.dump(flow._parallel_ubf_iter.num_parallel, f)
+               # Set splits to 1 since parallelism is handled by JobSet.
+               flow._foreach_num_splits = 1
+               with open("/mnt/out/task_id_entropy", "w") as file:
+                   import uuid
+
+                   file.write(uuid.uuid4().hex[:6])
+
            with open("/mnt/out/splits", "w") as file:
                json.dump(list(range(flow._foreach_num_splits)), file)
            with open("/mnt/out/split_cardinality", "w") as file:
                json.dump(flow._foreach_num_splits, file)
 
-       #
-       #
-
-
-
+       # For conditional branches we need to record the value of the switch to disk, in order to pass it as an
+       # output from the switching step to be used further down the DAG
+       if graph[step_name].type == "split-switch":
+           # TODO: A nicer way to access the chosen step?
+           _out_funcs, _ = flow._transition
+           chosen_step = _out_funcs[0]
+           with open("/mnt/out/switch_step", "w") as file:
+               file.write(chosen_step)
+
+       # For steps that have a `@parallel` decorator set to them, we will be relying on Jobsets
+       # to run the task. In this case, we cannot set anything in the
+       # `/mnt/out` directory, since such form of output mounts are not available to Jobset executions.
+       if not graph[step_name].parallel_step:
+           # Unfortunately, we can't always use pod names as task-ids since the pod names
+           # are not static across retries. We write the task-id to a file that is read
+           # by the next task here.
+           with open("/mnt/out/task_id", "w") as file:
+               file.write(self.task_id)
 
        # Emit Argo Events given that the flow has succeeded. Given that we only
        # emit events when the task succeeds, we can piggy back on this decorator
metaflow/plugins/argo/argo_workflows_deployer.py (new file)

@@ -0,0 +1,106 @@
+from typing import Any, ClassVar, Dict, Optional, TYPE_CHECKING, Type
+
+from metaflow.runner.deployer_impl import DeployerImpl
+
+if TYPE_CHECKING:
+    import metaflow.plugins.argo.argo_workflows_deployer_objects
+
+
+class ArgoWorkflowsDeployer(DeployerImpl):
+    """
+    Deployer implementation for Argo Workflows.
+
+    Parameters
+    ----------
+    name : str, optional, default None
+        Argo workflow name. The flow name is used instead if this option is not specified.
+    """
+
+    TYPE: ClassVar[Optional[str]] = "argo-workflows"
+
+    def __init__(self, deployer_kwargs: Dict[str, str], **kwargs):
+        """
+        Initialize the ArgoWorkflowsDeployer.
+
+        Parameters
+        ----------
+        deployer_kwargs : Dict[str, str]
+            The deployer-specific keyword arguments.
+        **kwargs : Any
+            Additional arguments to pass to the superclass constructor.
+        """
+        self._deployer_kwargs = deployer_kwargs
+        super().__init__(**kwargs)
+
+    @property
+    def deployer_kwargs(self) -> Dict[str, Any]:
+        return self._deployer_kwargs
+
+    @staticmethod
+    def deployed_flow_type() -> (
+        Type[
+            "metaflow.plugins.argo.argo_workflows_deployer_objects.ArgoWorkflowsDeployedFlow"
+        ]
+    ):
+        from .argo_workflows_deployer_objects import ArgoWorkflowsDeployedFlow
+
+        return ArgoWorkflowsDeployedFlow
+
+    def create(
+        self, **kwargs
+    ) -> "metaflow.plugins.argo.argo_workflows_deployer_objects.ArgoWorkflowsDeployedFlow":
+        """
+        Create a new ArgoWorkflow deployment.
+
+        Parameters
+        ----------
+        authorize : str, optional, default None
+            Authorize using this production token. Required when re-deploying an existing flow
+            for the first time. The token is cached in METAFLOW_HOME.
+        generate_new_token : bool, optional, default False
+            Generate a new production token for this flow. Moves the production flow to a new namespace.
+        given_token : str, optional, default None
+            Use the given production token for this flow. Moves the production flow to the given namespace.
+        tags : List[str], optional, default None
+            Annotate all objects produced by Argo Workflows runs with these tags.
+        user_namespace : str, optional, default None
+            Change the namespace from the default (production token) to the given tag.
+        only_json : bool, optional, default False
+            Only print out JSON sent to Argo Workflows without deploying anything.
+        max_workers : int, optional, default 100
+            Maximum number of parallel processes.
+        workflow_timeout : int, optional, default None
+            Workflow timeout in seconds.
+        workflow_priority : int, optional, default None
+            Workflow priority as an integer. Higher priority workflows are processed first
+            if Argo Workflows controller is configured to process limited parallel workflows.
+        auto_emit_argo_events : bool, optional, default True
+            Auto emits Argo Events when the run completes successfully.
+        notify_on_error : bool, optional, default False
+            Notify if the workflow fails.
+        notify_on_success : bool, optional, default False
+            Notify if the workflow succeeds.
+        notify_slack_webhook_url : str, optional, default ''
+            Slack incoming webhook url for workflow success/failure notifications.
+        notify_pager_duty_integration_key : str, optional, default ''
+            PagerDuty Events API V2 Integration key for workflow success/failure notifications.
+        enable_heartbeat_daemon : bool, optional, default False
+            Use a daemon container to broadcast heartbeats.
+        deployer_attribute_file : str, optional, default None
+            Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.
+        enable_error_msg_capture : bool, optional, default True
+            Capture stack trace of first failed task in exit hook.
+
+        Returns
+        -------
+        ArgoWorkflowsDeployedFlow
+            The Flow deployed to Argo Workflows.
+        """
+
+        # Prevent circular import
+        from .argo_workflows_deployer_objects import ArgoWorkflowsDeployedFlow
+
+        return self._create(ArgoWorkflowsDeployedFlow, **kwargs)
+
+
+_addl_stubgen_modules = ["metaflow.plugins.argo.argo_workflows_deployer_objects"]
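The `create()` signature above is what the programmatic Deployer API invokes for Argo Workflows. As a rough sketch only (the flow file name, workflow name, and keyword arguments below are illustrative placeholders, not part of this diff):

from metaflow import Deployer

# Deploy the flow defined in "hello_flow.py" (placeholder) to Argo Workflows,
# using a subset of the create() options documented in the docstring above.
deployed = Deployer("hello_flow.py").argo_workflows(name="hello-flow").create(
    max_workers=50,
    notify_on_error=False,
)

# The resulting deployed-flow object exposes the production token (see below).
print(deployed.production_token)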
metaflow/plugins/argo/argo_workflows_deployer_objects.py (new file)

@@ -0,0 +1,453 @@
+import sys
+import json
+import time
+import tempfile
+from typing import ClassVar, Optional
+
+from metaflow.client.core import get_metadata
+from metaflow.exception import MetaflowException
+from metaflow.plugins.argo.argo_client import ArgoClient
+from metaflow.metaflow_config import KUBERNETES_NAMESPACE
+from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
+from metaflow.runner.deployer import (
+    Deployer,
+    DeployedFlow,
+    TriggeredRun,
+    generate_fake_flow_file_contents,
+)
+
+from metaflow.runner.utils import get_lower_level_group, handle_timeout, temporary_fifo
+
+
+class ArgoWorkflowsTriggeredRun(TriggeredRun):
+    """
+    A class representing a triggered Argo Workflow execution.
+    """
+
+    def suspend(self, **kwargs) -> bool:
+        """
+        Suspend the running workflow.
+
+        Parameters
+        ----------
+        authorize : str, optional, default None
+            Authorize the suspension with a production token.
+
+        Returns
+        -------
+        bool
+            True if the command was successful, False otherwise.
+        """
+        _, run_id = self.pathspec.split("/")
+
+        # every subclass needs to have `self.deployer_kwargs`
+        command = get_lower_level_group(
+            self.deployer.api,
+            self.deployer.top_level_kwargs,
+            self.deployer.TYPE,
+            self.deployer.deployer_kwargs,
+        ).suspend(run_id=run_id, **kwargs)
+
+        pid = self.deployer.spm.run_command(
+            [sys.executable, *command],
+            env=self.deployer.env_vars,
+            cwd=self.deployer.cwd,
+            show_output=self.deployer.show_output,
+        )
+
+        command_obj = self.deployer.spm.get(pid)
+        command_obj.sync_wait()
+        return command_obj.process.returncode == 0
+
+    def unsuspend(self, **kwargs) -> bool:
+        """
+        Unsuspend the suspended workflow.
+
+        Parameters
+        ----------
+        authorize : str, optional, default None
+            Authorize the unsuspend with a production token.
+
+        Returns
+        -------
+        bool
+            True if the command was successful, False otherwise.
+        """
+        _, run_id = self.pathspec.split("/")
+
+        # every subclass needs to have `self.deployer_kwargs`
+        command = get_lower_level_group(
+            self.deployer.api,
+            self.deployer.top_level_kwargs,
+            self.deployer.TYPE,
+            self.deployer.deployer_kwargs,
+        ).unsuspend(run_id=run_id, **kwargs)
+
+        pid = self.deployer.spm.run_command(
+            [sys.executable, *command],
+            env=self.deployer.env_vars,
+            cwd=self.deployer.cwd,
+            show_output=self.deployer.show_output,
+        )
+
+        command_obj = self.deployer.spm.get(pid)
+        command_obj.sync_wait()
+        return command_obj.process.returncode == 0
+
+    def terminate(self, **kwargs) -> bool:
+        """
+        Terminate the running workflow.
+
+        Parameters
+        ----------
+        authorize : str, optional, default None
+            Authorize the termination with a production token.
+
+        Returns
+        -------
+        bool
+            True if the command was successful, False otherwise.
+        """
+        _, run_id = self.pathspec.split("/")
+
+        # every subclass needs to have `self.deployer_kwargs`
+        command = get_lower_level_group(
+            self.deployer.api,
+            self.deployer.top_level_kwargs,
+            self.deployer.TYPE,
+            self.deployer.deployer_kwargs,
+        ).terminate(run_id=run_id, **kwargs)
+
+        pid = self.deployer.spm.run_command(
+            [sys.executable, *command],
+            env=self.deployer.env_vars,
+            cwd=self.deployer.cwd,
+            show_output=self.deployer.show_output,
+        )
+
+        command_obj = self.deployer.spm.get(pid)
+        command_obj.sync_wait()
+        return command_obj.process.returncode == 0
+
+    def wait_for_completion(
+        self, check_interval: int = 5, timeout: Optional[int] = None
+    ):
+        """
+        Wait for the workflow to complete or timeout.
+
+        Parameters
+        ----------
+        check_interval: int, default: 5
+            Frequency of checking for workflow completion, in seconds.
+        timeout : int, optional, default None
+            Maximum time in seconds to wait for workflow completion.
+            If None, waits indefinitely.
+
+        Raises
+        ------
+        TimeoutError
+            If the workflow does not complete within the specified timeout period.
+        """
+        start_time = time.time()
+        while self.is_running:
+            if timeout is not None and (time.time() - start_time) > timeout:
+                raise TimeoutError(
+                    "Workflow did not complete within specified timeout."
+                )
+            time.sleep(check_interval)
+
+    @property
+    def is_running(self):
+        """
+        Check if the workflow is currently running.
+
+        Returns
+        -------
+        bool
+            True if the workflow status is either 'Pending' or 'Running',
+            False otherwise.
+        """
+        workflow_status = self.status
+        # full list of all states present here:
+        # https://github.com/argoproj/argo-workflows/blob/main/pkg/apis/workflow/v1alpha1/workflow_types.go#L54
+        # we only consider non-terminal states to determine if the workflow has not finished
+        return workflow_status is not None and workflow_status in ["Pending", "Running"]
+
+    @property
+    def status(self) -> Optional[str]:
+        """
+        Get the status of the triggered run.
+
+        Returns
+        -------
+        str, optional
+            The status of the workflow considering the run object, or None if
+            the status could not be retrieved.
+        """
+        from metaflow.plugins.argo.argo_workflows_cli import (
+            get_status_considering_run_object,
+        )
+
+        flow_name, run_id = self.pathspec.split("/")
+        name = run_id[5:]
+        status = ArgoWorkflows.get_workflow_status(flow_name, name)
+        if status is not None:
+            return get_status_considering_run_object(status, self.run)
+        return None
+
+
+class ArgoWorkflowsDeployedFlow(DeployedFlow):
+    """
+    A class representing a deployed Argo Workflow template.
+    """
+
+    TYPE: ClassVar[Optional[str]] = "argo-workflows"
+
+    @classmethod
+    def list_deployed_flows(cls, flow_name: Optional[str] = None):
+        """
+        List all deployed Argo Workflow templates.
+
+        Parameters
+        ----------
+        flow_name : str, optional, default None
+            If specified, only list deployed flows for this specific flow name.
+            If None, list all deployed flows.
+
+        Yields
+        ------
+        ArgoWorkflowsDeployedFlow
+            `ArgoWorkflowsDeployedFlow` objects representing deployed
+            workflow templates on Argo Workflows.
+        """
+        from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
+
+        # When flow_name is None, use all=True to get all templates
+        # When flow_name is specified, use all=False to filter by flow_name
+        all_templates = flow_name is None
+        for template_name in ArgoWorkflows.list_templates(
+            flow_name=flow_name, all=all_templates
+        ):
+            try:
+                deployed_flow = cls.from_deployment(template_name)
+                yield deployed_flow
+            except Exception:
+                # Skip templates that can't be converted to DeployedFlow objects
+                continue
+
+    @classmethod
+    def from_deployment(cls, identifier: str, metadata: Optional[str] = None):
+        """
+        Retrieves a `ArgoWorkflowsDeployedFlow` object from an identifier and optional
+        metadata.
+
+        Parameters
+        ----------
+        identifier : str
+            Deployer specific identifier for the workflow to retrieve
+        metadata : str, optional, default None
+            Optional deployer specific metadata.
+
+        Returns
+        -------
+        ArgoWorkflowsDeployedFlow
+            A `ArgoWorkflowsDeployedFlow` object representing the
+            deployed flow on argo workflows.
+        """
+        client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
+        workflow_template = client.get_workflow_template(identifier)
+
+        if workflow_template is None:
+            raise MetaflowException("No deployed flow found for: %s" % identifier)
+
+        metadata_annotations = workflow_template.get("metadata", {}).get(
+            "annotations", {}
+        )
+
+        flow_name = metadata_annotations.get("metaflow/flow_name", "")
+        username = metadata_annotations.get("metaflow/owner", "")
+        parameters = json.loads(metadata_annotations.get("metaflow/parameters", "{}"))
+
+        # these two only exist if @project decorator is used..
+        branch_name = metadata_annotations.get("metaflow/branch_name", None)
+        project_name = metadata_annotations.get("metaflow/project_name", None)
+
+        project_kwargs = {}
+        if branch_name is not None:
+            if branch_name.startswith("prod."):
+                project_kwargs["production"] = True
+                project_kwargs["branch"] = branch_name[len("prod.") :]
+            elif branch_name.startswith("test."):
+                project_kwargs["branch"] = branch_name[len("test.") :]
+            elif branch_name == "prod":
+                project_kwargs["production"] = True
+
+        fake_flow_file_contents = generate_fake_flow_file_contents(
+            flow_name=flow_name, param_info=parameters, project_name=project_name
+        )
+
+        with tempfile.NamedTemporaryFile(suffix=".py", delete=False) as fake_flow_file:
+            with open(fake_flow_file.name, "w") as fp:
+                fp.write(fake_flow_file_contents)
+
+            if branch_name is not None:
+                d = Deployer(
+                    fake_flow_file.name,
+                    env={"METAFLOW_USER": username},
+                    **project_kwargs,
+                ).argo_workflows()
+            else:
+                d = Deployer(
+                    fake_flow_file.name, env={"METAFLOW_USER": username}
+                ).argo_workflows(name=identifier)
+
+            d.name = identifier
+            d.flow_name = flow_name
+            if metadata is None:
+                d.metadata = get_metadata()
+            else:
+                d.metadata = metadata
+
+        return cls(deployer=d)
+
+    @classmethod
+    def get_triggered_run(
+        cls, identifier: str, run_id: str, metadata: Optional[str] = None
+    ):
+        """
+        Retrieves a `ArgoWorkflowsTriggeredRun` object from an identifier, a run id and
+        optional metadata.
+
+        Parameters
+        ----------
+        identifier : str
+            Deployer specific identifier for the workflow to retrieve
+        run_id : str
+            Run ID for the which to fetch the triggered run object
+        metadata : str, optional, default None
+            Optional deployer specific metadata.
+
+        Returns
+        -------
+        ArgoWorkflowsTriggeredRun
+            A `ArgoWorkflowsTriggeredRun` object representing the
+            triggered run on argo workflows.
+        """
+        deployed_flow_obj = cls.from_deployment(identifier, metadata)
+        return ArgoWorkflowsTriggeredRun(
+            deployer=deployed_flow_obj.deployer,
+            content=json.dumps(
+                {
+                    "metadata": deployed_flow_obj.deployer.metadata,
+                    "pathspec": "/".join(
+                        (deployed_flow_obj.deployer.flow_name, run_id)
+                    ),
+                    "name": run_id,
+                }
+            ),
+        )
+
+    @property
+    def production_token(self) -> Optional[str]:
+        """
+        Get the production token for the deployed flow.
+
+        Returns
+        -------
+        str, optional
+            The production token, None if it cannot be retrieved.
+        """
+        try:
+            _, production_token = ArgoWorkflows.get_existing_deployment(
+                self.deployer.name
+            )
+            return production_token
+        except TypeError:
+            return None
+
+    def delete(self, **kwargs) -> bool:
+        """
+        Delete the deployed workflow template.
+
+        Parameters
+        ----------
+        authorize : str, optional, default None
+            Authorize the deletion with a production token.
+
+        Returns
+        -------
+        bool
+            True if the command was successful, False otherwise.
+        """
+        command = get_lower_level_group(
+            self.deployer.api,
+            self.deployer.top_level_kwargs,
+            self.deployer.TYPE,
+            self.deployer.deployer_kwargs,
+        ).delete(**kwargs)
+
+        pid = self.deployer.spm.run_command(
+            [sys.executable, *command],
+            env=self.deployer.env_vars,
+            cwd=self.deployer.cwd,
+            show_output=self.deployer.show_output,
+        )
+
+        command_obj = self.deployer.spm.get(pid)
+        command_obj.sync_wait()
+        return command_obj.process.returncode == 0
+
+    def trigger(self, **kwargs) -> ArgoWorkflowsTriggeredRun:
+        """
+        Trigger a new run for the deployed flow.
+
+        Parameters
+        ----------
+        **kwargs : Any
+            Additional arguments to pass to the trigger command,
+            `Parameters` in particular.
+
+        Returns
+        -------
+        ArgoWorkflowsTriggeredRun
+            The triggered run instance.
+
+        Raises
+        ------
+        Exception
+            If there is an error during the trigger process.
+        """
+        with temporary_fifo() as (attribute_file_path, attribute_file_fd):
+            # every subclass needs to have `self.deployer_kwargs`
+            command = get_lower_level_group(
+                self.deployer.api,
+                self.deployer.top_level_kwargs,
+                self.deployer.TYPE,
+                self.deployer.deployer_kwargs,
+            ).trigger(deployer_attribute_file=attribute_file_path, **kwargs)
+
+            pid = self.deployer.spm.run_command(
+                [sys.executable, *command],
+                env=self.deployer.env_vars,
+                cwd=self.deployer.cwd,
+                show_output=self.deployer.show_output,
+            )
+
+            command_obj = self.deployer.spm.get(pid)
+            content = handle_timeout(
+                attribute_file_fd, command_obj, self.deployer.file_read_timeout
+            )
+            command_obj.sync_wait()
+            if command_obj.process.returncode == 0:
+                return ArgoWorkflowsTriggeredRun(
+                    deployer=self.deployer, content=content
+                )
+
+        raise Exception(
+            "Error triggering %s on %s for %s"
+            % (
+                self.deployer.name,
+                self.deployer.TYPE,
+                self.deployer.flow_file,
+            )
+        )
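Taken together, these new objects let an existing Argo Workflows deployment be rehydrated and driven entirely from Python. A minimal sketch, assuming a workflow template named "helloflow" and a flow Parameter called "alpha" (both placeholders, not part of this diff):

from metaflow.plugins.argo.argo_workflows_deployer_objects import (
    ArgoWorkflowsDeployedFlow,
)

# Rehydrate a DeployedFlow from an existing Argo Workflows template name.
deployed = ArgoWorkflowsDeployedFlow.from_deployment("helloflow")

# Trigger a run, passing flow Parameters as keyword arguments.
run = deployed.trigger(alpha=0.5)

# Block until the workflow leaves the Pending/Running states,
# raising TimeoutError if it takes longer than an hour.
run.wait_for_completion(check_interval=10, timeout=3600)
print(run.status)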