ob-metaflow 2.16.8.2rc1__py2.py3-none-any.whl → 2.17.0.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ob-metaflow might be problematic.
- metaflow/_vendor/click/core.py +3 -4
- metaflow/_vendor/imghdr/__init__.py +7 -1
- metaflow/_vendor/yaml/__init__.py +427 -0
- metaflow/_vendor/yaml/composer.py +139 -0
- metaflow/_vendor/yaml/constructor.py +748 -0
- metaflow/_vendor/yaml/cyaml.py +101 -0
- metaflow/_vendor/yaml/dumper.py +62 -0
- metaflow/_vendor/yaml/emitter.py +1137 -0
- metaflow/_vendor/yaml/error.py +75 -0
- metaflow/_vendor/yaml/events.py +86 -0
- metaflow/_vendor/yaml/loader.py +63 -0
- metaflow/_vendor/yaml/nodes.py +49 -0
- metaflow/_vendor/yaml/parser.py +589 -0
- metaflow/_vendor/yaml/reader.py +185 -0
- metaflow/_vendor/yaml/representer.py +389 -0
- metaflow/_vendor/yaml/resolver.py +227 -0
- metaflow/_vendor/yaml/scanner.py +1435 -0
- metaflow/_vendor/yaml/serializer.py +111 -0
- metaflow/_vendor/yaml/tokens.py +104 -0
- metaflow/cli.py +11 -2
- metaflow/cli_components/run_cmds.py +0 -15
- metaflow/client/core.py +6 -1
- metaflow/extension_support/__init__.py +4 -3
- metaflow/flowspec.py +1 -113
- metaflow/graph.py +10 -134
- metaflow/lint.py +3 -70
- metaflow/metaflow_environment.py +14 -6
- metaflow/package/__init__.py +18 -9
- metaflow/packaging_sys/__init__.py +53 -43
- metaflow/packaging_sys/backend.py +21 -6
- metaflow/packaging_sys/tar_backend.py +16 -3
- metaflow/packaging_sys/v1.py +21 -21
- metaflow/plugins/argo/argo_client.py +31 -14
- metaflow/plugins/argo/argo_workflows.py +67 -22
- metaflow/plugins/argo/argo_workflows_cli.py +348 -85
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +69 -0
- metaflow/plugins/aws/step_functions/step_functions.py +0 -6
- metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +30 -0
- metaflow/plugins/cards/card_modules/basic.py +3 -14
- metaflow/plugins/cards/card_modules/convert_to_native_type.py +7 -1
- metaflow/plugins/kubernetes/kubernetes_decorator.py +1 -1
- metaflow/plugins/kubernetes/kubernetes_job.py +8 -2
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +26 -28
- metaflow/plugins/pypi/conda_decorator.py +4 -2
- metaflow/runner/click_api.py +14 -7
- metaflow/runner/deployer.py +160 -7
- metaflow/runner/subprocess_manager.py +20 -12
- metaflow/runtime.py +27 -102
- metaflow/task.py +25 -46
- metaflow/user_decorators/mutable_flow.py +3 -1
- metaflow/util.py +0 -29
- metaflow/vendor.py +23 -6
- metaflow/version.py +1 -1
- {ob_metaflow-2.16.8.2rc1.dist-info → ob_metaflow-2.17.0.1.dist-info}/METADATA +2 -2
- {ob_metaflow-2.16.8.2rc1.dist-info → ob_metaflow-2.17.0.1.dist-info}/RECORD +62 -45
- {ob_metaflow-2.16.8.2rc1.data → ob_metaflow-2.17.0.1.data}/data/share/metaflow/devtools/Makefile +0 -0
- {ob_metaflow-2.16.8.2rc1.data → ob_metaflow-2.17.0.1.data}/data/share/metaflow/devtools/Tiltfile +0 -0
- {ob_metaflow-2.16.8.2rc1.data → ob_metaflow-2.17.0.1.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
- {ob_metaflow-2.16.8.2rc1.dist-info → ob_metaflow-2.17.0.1.dist-info}/WHEEL +0 -0
- {ob_metaflow-2.16.8.2rc1.dist-info → ob_metaflow-2.17.0.1.dist-info}/entry_points.txt +0 -0
- {ob_metaflow-2.16.8.2rc1.dist-info → ob_metaflow-2.17.0.1.dist-info}/licenses/LICENSE +0 -0
- {ob_metaflow-2.16.8.2rc1.dist-info → ob_metaflow-2.17.0.1.dist-info}/top_level.txt +0 -0
metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py
CHANGED
@@ -56,6 +56,20 @@ class StepFunctionsDeployedFlow(DeployedFlow):
 
     TYPE: ClassVar[Optional[str]] = "step-functions"
 
+    @classmethod
+    def list_deployed_flows(cls, flow_name: Optional[str] = None):
+        """
+        This method is not currently implemented for Step Functions.
+
+        Raises
+        ------
+        NotImplementedError
+            This method is not implemented for Step Functions.
+        """
+        raise NotImplementedError(
+            "list_deployed_flows is not implemented for StepFunctions"
+        )
+
     @classmethod
     def from_deployment(cls, identifier: str, metadata: Optional[str] = None):
         """
@@ -70,6 +84,22 @@ class StepFunctionsDeployedFlow(DeployedFlow):
             "from_deployment is not implemented for StepFunctions"
         )
 
+    @classmethod
+    def get_triggered_run(
+        cls, identifier: str, run_id: str, metadata: Optional[str] = None
+    ):
+        """
+        This method is not currently implemented for Step Functions.
+
+        Raises
+        ------
+        NotImplementedError
+            This method is not implemented for Step Functions.
+        """
+        raise NotImplementedError(
+            "get_triggered_run is not implemented for StepFunctions"
+        )
+
     @property
     def production_token(self: DeployedFlow) -> Optional[str]:
         """
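On Step Functions the new list_deployed_flows and get_triggered_run entry points are explicit stubs, so callers get a clear error instead of an AttributeError. A minimal sketch of what that looks like from user code, assuming DeployedFlow is exported from the top-level metaflow package (as in recent releases) and that the Step Functions provider is registered under the key "step_functions"; the identifiers are invented:

from metaflow import DeployedFlow

try:
    DeployedFlow.get_triggered_run(
        identifier="parameter-flow", run_id="pf-12345", impl="step_functions"
    )
except NotImplementedError as exc:
    print(exc)  # get_triggered_run is not implemented for StepFunctions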
metaflow/plugins/cards/card_modules/basic.py
CHANGED
@@ -20,15 +20,12 @@ def transform_flow_graph(step_info):
             return "split"
         elif node_type == "split-parallel" or node_type == "split-foreach":
             return "foreach"
-        elif node_type == "split-switch":
-            return "switch"
         return "unknown"  # Should never happen
 
     graph_dict = {}
     for stepname in step_info:
-        node_type = node_to_type(step_info[stepname]["type"])
-        node_info = {
-            "type": node_type,
+        graph_dict[stepname] = {
+            "type": node_to_type(step_info[stepname]["type"]),
             "box_next": step_info[stepname]["type"] not in ("linear", "join"),
             "box_ends": (
                 None
@@ -38,15 +35,6 @@ def transform_flow_graph(step_info):
             "next": step_info[stepname]["next"],
             "doc": step_info[stepname]["doc"],
         }
-
-        if node_type == "switch":
-            if "condition" in step_info[stepname]:
-                node_info["condition"] = step_info[stepname]["condition"]
-            if "switch_cases" in step_info[stepname]:
-                node_info["switch_cases"] = step_info[stepname]["switch_cases"]
-
-        graph_dict[stepname] = node_info
-
     return graph_dict
 
 
@@ -430,6 +418,7 @@ class TaskInfoComponent(MetaflowCardComponent):
             "Task Finished On": task_data_dict["finished_at"],
             # Remove Microseconds from timedelta
             "Tags": ", ".join(tags),
+            "Attempt": self._task.current_attempt,
         }
         if not self.runtime:
             task_metadata_dict["Task Duration"] = str(
metaflow/plugins/cards/card_modules/convert_to_native_type.py
CHANGED
@@ -146,7 +146,13 @@ class TaskToDict:
         # Python 3.13 removes the standard ``imghdr`` module. Metaflow
         # vendors a copy so we can keep using ``what`` to detect image
        # formats irrespective of the Python version.
-        from metaflow._vendor import imghdr
+        import warnings
+
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "ignore", category=DeprecationWarning, module="imghdr"
+            )
+            from metaflow._vendor import imghdr
 
         resp = imghdr.what(None, h=data_object)
         # Only accept types supported on the web
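The pattern above is the standard way to silence one category of warning for the duration of an import. A self-contained sketch of the same idea (the vendored module path comes from this package; the PNG signature bytes are just sample data):

import warnings

with warnings.catch_warnings():
    # Scoped suppression: only DeprecationWarning is ignored, and only inside
    # this block; the diff above additionally narrows it with module="imghdr".
    warnings.simplefilter("ignore", DeprecationWarning)
    from metaflow._vendor import imghdr

# The vendored `what` still detects common formats from raw bytes:
print(imghdr.what(None, h=b"\x89PNG\r\n\x1a\n" + b"\x00" * 16))  # -> png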
metaflow/plugins/kubernetes/kubernetes_decorator.py
CHANGED
@@ -98,7 +98,7 @@ class KubernetesDecorator(StepDecorator):
         the scheduled node should not have GPUs.
     gpu_vendor : str, default KUBERNETES_GPU_VENDOR
         The vendor of the GPUs to be used for this step.
-    tolerations : List[str], default []
+    tolerations : List[Dict[str,str]], default []
         The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
         Kubernetes tolerations to use when launching pod in Kubernetes.
     labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
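The docstring fix matches how tolerations are actually passed: a list of dicts following the Kubernetes toleration schema, not a list of strings. A hedged usage sketch (flow and step names are invented; the toleration mirrors a common GPU-node taint setup):

from metaflow import FlowSpec, kubernetes, step

class GpuFlow(FlowSpec):
    @kubernetes(
        gpu=1,
        tolerations=[
            # Dict form, per the corrected type above.
            {"key": "nvidia.com/gpu", "operator": "Exists", "effect": "NoSchedule"}
        ],
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    GpuFlow()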
metaflow/plugins/kubernetes/kubernetes_job.py
CHANGED
@@ -522,12 +522,10 @@ class RunningJob(object):
         # 3. If the pod object hasn't shown up yet, we set the parallelism to 0
         # to preempt it.
         client = self._client.get()
-
         if not self.is_done:
             if self.is_running:
                 # Case 1.
                 from kubernetes.stream import stream
-
                 api_instance = client.CoreV1Api
                 try:
                     # TODO: stream opens a web-socket connection. It may
@@ -593,6 +591,10 @@ class RunningJob(object):
             return self.id
         return "job %s" % self._name
 
+    @property
+    def is_unschedulable(self):
+        return self._job["metadata"]["annotations"].get("metaflow/job_status", "") == "Unsatisfiable_Resource_Request"
+
     @property
     def is_done(self):
         # Check if the container is done. As a side effect, also refreshes self._job and
@@ -606,6 +608,7 @@ class RunningJob(object):
                 or bool(self._job["status"].get("failed"))
                 or self._are_pod_containers_done
                 or (self._job["spec"]["parallelism"] == 0)
+                or self.is_unschedulable
             )
 
         if not done():
@@ -663,6 +666,7 @@ class RunningJob(object):
                 bool(self._job["status"].get("failed"))
                 or self._has_any_container_failed
                 or (self._job["spec"]["parallelism"] == 0)
+                or self.is_unschedulable
             )
             return retval
 
@@ -760,6 +764,8 @@ class RunningJob(object):
             return 0, None
         # Best effort since Pod object can disappear on us at anytime
         else:
+            if self.is_unschedulable:
+                return 1, self._job["metadata"]["annotations"].get("metaflow/job_status_reason", "")
             if self._pod.get("status", {}).get("phase") not in (
                 "Succeeded",
                 "Failed",
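Taken together, these hunks make a job whose metaflow/job_status annotation is set to Unsatisfiable_Resource_Request count as done and failed, and surface the metaflow/job_status_reason annotation as the error message. A minimal sketch of the check against a hand-written manifest dict (the annotation keys come from the diff; the reason text is illustrative):

job = {
    "metadata": {
        "annotations": {
            "metaflow/job_status": "Unsatisfiable_Resource_Request",
            "metaflow/job_status_reason": "requested resources exceed cluster capacity",
        }
    }
}

annotations = job["metadata"]["annotations"]
if annotations.get("metaflow/job_status", "") == "Unsatisfiable_Resource_Request":
    # Mirrors the new exit-code path: fail fast with the recorded reason.
    exit_code, reason = 1, annotations.get("metaflow/job_status_reason", "")
    print(exit_code, reason)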
metaflow/plugins/kubernetes/kubernetes_jobsets.py
CHANGED
@@ -6,6 +6,7 @@ from collections import namedtuple
 from metaflow.exception import MetaflowException
 from metaflow.metaflow_config import KUBERNETES_JOBSET_GROUP, KUBERNETES_JOBSET_VERSION
 from metaflow.tracing import inject_tracing_vars
+from metaflow._vendor import yaml
 
 from .kube_utils import qos_requests_and_limits
 
@@ -1025,34 +1026,32 @@ class KubernetesArgoJobSet(object):
 
     def dump(self):
         client = self._kubernetes_sdk
-        [...]
-                ),
-        [...]
-                ),
-            status=None,
-        )
+        js_dict = client.ApiClient().sanitize_for_serialization(
+            dict(
+                apiVersion=self._group + "/" + self._version,
+                kind="JobSet",
+                metadata=client.api_client.ApiClient().sanitize_for_serialization(
+                    client.V1ObjectMeta(
+                        name=self.name,
+                        labels=self._labels,
+                        annotations=self._annotations,
+                    )
+                ),
+                spec=dict(
+                    replicatedJobs=[self.control.dump(), self.worker.dump()],
+                    suspend=False,
+                    startupPolicy=None,
+                    successPolicy=None,
+                    # The Failure Policy helps setting the number of retries for the jobset.
+                    # but we don't rely on it and instead rely on either the local scheduler
+                    # or the Argo Workflows to handle retries.
+                    failurePolicy=None,
+                    network=None,
+                ),
+                status=None,
             )
         )
+        data = yaml.dump(js_dict, default_flow_style=False, indent=2)
         # The values we populate in the Jobset manifest (for Argo Workflows) piggybacks on the Argo Workflow's templating engine.
         # Even though Argo Workflows's templating helps us constructing all the necessary IDs and populating the fields
         # required by Metaflow, we run into one glitch. When we construct JSON/YAML serializable objects,
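The new dump builds a plain dict and runs it through the Kubernetes client's sanitize_for_serialization before handing it to the vendored PyYAML. A small sketch of why that step matters, assuming the kubernetes package is installed (stock yaml stands in for metaflow._vendor.yaml):

import yaml
from kubernetes import client

# Typed client models are not YAML-serializable as-is...
meta = client.V1ObjectMeta(name="demo-jobset", labels={"app": "metaflow"})

# ...but sanitize_for_serialization reduces them to plain dicts/lists/strings.
plain = client.ApiClient().sanitize_for_serialization(meta)
print(plain)  # {'name': 'demo-jobset', 'labels': {'app': 'metaflow'}} (key order may vary)

print(yaml.dump(plain, default_flow_style=False, indent=2))
# labels:
#   app: metaflow
# name: demo-jobset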
@@ -1067,7 +1066,6 @@ class KubernetesArgoJobSet(object):
         # Since the value of `num_parallel` can be dynamic and can change from run to run, we need to ensure that the
         # value can be passed-down dynamically and is **explicitly set as a integer** in the Jobset Manifest submitted as a
         # part of the Argo Workflow
-
-        quoted_substring = '"{{=asInt(inputs.parameters.workerCount)}}"'
+        quoted_substring = "'{{=asInt(inputs.parameters.workerCount)}}'"
         unquoted_substring = "{{=asInt(inputs.parameters.workerCount)}}"
         return data.replace(quoted_substring, unquoted_substring)
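The quote flip follows directly from the serializer change: json.dumps emits double quotes around string values, while yaml.dump emits single quotes for strings that start with "{". A self-contained illustration (stock yaml standing in for the vendored copy):

import json
import yaml

manifest = {"replicas": "{{=asInt(inputs.parameters.workerCount)}}"}

print(json.dumps(manifest))
# {"replicas": "{{=asInt(inputs.parameters.workerCount)}}"}   <- double quotes

data = yaml.dump(manifest, default_flow_style=False, indent=2)
print(data)
# replicas: '{{=asInt(inputs.parameters.workerCount)}}'       <- single quotes

# Stripping the quotes lets Argo's templating substitute a bare integer:
print(data.replace("'{{=asInt(inputs.parameters.workerCount)}}'",
                   "{{=asInt(inputs.parameters.workerCount)}}"))
# replicas: {{=asInt(inputs.parameters.workerCount)}}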
metaflow/plugins/pypi/conda_decorator.py
CHANGED
@@ -243,9 +243,11 @@ class CondaStepDecorator(StepDecorator):
         # Ensure local installation of Metaflow is visible to user code
         python_path = self.__class__._metaflow_home.name
         addl_env_vars = {}
-        if self.__class__._addl_env_vars
+        if self.__class__._addl_env_vars:
             for key, value in self.__class__._addl_env_vars.items():
-                if key == "PYTHONPATH":
+                if key.endswith(":"):
+                    addl_env_vars[key[:-1]] = value
+                elif key == "PYTHONPATH":
                     addl_env_vars[key] = os.pathsep.join([value, python_path])
                 else:
                     addl_env_vars[key] = value
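The convention introduced here: a key with a trailing ":" means "set this variable verbatim, replacing whatever is there", PYTHONPATH is merged with the Metaflow home, and everything else passes through unchanged. A standalone sketch of the same logic with invented values (output shown for POSIX, where os.pathsep is ":"):

import os

python_path = "/tmp/metaflow_home"   # stand-in for _metaflow_home.name
_addl_env_vars = {
    "PYTHONPATH": "/opt/pkgs",       # merged: value first, Metaflow home appended
    "MY_SETTING:": "override-me",    # trailing ":" -> set MY_SETTING verbatim
    "MY_FLAG": "1",                  # copied through unchanged
}

addl_env_vars = {}
for key, value in _addl_env_vars.items():
    if key.endswith(":"):
        addl_env_vars[key[:-1]] = value
    elif key == "PYTHONPATH":
        addl_env_vars[key] = os.pathsep.join([value, python_path])
    else:
        addl_env_vars[key] = value

print(addl_env_vars)
# {'PYTHONPATH': '/opt/pkgs:/tmp/metaflow_home', 'MY_SETTING': 'override-me', 'MY_FLAG': '1'}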
metaflow/runner/click_api.py
CHANGED
@@ -43,6 +43,7 @@ from metaflow._vendor.click.types import (
 )
 from metaflow.decorators import add_decorator_options
 from metaflow.exception import MetaflowException
+from metaflow.flowspec import _FlowState
 from metaflow.includefile import FilePathClass
 from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
 from metaflow.parameters import JSONTypeClass, flow_context
@@ -171,7 +172,6 @@ def _lazy_load_command(
     _self,
     name: str,
 ):
-
     # Context is not used in get_command so we can pass None. Since we pin click,
     # this won't change from under us.
 
@@ -516,6 +516,11 @@ class MetaflowAPI(object):
         # Note that if CLICK_API_PROCESS_CONFIG is False, we still do this because
         # it will init all parameters (config_options will be None)
         # We ignore any errors if we don't check the configs in the click API.
+
+        # Init all values in the flow mutators and then process them
+        for decorator in self._flow_cls._flow_state.get(_FlowState.FLOW_MUTATORS, []):
+            decorator.external_init()
+
         new_cls = self._flow_cls._process_config_decorators(
             config_options, process_configs=CLICK_API_PROCESS_CONFIG
         )
@@ -541,14 +546,16 @@ def extract_all_params(cmd_obj: Union[click.Command, click.Group]):
 
     for each_param in cmd_obj.params:
         if isinstance(each_param, click.Argument):
-            [...]
+            (
+                arg_params_sigs[each_param.name],
+                annotations[each_param.name],
+            ) = get_inspect_param_obj(each_param, inspect.Parameter.POSITIONAL_ONLY)
             arg_parameters[each_param.name] = each_param
         elif isinstance(each_param, click.Option):
-            [...]
+            (
+                opt_params_sigs[each_param.name],
+                annotations[each_param.name],
+            ) = get_inspect_param_obj(each_param, inspect.Parameter.KEYWORD_ONLY)
             opt_parameters[each_param.name] = each_param
 
         defaults[each_param.name] = each_param.default
metaflow/runner/deployer.py
CHANGED
@@ -13,7 +13,9 @@ def generate_fake_flow_file_contents(
 ):
     params_code = ""
     for _, param_details in param_info.items():
-        param_python_var_name = param_details["python_var_name"]
+        param_python_var_name = param_details.get(
+            "python_var_name", param_details["name"]
+        )
         param_name = param_details["name"]
         param_type = param_details["type"]
         param_help = param_details["description"]
@@ -229,7 +231,68 @@ class DeployedFlowMeta(type):
             }
         )
 
-        def
+        def _get_triggered_run_injected_method():
+            def f(
+                cls,
+                identifier: str,
+                run_id: str,
+                metadata: Optional[str] = None,
+                impl: str = DEFAULT_FROM_DEPLOYMENT_IMPL.replace("-", "_"),
+            ) -> "TriggeredRun":
+                """
+                Retrieves a `TriggeredRun` object from an identifier, a run id and optional
+                metadata. The `impl` parameter specifies the deployer implementation
+                to use (like `argo-workflows`).
+
+                Parameters
+                ----------
+                identifier : str
+                    Deployer specific identifier for the workflow to retrieve
+                run_id : str
+                    Run ID for which to fetch the triggered run object
+                metadata : str, optional, default None
+                    Optional deployer specific metadata.
+                impl : str, optional, default given by METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL
+                    The default implementation to use if not specified
+
+                Returns
+                -------
+                TriggeredRun
+                    A `TriggeredRun` object representing the triggered run corresponding
+                    to the identifier and the run id.
+                """
+                if impl in allowed_providers:
+                    return (
+                        allowed_providers[impl]
+                        .deployed_flow_type()
+                        .get_triggered_run(identifier, run_id, metadata)
+                    )
+                else:
+                    raise ValueError(
+                        f"No deployer '{impl}' exists; valid deployers are: "
+                        f"{list(allowed_providers.keys())}"
+                    )
+
+            f.__name__ = "get_triggered_run"
+            return f
+
+        def _per_type_get_triggered_run_injected_method(method_name, impl):
+            def f(
+                cls,
+                identifier: str,
+                run_id: str,
+                metadata: Optional[str] = None,
+            ):
+                return (
+                    allowed_providers[impl]
+                    .deployed_flow_type()
+                    .get_triggered_run(identifier, run_id, metadata)
+                )
+
+            f.__name__ = method_name
+            return f
+
+        def _from_deployment_injected_method():
             def f(
                 cls,
                 identifier: str,
@@ -271,7 +334,7 @@ class DeployedFlowMeta(type):
         f.__name__ = "from_deployment"
         return f
 
-        def
+        def _per_type_from_deployment_injected_method(method_name, impl):
             def f(
                 cls,
                 identifier: str,
@@ -286,14 +349,104 @@ class DeployedFlowMeta(type):
             f.__name__ = method_name
             return f
 
-        [...]
+        def _list_deployed_flows_injected_method():
+            def f(
+                cls,
+                flow_name: Optional[str] = None,
+                impl: str = DEFAULT_FROM_DEPLOYMENT_IMPL.replace("-", "_"),
+            ):
+                """
+                List all deployed flows for the specified implementation.
+
+                Parameters
+                ----------
+                flow_name : str, optional, default None
+                    If specified, only list deployed flows for this specific flow name.
+                    If None, list all deployed flows.
+                impl : str, optional, default given by METAFLOW_DEFAULT_FROM_DEPLOYMENT_IMPL
+                    The default implementation to use if not specified
+
+                Yields
+                ------
+                DeployedFlow
+                    `DeployedFlow` objects representing deployed flows.
+                """
+                if impl in allowed_providers:
+                    return (
+                        allowed_providers[impl]
+                        .deployed_flow_type()
+                        .list_deployed_flows(flow_name)
+                    )
+                else:
+                    raise ValueError(
+                        f"No deployer '{impl}' exists; valid deployers are: "
+                        f"{list(allowed_providers.keys())}"
+                    )
+
+            f.__name__ = "list_deployed_flows"
+            return f
+
+        def _per_type_list_deployed_flows_injected_method(method_name, impl):
+            def f(
+                cls,
+                flow_name: Optional[str] = None,
+            ):
+                return (
+                    allowed_providers[impl]
+                    .deployed_flow_type()
+                    .list_deployed_flows(flow_name)
+                )
+
+            f.__name__ = method_name
+            return f
+
+        setattr(
+            cls, "from_deployment", classmethod(_from_deployment_injected_method())
+        )
+        setattr(
+            cls,
+            "list_deployed_flows",
+            classmethod(_list_deployed_flows_injected_method()),
+        )
+        setattr(
+            cls,
+            "get_triggered_run",
+            classmethod(_get_triggered_run_injected_method()),
+        )
 
         for impl in allowed_providers:
-            [...]
+            from_deployment_method_name = f"from_{impl}"
+            list_deployed_flows_method_name = f"list_{impl}"
+            get_triggered_run_method_name = f"get_triggered_{impl}_run"
+
+            setattr(
+                cls,
+                from_deployment_method_name,
+                classmethod(
+                    _per_type_from_deployment_injected_method(
+                        from_deployment_method_name, impl
+                    )
+                ),
+            )
+
             setattr(
                 cls,
-                [...]
-                classmethod(
+                list_deployed_flows_method_name,
+                classmethod(
+                    _per_type_list_deployed_flows_injected_method(
+                        list_deployed_flows_method_name, impl
+                    )
+                ),
+            )
+
+            setattr(
+                cls,
+                get_triggered_run_method_name,
+                classmethod(
+                    _per_type_get_triggered_run_injected_method(
+                        get_triggered_run_method_name, impl
+                    )
+                ),
             )
 
         return cls
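With these factories wired up by the metaclass, every DeployedFlow subclass exposes both the generic entry points and per-implementation aliases (from_<impl>, list_<impl>, get_triggered_<impl>_run). A usage sketch, assuming DeployedFlow is exported from the top-level metaflow package and an Argo Workflows deployment exists; the identifiers are invented:

from metaflow import DeployedFlow

# Generic spelling: pick the implementation explicitly.
for df in DeployedFlow.list_deployed_flows(impl="argo_workflows"):
    print(df)

# Per-implementation aliases injected by the loop above:
#   DeployedFlow.list_argo_workflows()
#   DeployedFlow.from_argo_workflows("parameter-flow")
#   DeployedFlow.get_triggered_argo_workflows_run("parameter-flow", "argo-abc123")
tr = DeployedFlow.get_triggered_run(
    "parameter-flow", "argo-abc123", impl="argo_workflows"
)
print(tr)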
metaflow/runner/subprocess_manager.py
CHANGED
@@ -152,12 +152,20 @@ class SubprocessManager(object):
         int
             The process ID of the subprocess.
         """
-        [...]
+        env = env or {}
+        installed_root = os.environ.get("METAFLOW_EXTRACTED_ROOT", get_metaflow_root())
+
+        for k, v in MetaflowCodeContent.get_env_vars_for_packaged_metaflow(
+            installed_root
+        ).items():
+            if k.endswith(":"):
+                # Override
+                env[k[:-1]] = v
+            elif k in env:
+                env[k] = "%s:%s" % (v, env[k])
+            else:
+                env[k] = v
+
         command_obj = CommandManager(command, env, cwd)
         pid = command_obj.run(show_output=show_output)
         self.commands[pid] = command_obj
@@ -188,12 +196,12 @@ class SubprocessManager(object):
         int
             The process ID of the subprocess.
         """
-        [...]
-        env =
-        [...]
+        env = env or {}
+        if "PYTHONPATH" in env:
+            env["PYTHONPATH"] = "%s:%s" % (get_metaflow_root(), env["PYTHONPATH"])
+        else:
+            env["PYTHONPATH"] = get_metaflow_root()
+
         command_obj = CommandManager(command, env, cwd)
         pid = await command_obj.async_run()
         self.commands[pid] = command_obj
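Both branches implement the same precedence rule: the Metaflow root (or extracted package root) is placed in front of any caller-supplied PYTHONPATH so the packaged code resolves first. A standalone sketch of the async_run variant with an invented root path:

def merge_pythonpath(env, metaflow_root):
    # Prepend the Metaflow root, preserving any caller-supplied PYTHONPATH.
    env = dict(env or {})
    if "PYTHONPATH" in env:
        env["PYTHONPATH"] = "%s:%s" % (metaflow_root, env["PYTHONPATH"])
    else:
        env["PYTHONPATH"] = metaflow_root
    return env

print(merge_pythonpath({"PYTHONPATH": "/srv/libs"}, "/opt/metaflow"))
# {'PYTHONPATH': '/opt/metaflow:/srv/libs'}
print(merge_pythonpath(None, "/opt/metaflow"))
# {'PYTHONPATH': '/opt/metaflow'}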