ob-metaflow 2.15.14.1__py2.py3-none-any.whl → 2.15.17.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ob-metaflow might be problematic.
- metaflow/__init__.py +2 -2
- metaflow/_vendor/click/core.py +4 -3
- metaflow/cli.py +2 -2
- metaflow/cli_components/run_cmds.py +1 -1
- metaflow/cmd/develop/stub_generator.py +30 -16
- metaflow/cmd/develop/stubs.py +9 -27
- metaflow/datastore/task_datastore.py +3 -3
- metaflow/decorators.py +3 -3
- metaflow/extension_support/__init__.py +25 -42
- metaflow/flowspec.py +16 -4
- metaflow/metaflow_config.py +2 -0
- metaflow/parameters.py +2 -2
- metaflow/plugins/argo/argo_workflows.py +7 -0
- metaflow/plugins/argo/argo_workflows_cli.py +4 -4
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +6 -49
- metaflow/plugins/aws/aws_client.py +6 -0
- metaflow/plugins/cards/card_modules/chevron/renderer.py +1 -1
- metaflow/plugins/cards/card_modules/test_cards.py +6 -6
- metaflow/plugins/cards/component_serializer.py +1 -8
- metaflow/plugins/datatools/s3/s3op.py +1 -1
- metaflow/plugins/kubernetes/kubernetes.py +4 -0
- metaflow/plugins/kubernetes/kubernetes_cli.py +8 -0
- metaflow/plugins/kubernetes/kubernetes_decorator.py +11 -0
- metaflow/plugins/kubernetes/kubernetes_job.py +4 -2
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +5 -2
- metaflow/plugins/metadata_providers/service.py +12 -8
- metaflow/plugins/package_cli.py +12 -2
- metaflow/plugins/pypi/bootstrap.py +2 -2
- metaflow/plugins/uv/bootstrap.py +18 -1
- metaflow/plugins/uv/uv_environment.py +1 -1
- metaflow/runner/click_api.py +16 -9
- metaflow/runner/deployer.py +49 -0
- metaflow/runner/deployer_impl.py +17 -5
- metaflow/runner/metaflow_runner.py +40 -13
- metaflow/runner/subprocess_manager.py +1 -1
- metaflow/runner/utils.py +8 -0
- metaflow/user_configs/config_decorators.py +1 -1
- metaflow/user_configs/config_options.py +12 -8
- metaflow/user_configs/config_parameters.py +211 -45
- metaflow/util.py +2 -5
- metaflow/vendor.py +0 -1
- metaflow/version.py +1 -1
- {ob_metaflow-2.15.14.1.dist-info → ob_metaflow-2.15.17.1.dist-info}/METADATA +2 -2
- {ob_metaflow-2.15.14.1.dist-info → ob_metaflow-2.15.17.1.dist-info}/RECORD +51 -55
- {ob_metaflow-2.15.14.1.dist-info → ob_metaflow-2.15.17.1.dist-info}/WHEEL +1 -1
- metaflow/_vendor/v3_5/__init__.py +0 -1
- metaflow/_vendor/v3_5/importlib_metadata/__init__.py +0 -644
- metaflow/_vendor/v3_5/importlib_metadata/_compat.py +0 -152
- metaflow/_vendor/v3_5/zipp.py +0 -329
- {ob_metaflow-2.15.14.1.data → ob_metaflow-2.15.17.1.data}/data/share/metaflow/devtools/Makefile +0 -0
- {ob_metaflow-2.15.14.1.data → ob_metaflow-2.15.17.1.data}/data/share/metaflow/devtools/Tiltfile +0 -0
- {ob_metaflow-2.15.14.1.data → ob_metaflow-2.15.17.1.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
- {ob_metaflow-2.15.14.1.dist-info → ob_metaflow-2.15.17.1.dist-info}/entry_points.txt +0 -0
- {ob_metaflow-2.15.14.1.dist-info → ob_metaflow-2.15.17.1.dist-info}/licenses/LICENSE +0 -0
- {ob_metaflow-2.15.14.1.dist-info → ob_metaflow-2.15.17.1.dist-info}/top_level.txt +0 -0
metaflow/plugins/cards/card_modules/test_cards.py
CHANGED
@@ -34,7 +34,7 @@ class TestPathSpecCard(MetaflowCard):
 class TestEditableCard(MetaflowCard):
     type = "test_editable_card"
 
-
+    separator = "$&#!!@*"
 
     ALLOW_USER_COMPONENTS = True
 
@@ -42,13 +42,13 @@ class TestEditableCard(MetaflowCard):
         self._components = components
 
     def render(self, task):
-        return self.
+        return self.separator.join([str(comp) for comp in self._components])
 
 
 class TestEditableCard2(MetaflowCard):
     type = "test_editable_card_2"
 
-
+    separator = "$&#!!@*"
 
     ALLOW_USER_COMPONENTS = True
 
@@ -56,19 +56,19 @@ class TestEditableCard2(MetaflowCard):
         self._components = components
 
     def render(self, task):
-        return self.
+        return self.separator.join([str(comp) for comp in self._components])
 
 
 class TestNonEditableCard(MetaflowCard):
     type = "test_non_editable_card"
 
-
+    separator = "$&#!!@*"
 
     def __init__(self, components=[], **kwargs):
         self._components = components
 
     def render(self, task):
-        return self.
+        return self.separator.join([str(comp) for comp in self._components])
 
 
 class TestMockCard(MetaflowCard):
metaflow/plugins/cards/component_serializer.py
CHANGED
@@ -57,15 +57,8 @@ class ComponentStore:
         The `_component_map` attribute is supposed to be a dictionary so that we can access the components by their ids.
         But we also want to maintain order in which components are inserted since all of these components are going to be visible on a UI.
         Since python3.6 dictionaries are ordered by default so we can use the default python `dict`.
-        For python3.5 and below we need to use an OrderedDict since `dict`'s are not ordered by default.
         """
-
-            platform.python_version_tuple()[1]
-        )
-        if python_version < 36:
-            self._component_map = OrderedDict()
-        else:
-            self._component_map = {}
+        self._component_map = {}
 
     def __init__(self, logger, card_type=None, components=None, user_set_id=None):
         self._logger = logger
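The dropped fallback relies on plain dicts preserving insertion order, which the language guarantees from Python 3.7 onwards. A minimal illustration of the ordering the component map depends on (the component names are made up):

    # Plain dicts preserve insertion order (a language guarantee since Python 3.7),
    # so an OrderedDict is no longer needed to keep components in display order.
    component_map = {}
    component_map["chart"] = "line chart"
    component_map["table"] = "summary table"
    component_map["log"] = "task log"
    assert list(component_map) == ["chart", "table", "log"]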
metaflow/plugins/datatools/s3/s3op.py
CHANGED
@@ -131,7 +131,7 @@ def normalize_client_error(err):
     except ValueError:
         if error_code in ("AccessDenied", "AllAccessDisabled", "InvalidAccessKeyId"):
             return 403
-        if error_code
+        if error_code in ("NoSuchKey", "NoSuchBucket"):
             return 404
         if error_code == "InvalidRange":
             return 416
metaflow/plugins/kubernetes/kubernetes.py
CHANGED
@@ -170,6 +170,7 @@ class Kubernetes(object):
         code_package_ds,
         docker_image,
         docker_image_pull_policy,
+        image_pull_secrets=None,
         step_cli=None,
         service_account=None,
         secrets=None,
@@ -206,6 +207,7 @@ class Kubernetes(object):
             node_selector=node_selector,
             image=docker_image,
             image_pull_policy=docker_image_pull_policy,
+            image_pull_secrets=image_pull_secrets,
             cpu=cpu,
             memory=memory,
             disk=disk,
@@ -483,6 +485,7 @@ class Kubernetes(object):
         step_cli,
         docker_image,
         docker_image_pull_policy,
+        image_pull_secrets=None,
         service_account=None,
         secrets=None,
         node_selector=None,
@@ -529,6 +532,7 @@ class Kubernetes(object):
             ),
             image=docker_image,
             image_pull_policy=docker_image_pull_policy,
+            image_pull_secrets=image_pull_secrets,
             cpu=cpu,
             memory=memory,
             disk=disk,
metaflow/plugins/kubernetes/kubernetes_cli.py
CHANGED
@@ -53,6 +53,12 @@ def kubernetes():
     default=None,
     help="Optional Docker Image Pull Policy for Kubernetes pod.",
 )
+@click.option(
+    "--image-pull-secrets",
+    default=None,
+    type=JSONTypeClass(),
+    multiple=False,
+)
 @click.option(
     "--service-account",
     help="IRSA requirement for Kubernetes pod.",
@@ -160,6 +166,7 @@ def step(
     executable=None,
     image=None,
     image_pull_policy=None,
+    image_pull_secrets=None,
     service_account=None,
     secrets=None,
     node_selector=None,
@@ -303,6 +310,7 @@ def step(
         step_cli=step_cli,
         docker_image=image,
         docker_image_pull_policy=image_pull_policy,
+        image_pull_secrets=image_pull_secrets,
         service_account=service_account,
         secrets=secrets,
         node_selector=node_selector,
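Because the new --image-pull-secrets option is declared with JSONTypeClass(), its value travels as a JSON document on the command line. A small sketch of how a caller might serialize the secret list; the secret names are hypothetical:

    import json

    # Hypothetical secret names; the flag is declared with JSONTypeClass(), so the
    # value is passed as a JSON document rather than a plain comma-separated string.
    image_pull_secrets = ["regcred", "gcr-pull"]
    cli_value = json.dumps(image_pull_secrets)
    print(cli_value)  # '["regcred", "gcr-pull"]'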
metaflow/plugins/kubernetes/kubernetes_decorator.py
CHANGED
@@ -18,6 +18,7 @@ from metaflow.metaflow_config import (
     KUBERNETES_FETCH_EC2_METADATA,
     KUBERNETES_GPU_VENDOR,
     KUBERNETES_IMAGE_PULL_POLICY,
+    KUBERNETES_IMAGE_PULL_SECRETS,
     KUBERNETES_MEMORY,
     KUBERNETES_LABELS,
     KUBERNETES_ANNOTATIONS,
@@ -74,6 +75,10 @@ class KubernetesDecorator(StepDecorator):
         not, a default Docker image mapping to the current version of Python is used.
     image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
         If given, the imagePullPolicy to be applied to the Docker image of the step.
+    image_pull_secrets: List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+        Kubernetes image pull secrets to use when pulling container images
+        in Kubernetes.
     service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
         Kubernetes service account to use when launching pod in Kubernetes.
     secrets : List[str], optional, default None
@@ -141,6 +146,7 @@ class KubernetesDecorator(StepDecorator):
         "disk": "10240",
         "image": None,
         "image_pull_policy": None,
+        "image_pull_secrets": None,  # e.g., ["regcred"]
         "service_account": None,
         "secrets": None,  # e.g., mysecret
         "node_selector": None,  # e.g., kubernetes.io/os=linux
@@ -194,6 +200,10 @@ class KubernetesDecorator(StepDecorator):
         )
         if not self.attributes["image_pull_policy"] and KUBERNETES_IMAGE_PULL_POLICY:
             self.attributes["image_pull_policy"] = KUBERNETES_IMAGE_PULL_POLICY
+        if not self.attributes["image_pull_secrets"] and KUBERNETES_IMAGE_PULL_SECRETS:
+            self.attributes["image_pull_secrets"] = json.loads(
+                KUBERNETES_IMAGE_PULL_SECRETS
+            )
 
         if isinstance(self.attributes["node_selector"], str):
             self.attributes["node_selector"] = parse_kube_keyvalue_list(
@@ -494,6 +504,7 @@ class KubernetesDecorator(StepDecorator):
                        for key, val in v.items()
                    ]
                elif k in [
+                    "image_pull_secrets",
                     "tolerations",
                     "persistent_volume_claims",
                     "labels",
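Taken together with the CLI changes above, this exposes image pull secrets as a step-level attribute. A hypothetical flow using the new attribute (the image and secret name are illustrative, not defaults):

    from metaflow import FlowSpec, kubernetes, step


    class PrivateImageFlow(FlowSpec):
        # "regcred" stands in for a Kubernetes secret holding registry credentials.
        @kubernetes(
            image="registry.example.com/team/image:1.0",
            image_pull_secrets=["regcred"],
        )
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass


    if __name__ == "__main__":
        PrivateImageFlow()

Per the decorator code above, setting METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS to a JSON list (for example ["regcred"]) supplies a default for steps that do not set the attribute themselves.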
metaflow/plugins/kubernetes/kubernetes_job.py
CHANGED
@@ -235,8 +235,10 @@ class KubernetesJob(object):
                            )
                        ],
                        node_selector=self._kwargs.get("node_selector"),
-
-
+                        image_pull_secrets=[
+                            client.V1LocalObjectReference(secret)
+                            for secret in self._kwargs.get("image_pull_secrets") or []
+                        ],
                        # TODO (savin): Support preemption policies
                        # preemption_policy=?,
                        #
metaflow/plugins/kubernetes/kubernetes_jobsets.py
CHANGED
@@ -718,8 +718,11 @@ class JobSetSpec(object):
                            )
                        ],
                        node_selector=self._kwargs.get("node_selector"),
-
-
+                        image_pull_secrets=[
+                            client.V1LocalObjectReference(secret)
+                            for secret in self._kwargs.get("image_pull_secrets")
+                            or []
+                        ],
                        # TODO (savin): Support preemption policies
                        # preemption_policy=?,
                        #
metaflow/plugins/metadata_providers/service.py
CHANGED
@@ -72,14 +72,18 @@ class ServiceMetadataProvider(MetadataProvider):
     @classmethod
     def compute_info(cls, val):
         v = val.rstrip("/")
-
-
-
-
-
-
-
-
+        for i in range(SERVICE_RETRY_COUNT):
+            try:
+                resp = cls._session.get(
+                    os.path.join(v, "ping"), headers=SERVICE_HEADERS.copy()
+                )
+                resp.raise_for_status()
+            except:  # noqa E722
+                time.sleep(2 ** (i - 1))
+            else:
+                return v
+
+        raise ValueError("Metaflow service [%s] unreachable." % v)
 
     @classmethod
     def default_info(cls):
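The rewritten compute_info pings the service up to SERVICE_RETRY_COUNT times and backs off exponentially; since the exponent starts at i - 1, the first delay is half a second. A sketch of the resulting sleep schedule, assuming a retry count of 5 purely for illustration:

    # SERVICE_RETRY_COUNT of 5 is assumed here for illustration; Metaflow reads
    # the real value from its configuration.
    SERVICE_RETRY_COUNT = 5
    schedule = [2 ** (i - 1) for i in range(SERVICE_RETRY_COUNT)]
    print(schedule)  # [0.5, 1, 2, 4, 8] -> seconds slept after each failed ping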
metaflow/plugins/package_cli.py
CHANGED
@@ -38,14 +38,24 @@ def info(obj):
 
 
 @package.command(help="List files included in the code package.")
+@click.option(
+    "--archive/--no-archive",
+    default=False,
+    help="If True, lists the file paths as present in the code package archive; "
+    "otherwise, lists the files on your filesystem included in the code package",
+    show_default=True,
+)
 @click.pass_obj
-def list(obj):
+def list(obj, archive=False):
     obj.echo(
         "Files included in the code package " "(change with --package-suffixes):",
         fg="magenta",
         bold=False,
     )
-
+    if archive:
+        obj.echo_always("\n".join(path for _, path in obj.package.path_tuples()))
+    else:
+        obj.echo_always("\n".join(path for path, _ in obj.package.path_tuples()))
 
 
 @package.command(help="Save the current code package in a tar file")
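The new flag selects which half of each (filesystem path, archive path) tuple gets printed, e.g. via python flow.py package list --archive. A minimal sketch of the selection logic with made-up path_tuples() values:

    # Hypothetical (filesystem path, archive path) pairs in the shape returned by
    # obj.package.path_tuples(); real values depend on the project layout.
    path_tuples = [
        ("/home/user/project/flow.py", "flow.py"),
        ("/home/user/project/util/helpers.py", "util/helpers.py"),
    ]

    archive = True
    if archive:
        print("\n".join(path for _, path in path_tuples))  # paths inside the archive
    else:
        print("\n".join(path for path, _ in path_tuples))  # paths on the local filesystem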
metaflow/plugins/pypi/bootstrap.py
CHANGED
@@ -14,7 +14,7 @@ from urllib.request import urlopen
 from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONDA_USE_FAST_INIT
 from metaflow.plugins import DATASTORES
 from metaflow.plugins.pypi.utils import MICROMAMBA_MIRROR_URL, MICROMAMBA_URL
-from metaflow.util import which
+from metaflow.util import which, get_metaflow_root
 from urllib.request import Request
 import warnings
 
@@ -366,7 +366,7 @@ if __name__ == "__main__":
     # Move MAGIC_FILE inside local datastore.
     os.makedirs(manifest_dir, exist_ok=True)
     shutil.move(
-        os.path.join(
+        os.path.join(get_metaflow_root(), MAGIC_FILE),
         os.path.join(manifest_dir, MAGIC_FILE),
     )
     with open(os.path.join(manifest_dir, MAGIC_FILE)) as f:
metaflow/plugins/uv/bootstrap.py
CHANGED
@@ -4,6 +4,7 @@ import sys
 import time
 
 from metaflow.util import which
+from metaflow.info_file import read_info_file
 from metaflow.metaflow_config import get_pinned_conda_libs
 from urllib.request import Request, urlopen
 from urllib.error import URLError
@@ -78,11 +79,27 @@
         # return only dependency names instead of pinned versions
         return pinned.keys()
 
+    def skip_metaflow_dependencies():
+        skip_pkgs = ["metaflow", "ob-metaflow"]
+        info = read_info_file()
+        if info is not None:
+            try:
+                skip_pkgs.extend([ext_name for ext_name in info["ext_info"][0].keys()])
+            except Exception:
+                print(
+                    "Failed to read INFO. Metaflow-related packages might get installed during runtime."
+                )
+
+        return skip_pkgs
+
     def sync_uv_project(datastore_type):
         print("Syncing uv project...")
         dependencies = " ".join(get_dependencies(datastore_type))
+        skip_pkgs = " ".join(
+            [f"--no-install-package {dep}" for dep in skip_metaflow_dependencies()]
+        )
         cmd = f"""set -e;
-            uv sync --frozen
+            uv sync --frozen {skip_pkgs};
             uv pip install {dependencies} --strict
         """
         run_cmd(cmd)
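With the extension names read from the INFO file, the generated uv command skips re-installing Metaflow and its extensions from the project's lockfile, since those are already provisioned in the task image. A sketch of the command string this produces, using a made-up extension name:

    # Hypothetical skip list; the real one comes from skip_metaflow_dependencies().
    skip = ["metaflow", "ob-metaflow", "my-metaflow-extension"]
    skip_pkgs = " ".join(f"--no-install-package {dep}" for dep in skip)
    print(f"uv sync --frozen {skip_pkgs};")
    # uv sync --frozen --no-install-package metaflow --no-install-package ob-metaflow --no-install-package my-metaflow-extension;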
metaflow/plugins/uv/uv_environment.py
CHANGED
@@ -22,7 +22,7 @@ class UVEnvironment(MetaflowEnvironment):
         self.logger("Bootstrapping uv...")
 
     def executable(self, step_name, default=None):
-        return "uv run python"
+        return "uv run --no-sync python"
 
     def add_to_package(self):
         # NOTE: We treat uv.lock and pyproject.toml as regular project assets and ship these along user code as part of the code package
metaflow/runner/click_api.py
CHANGED
@@ -467,9 +467,14 @@ class MetaflowAPI(object):
         config_file = defaults.get("config")
 
         if config_file:
-            config_file =
-
-
+            config_file = dict(
+                map(
+                    lambda x: (
+                        x[0],
+                        ConvertPath.convert_value(x[1], is_default),
+                    ),
+                    config_file,
+                )
             )
 
         is_default = False
@@ -479,12 +484,14 @@ class MetaflowAPI(object):
         config_value = defaults.get("config_value")
 
         if config_value:
-            config_value =
-
-                    x
-
-
-
+            config_value = dict(
+                map(
+                    lambda x: (
+                        x[0],
+                        ConvertDictOrStr.convert_value(x[1], is_default),
+                    ),
+                    config_value,
+                )
             )
 
         if (config_file is None) ^ (config_value is None):
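The restored conversion maps each (name, value) pair of the defaults through the appropriate converter; it is equivalent to a dict comprehension. A small sketch with a stand-in converter (ConvertPath itself is not reproduced here):

    # Stand-in converter, purely for illustration; the real code uses
    # ConvertPath.convert_value / ConvertDictOrStr.convert_value.
    def convert_value(value, is_default):
        return ("default:" if is_default else "") + value

    pairs = (("cfg", "config.json"), ("other", "other.json"))
    is_default = True

    converted = dict(map(lambda x: (x[0], convert_value(x[1], is_default)), pairs))
    assert converted == {k: convert_value(v, is_default) for k, v in pairs}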
metaflow/runner/deployer.py
CHANGED
@@ -7,6 +7,55 @@ from typing import ClassVar, Dict, Optional, TYPE_CHECKING
 from metaflow.exception import MetaflowNotFound
 from metaflow.metaflow_config import DEFAULT_FROM_DEPLOYMENT_IMPL
 
+
+def generate_fake_flow_file_contents(
+    flow_name: str, param_info: dict, project_name: Optional[str] = None
+):
+    params_code = ""
+    for _, param_details in param_info.items():
+        param_python_var_name = param_details["python_var_name"]
+        param_name = param_details["name"]
+        param_type = param_details["type"]
+        param_help = param_details["description"]
+        param_required = param_details["is_required"]
+
+        if param_type == "JSON":
+            params_code += (
+                f"    {param_python_var_name} = Parameter('{param_name}', "
+                f"type=JSONType, help='''{param_help}''', required={param_required})\n"
+            )
+        elif param_type == "FilePath":
+            is_text = param_details.get("is_text", True)
+            encoding = param_details.get("encoding", "utf-8")
+            params_code += (
+                f"    {param_python_var_name} = IncludeFile('{param_name}', "
+                f"is_text={is_text}, encoding='{encoding}', help='''{param_help}''', "
+                f"required={param_required})\n"
+            )
+        else:
+            params_code += (
+                f"    {param_python_var_name} = Parameter('{param_name}', "
+                f"type={param_type}, help='''{param_help}''', required={param_required})\n"
+            )
+
+    project_decorator = f"@project(name='{project_name}')\n" if project_name else ""
+
+    contents = f"""\
+from metaflow import FlowSpec, Parameter, IncludeFile, JSONType, step, project
+{project_decorator}class {flow_name}(FlowSpec):
+{params_code}
+    @step
+    def start(self):
+        self.next(self.end)
+    @step
+    def end(self):
+        pass
+if __name__ == '__main__':
+    {flow_name}()
+"""
+    return contents
+
+
 if TYPE_CHECKING:
     import metaflow
     import metaflow.runner.deployer_impl
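generate_fake_flow_file_contents emits a stub flow whose parameters mirror a deployed flow, so the deployer machinery can parse and trigger a deployment without the original source file. A hypothetical param_info entry and the call that would render it (the flow and parameter names are made up):

    from metaflow.runner.deployer import generate_fake_flow_file_contents

    # Hypothetical parameter metadata; in practice this is derived from the deployed flow.
    param_info = {
        "alpha": {
            "python_var_name": "alpha",
            "name": "alpha",
            "type": "float",
            "description": "learning rate",
            "is_required": False,
        }
    }
    print(generate_fake_flow_file_contents("TrainFlow", param_info))

The printed stub declares alpha as a Parameter on a trivial start/end graph, which is all the downstream tooling needs.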
metaflow/runner/deployer_impl.py
CHANGED
@@ -5,8 +5,10 @@ import sys
 
 from typing import Any, ClassVar, Dict, Optional, TYPE_CHECKING, Type
 
+from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
+
 from .subprocess_manager import SubprocessManager
-from .utils import get_lower_level_group, handle_timeout, temporary_fifo
+from .utils import get_lower_level_group, handle_timeout, temporary_fifo, with_dir
 
 if TYPE_CHECKING:
     import metaflow.runner.deployer
@@ -88,7 +90,7 @@ class DeployerImpl(object):
         self.show_output = show_output
         self.profile = profile
         self.env = env
-        self.cwd = cwd
+        self.cwd = cwd or os.getcwd()
         self.file_read_timeout = file_read_timeout
 
         self.env_vars = os.environ.copy()
@@ -140,9 +142,19 @@ class DeployerImpl(object):
     ) -> "metaflow.runner.deployer.DeployedFlow":
         with temporary_fifo() as (attribute_file_path, attribute_file_fd):
             # every subclass needs to have `self.deployer_kwargs`
-
-
-
+            # TODO: Get rid of CLICK_API_PROCESS_CONFIG in the near future
+            if CLICK_API_PROCESS_CONFIG:
+                # We need to run this in the cwd because configs depend on files
+                # that may be located in paths relative to the directory the user
+                # wants to run in
+                with with_dir(self.cwd):
+                    command = get_lower_level_group(
+                        self.api, self.top_level_kwargs, self.TYPE, self.deployer_kwargs
+                    ).create(deployer_attribute_file=attribute_file_path, **kwargs)
+            else:
+                command = get_lower_level_group(
+                    self.api, self.top_level_kwargs, self.TYPE, self.deployer_kwargs
+                ).create(deployer_attribute_file=attribute_file_path, **kwargs)
 
             pid = self.spm.run_command(
                 [sys.executable, *command],
metaflow/runner/metaflow_runner.py
CHANGED
@@ -7,12 +7,15 @@ from typing import Dict, Iterator, Optional, Tuple
 
 from metaflow import Run
 
+from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
+
 from metaflow.plugins import get_runner_cli
 
 from .utils import (
     temporary_fifo,
     handle_timeout,
     async_handle_timeout,
+    with_dir,
 )
 from .subprocess_manager import CommandManager, SubprocessManager
 
@@ -299,7 +302,7 @@ class Runner(metaclass=RunnerMeta):
         if profile:
             self.env_vars["METAFLOW_PROFILE"] = profile
 
-        self.cwd = cwd
+        self.cwd = cwd or os.getcwd()
         self.file_read_timeout = file_read_timeout
         self.spm = SubprocessManager()
         self.top_level_kwargs = kwargs
@@ -359,9 +362,15 @@ class Runner(metaclass=RunnerMeta):
             ExecutingRun containing the results of the run.
         """
         with temporary_fifo() as (attribute_file_path, attribute_file_fd):
-
-
-
+            if CLICK_API_PROCESS_CONFIG:
+                with with_dir(self.cwd):
+                    command = self.api(**self.top_level_kwargs).run(
+                        runner_attribute_file=attribute_file_path, **kwargs
+                    )
+            else:
+                command = self.api(**self.top_level_kwargs).run(
+                    runner_attribute_file=attribute_file_path, **kwargs
+                )
 
             pid = self.spm.run_command(
                 [sys.executable, *command],
@@ -390,9 +399,15 @@ class Runner(metaclass=RunnerMeta):
             ExecutingRun containing the results of the resumed run.
         """
         with temporary_fifo() as (attribute_file_path, attribute_file_fd):
-
-
-
+            if CLICK_API_PROCESS_CONFIG:
+                with with_dir(self.cwd):
+                    command = self.api(**self.top_level_kwargs).resume(
+                        runner_attribute_file=attribute_file_path, **kwargs
+                    )
+            else:
+                command = self.api(**self.top_level_kwargs).resume(
+                    runner_attribute_file=attribute_file_path, **kwargs
+                )
 
             pid = self.spm.run_command(
                 [sys.executable, *command],
@@ -423,9 +438,15 @@ class Runner(metaclass=RunnerMeta):
             ExecutingRun representing the run that was started.
         """
         with temporary_fifo() as (attribute_file_path, attribute_file_fd):
-
-
-
+            if CLICK_API_PROCESS_CONFIG:
+                with with_dir(self.cwd):
+                    command = self.api(**self.top_level_kwargs).run(
+                        runner_attribute_file=attribute_file_path, **kwargs
+                    )
+            else:
+                command = self.api(**self.top_level_kwargs).run(
+                    runner_attribute_file=attribute_file_path, **kwargs
+                )
 
             pid = await self.spm.async_run_command(
                 [sys.executable, *command],
@@ -455,9 +476,15 @@ class Runner(metaclass=RunnerMeta):
             ExecutingRun representing the resumed run that was started.
         """
         with temporary_fifo() as (attribute_file_path, attribute_file_fd):
-
-
-
+            if CLICK_API_PROCESS_CONFIG:
+                with with_dir(self.cwd):
+                    command = self.api(**self.top_level_kwargs).resume(
+                        runner_attribute_file=attribute_file_path, **kwargs
+                    )
+            else:
+                command = self.api(**self.top_level_kwargs).resume(
+                    runner_attribute_file=attribute_file_path, **kwargs
+                )
 
             pid = await self.spm.async_run_command(
                 [sys.executable, *command],
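With cwd now defaulting to os.getcwd() and the optional chdir guarded by CLICK_API_PROCESS_CONFIG, relative config paths resolve against the directory the Runner is created in. A hedged usage sketch; the flow file and the alpha parameter are hypothetical:

    from metaflow import Runner

    # "flow.py" is a hypothetical flow file in the current working directory;
    # cwd defaults to os.getcwd(), so config files referenced by relative paths
    # are looked up from here.
    with Runner("flow.py", pylint=False) as runner:
        result = runner.run(alpha=0.1)
        print(result.status)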
metaflow/runner/subprocess_manager.py
CHANGED
@@ -237,7 +237,7 @@ class CommandManager(object):
         self.command = command
 
         self.env = env if env is not None else os.environ.copy()
-        self.cwd = cwd
+        self.cwd = cwd or os.getcwd()
 
         self.process = None
         self.stdout_thread = None
metaflow/runner/utils.py
CHANGED
@@ -322,3 +322,11 @@ def get_lower_level_group(
         raise ValueError(f"Sub-command '{sub_command}' not found in API '{api.name}'")
 
     return sub_command_obj(**sub_command_kwargs)
+
+
+@contextmanager
+def with_dir(new_dir):
+    current_dir = os.getcwd()
+    os.chdir(new_dir)
+    yield new_dir
+    os.chdir(current_dir)
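with_dir temporarily switches the process working directory and switches back after the yield; as written, an exception raised inside the block propagates before the final os.chdir runs. A small usage sketch with a stand-in directory:

    import os
    from metaflow.runner.utils import with_dir

    # "/tmp" stands in for the directory a Runner or Deployer wants to execute in.
    print(os.getcwd())
    with with_dir("/tmp"):
        print(os.getcwd())  # relative config paths now resolve against /tmp
    print(os.getcwd())  # back to the original directory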
metaflow/user_configs/config_decorators.py
CHANGED
@@ -200,7 +200,7 @@ class MutableFlow:
         for name, value in self._flow_cls._flow_state.get(
             _FlowState.CONFIGS, {}
         ).items():
-            yield name, ConfigValue(value)
+            yield name, ConfigValue(value) if value is not None else None
 
     @property
     def parameters(self) -> Generator[Tuple[str, Any], None, None]: