metaflow 2.15.0__py2.py3-none-any.whl → 2.15.2__py2.py3-none-any.whl
This diff shows the content changes between two package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- metaflow/__init__.py +7 -0
- metaflow/cmd/code/__init__.py +230 -0
- metaflow/cmd/develop/stub_generator.py +5 -2
- metaflow/cmd/main_cli.py +1 -0
- metaflow/cmd/make_wrapper.py +35 -3
- metaflow/metaflow_config.py +2 -0
- metaflow/metaflow_environment.py +3 -1
- metaflow/mflog/__init__.py +4 -3
- metaflow/plugins/kubernetes/kubernetes_decorator.py +2 -1
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +2 -0
- metaflow/plugins/pypi/bootstrap.py +17 -26
- metaflow/plugins/pypi/conda_environment.py +8 -8
- metaflow/plugins/pypi/parsers.py +268 -0
- metaflow/plugins/pypi/utils.py +18 -0
- metaflow/version.py +1 -1
- {metaflow-2.15.0.data → metaflow-2.15.2.data}/data/share/metaflow/devtools/Makefile +21 -12
- {metaflow-2.15.0.data → metaflow-2.15.2.data}/data/share/metaflow/devtools/Tiltfile +23 -5
- {metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/METADATA +2 -2
- {metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/RECORD +24 -22
- {metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/WHEEL +1 -1
- {metaflow-2.15.0.data → metaflow-2.15.2.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
- {metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/LICENSE +0 -0
- {metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/entry_points.txt +0 -0
- {metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/top_level.txt +0 -0
metaflow/__init__.py
CHANGED
@@ -119,6 +119,13 @@ from .includefile import IncludeFile
 # Decorators
 from .decorators import step, _import_plugin_decorators
 
+# Config parsers
+from .plugins.pypi.parsers import (
+    requirements_txt_parser,
+    pyproject_toml_parser,
+    conda_environment_yml_parser,
+)
+
 # this auto-generates decorator functions from Decorator objects
 # in the top-level metaflow namespace
 _import_plugin_decorators(globals())
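The net effect is that the three parsers become importable straight from the top-level package; a quick, illustrative check:

    # Illustrative only: exercises the new top-level exports added by this hunk.
    from metaflow import (
        requirements_txt_parser,
        pyproject_toml_parser,
        conda_environment_yml_parser,
    )

    print(requirements_txt_parser("requests==2.32.0")["packages"])  # {'requests': '2.32.0'}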
metaflow/cmd/code/__init__.py
ADDED
@@ -0,0 +1,230 @@
+import os
+import shutil
+import sys
+from subprocess import PIPE, CompletedProcess, run
+from tempfile import TemporaryDirectory
+from typing import Any, Callable, List, Mapping, Optional, cast
+
+from metaflow import Run
+from metaflow._vendor import click
+from metaflow.cli import echo_always
+
+
+@click.group()
+def cli():
+    pass
+
+
+@cli.group(help="Access, compare, and manage code associated with Metaflow runs.")
+def code():
+    pass
+
+
+def echo(line: str) -> None:
+    echo_always(line, err=True, fg="magenta")
+
+
+def extract_code_package(runspec: str) -> TemporaryDirectory:
+    try:
+        mf_run = Run(runspec, _namespace_check=False)
+        echo(f"✅ Run *{runspec}* found, downloading code..")
+    except Exception as e:
+        echo(f"❌ Run **{runspec}** not found")
+        raise e
+
+    if mf_run.code is None:
+        echo(
+            f"❌ Run **{runspec}** doesn't have a code package. Maybe it's a local run?"
+        )
+        raise RuntimeError("no code package found")
+
+    return mf_run.code.extract()
+
+
+def perform_diff(
+    source_dir: str,
+    target_dir: Optional[str] = None,
+    output: bool = False,
+    **kwargs: Mapping[str, Any],
+) -> Optional[List[str]]:
+    if target_dir is None:
+        target_dir = os.getcwd()
+
+    diffs = []
+    for dirpath, dirnames, filenames in os.walk(source_dir, followlinks=True):
+        for fname in filenames:
+            # NOTE: the paths below need to be set up carefully
+            # for the `patch` command to work. Better not to touch
+            # the directories below. If you must, test that patches
+            # work after your changes.
+            #
+            # target_file is the git repo in the current working directory
+            rel = os.path.relpath(dirpath, source_dir)
+            target_file = os.path.join(rel, fname)
+            # source_file is the run file loaded in a tmp directory
+            source_file = os.path.join(dirpath, fname)
+
+            if sys.stdout.isatty() and not output:
+                color = ["--color"]
+            else:
+                color = ["--no-color"]
+
+            if os.path.exists(os.path.join(target_dir, target_file)):
+                cmd = (
+                    ["git", "diff", "--no-index", "--exit-code"]
+                    + color
+                    + [
+                        target_file,
+                        source_file,
+                    ]
+                )
+                result: CompletedProcess = run(
+                    cmd, text=True, stdout=PIPE, cwd=target_dir
+                )
+                if result.returncode == 0:
+                    if not output:
+                        echo(f"✅ {target_file} is identical, skipping")
+                    continue
+
+                if output:
+                    diffs.append(result.stdout)
+                else:
+                    run(["less", "-R"], input=result.stdout, text=True)
+            else:
+                if not output:
+                    echo(f"❗ {target_file} not in the target directory, skipping")
+    return diffs if output else None
+
+
+def run_op(
+    runspec: str, op: Callable[..., Optional[List[str]]], **op_args: Mapping[str, Any]
+) -> Optional[List[str]]:
+    tmp = None
+    try:
+        tmp = extract_code_package(runspec)
+        return op(tmp.name, **op_args)
+    finally:
+        if tmp and os.path.exists(tmp.name):
+            shutil.rmtree(tmp.name)
+
+
+def run_op_diff_runs(
+    source_run_pathspec: str, target_run_pathspec: str, **op_args: Mapping[str, Any]
+) -> Optional[List[str]]:
+    source_tmp = None
+    target_tmp = None
+    try:
+        source_tmp = extract_code_package(source_run_pathspec)
+        target_tmp = extract_code_package(target_run_pathspec)
+        return perform_diff(source_tmp.name, target_tmp.name, **op_args)
+    finally:
+        for d in [source_tmp, target_tmp]:
+            if d and os.path.exists(d.name):
+                shutil.rmtree(d.name)
+
+
+def op_diff(tmpdir: str, **kwargs: Mapping[str, Any]) -> Optional[List[str]]:
+    kwargs_dict = dict(kwargs)
+    target_dir = cast(Optional[str], kwargs_dict.pop("target_dir", None))
+    output: bool = bool(kwargs_dict.pop("output", False))
+    op_args: Mapping[str, Any] = {**kwargs_dict}
+    return perform_diff(tmpdir, target_dir=target_dir, output=output, **op_args)
+
+
+def op_pull(tmpdir: str, dst: str, **op_args: Mapping[str, Any]) -> None:
+    if os.path.exists(dst):
+        echo(f"❌ Directory *{dst}* already exists")
+    else:
+        shutil.move(tmpdir, dst)
+        echo(f"Code downloaded to *{dst}*")
+
+
+def op_patch(tmpdir: str, dst: str, **kwargs: Mapping[str, Any]) -> None:
+    diffs = perform_diff(tmpdir, output=True) or []
+    with open(dst, "w", encoding="utf-8") as f:
+        for out in diffs:
+            out = out.replace(tmpdir, "/.")
+            out = out.replace("+++ b/./", "+++ b/")
+            out = out.replace("--- b/./", "--- b/")
+            out = out.replace("--- a/./", "--- a/")
+            out = out.replace("+++ a/./", "+++ a/")
+            f.write(out)
+    echo(f"Patch saved in *{dst}*")
+    path = run(
+        ["git", "rev-parse", "--show-prefix"], text=True, stdout=PIPE
+    ).stdout.strip()
+    if path:
+        diropt = f" --directory={path.rstrip('/')}"
+    else:
+        diropt = ""
+    echo("Apply the patch by running:")
+    echo_always(
+        f"git apply --verbose{diropt} {dst}", highlight=True, bold=True, err=True
+    )
+
+
+@code.command()
+@click.argument("run_pathspec")
+def diff(run_pathspec: str, **kwargs: Mapping[str, Any]) -> None:
+    """
+    Do a 'git diff' of the current directory and a Metaflow run.
+    """
+    _ = run_op(run_pathspec, op_diff, **kwargs)
+
+
+@code.command()
+@click.argument("source_run_pathspec")
+@click.argument("target_run_pathspec")
+def diff_runs(
+    source_run_pathspec: str, target_run_pathspec: str, **kwargs: Mapping[str, Any]
+) -> None:
+    """
+    Do a 'git diff' between two Metaflow runs.
+    """
+    _ = run_op_diff_runs(source_run_pathspec, target_run_pathspec, **kwargs)
+
+
+@code.command()
+@click.argument("run_pathspec")
+@click.option(
+    "--dir", help="Destination directory (default: {run_pathspec}_code)", default=None
+)
+def pull(
+    run_pathspec: str, dir: Optional[str] = None, **kwargs: Mapping[str, Any]
+) -> None:
+    """
+    Pull the code of a Metaflow run.
+    """
+    if dir is None:
+        dir = run_pathspec.lower().replace("/", "_") + "_code"
+    op_args: Mapping[str, Any] = {**kwargs, "dst": dir}
+    run_op(run_pathspec, op_pull, **op_args)
+
+
+@code.command()
+@click.argument("run_pathspec")
+@click.option(
+    "--file_path",
+    help="Patch file name. If not provided, defaults to a sanitized version of RUN_PATHSPEC "
+    "with slashes replaced by underscores, plus '.patch'.",
+    show_default=False,
+)
+@click.option(
+    "--overwrite", is_flag=True, help="Overwrite the patch file if it exists."
+)
+def patch(
+    run_pathspec: str,
+    file_path: Optional[str] = None,
+    overwrite: bool = False,
+    **kwargs: Mapping[str, Any],
+) -> None:
+    """
+    Create a patch by comparing current dir with a Metaflow run.
+    """
+    if file_path is None:
+        file_path = run_pathspec.lower().replace("/", "_") + ".patch"
+    if os.path.exists(file_path) and not overwrite:
+        echo(f"File *{file_path}* already exists. To overwrite, specify --overwrite.")
+        return
+    op_args: Mapping[str, Any] = {**kwargs, "dst": file_path}
+    run_op(run_pathspec, op_patch, **op_args)
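These helpers back the new `metaflow code` subcommands (`diff`, `diff-runs`, `pull`, `patch`), which are thin wrappers over `run_op`. A hedged sketch of the equivalent programmatic calls; the run pathspec and destination names below are hypothetical:

    # Hypothetical usage of the helpers above; "HelloFlow/3" is a made-up run pathspec.
    from metaflow.cmd.code import run_op, op_pull, op_patch

    run_op("HelloFlow/3", op_pull, dst="helloflow_3_code")    # download the run's code package
    run_op("HelloFlow/3", op_patch, dst="helloflow_3.patch")  # write a patch vs. the current dir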
metaflow/cmd/develop/stub_generator.py
CHANGED
@@ -1133,13 +1133,16 @@ class StubGenerator:
             result = result[1:]
         # Add doc to first and last overloads. Jedi uses the last one and pycharm
         # the first one. Go figure.
+        result_docstring = docs["func_doc"]
+        if docs["param_doc"]:
+            result_docstring += "\nParameters\n----------\n" + docs["param_doc"]
         result[0] = (
             result[0][0],
-
+            result_docstring,
         )
         result[-1] = (
             result[-1][0],
-
+            result_docstring,
         )
         return result
 
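The change makes the "Parameters" header conditional on parameter docs actually existing; the new assembly in miniature:

    # Miniature of the docstring assembly introduced above.
    docs = {"func_doc": "Create a new run.", "param_doc": ""}
    result_docstring = docs["func_doc"]
    if docs["param_doc"]:
        result_docstring += "\nParameters\n----------\n" + docs["param_doc"]
    print(result_docstring)  # just "Create a new run.", no dangling Parameters section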
metaflow/cmd/main_cli.py
CHANGED
metaflow/cmd/make_wrapper.py
CHANGED
@@ -2,15 +2,47 @@ import sys
 import subprocess
 from pathlib import Path
 import sysconfig
+import site
+
+
+def find_makefile():
+    possible_dirs = []
+
+    # 1) The standard sysconfig-based location
+    data_dir = sysconfig.get_paths()["data"]
+    possible_dirs.append(Path(data_dir) / "share" / "metaflow" / "devtools")
+
+    # 2) The user base (e.g. ~/.local on many systems)
+    user_base = site.getuserbase()  # e.g. /home/runner/.local
+    possible_dirs.append(Path(user_base) / "share" / "metaflow" / "devtools")
+
+    # 3) site-packages can vary, we can guess share/.. near each site-packages
+    #    (Works if pip actually placed devtools near site-packages.)
+    for p in site.getsitepackages():
+        possible_dirs.append(Path(p).parent / "share" / "metaflow" / "devtools")
+    user_site = site.getusersitepackages()
+    possible_dirs.append(Path(user_site).parent / "share" / "metaflow" / "devtools")
+
+    for candidate_dir in possible_dirs:
+        makefile_candidate = candidate_dir / "Makefile"
+        if makefile_candidate.is_file():
+            return makefile_candidate
+
+    return None
 
 
 def main():
-
-
+    makefile_path = find_makefile()
+    if not makefile_path:
+        print("ERROR: Could not find executable in any known location.")
+        sys.exit(1)
     cmd = ["make", "-f", str(makefile_path)] + sys.argv[1:]
-
+
     try:
         completed = subprocess.run(cmd, check=True)
         sys.exit(completed.returncode)
     except subprocess.CalledProcessError as ex:
         sys.exit(ex.returncode)
+    except KeyboardInterrupt:
+        print("Process interrupted by user. Exiting cleanly.")
+        sys.exit(1)
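For orientation, the three lookup roots resolve to paths like these; the printed values are illustrative and depend on the interpreter and install scheme:

    # Illustrative: print the roots find_makefile() probes, in order.
    import site
    import sysconfig
    from pathlib import Path

    print(Path(sysconfig.get_paths()["data"]) / "share" / "metaflow" / "devtools")
    print(Path(site.getuserbase()) / "share" / "metaflow" / "devtools")
    print(Path(site.getsitepackages()[0]).parent / "share" / "metaflow" / "devtools")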
metaflow/metaflow_config.py
CHANGED
@@ -381,6 +381,8 @@ KUBERNETES_DISK = from_conf("KUBERNETES_DISK", None)
 # Default kubernetes QoS class
 KUBERNETES_QOS = from_conf("KUBERNETES_QOS", "burstable")
 
+# Architecture of kubernetes nodes - used for @conda/@pypi in metaflow-dev
+KUBERNETES_CONDA_ARCH = from_conf("KUBERNETES_CONDA_ARCH")
 ARGO_WORKFLOWS_KUBERNETES_SECRETS = from_conf("ARGO_WORKFLOWS_KUBERNETES_SECRETS", "")
 ARGO_WORKFLOWS_ENV_VARS_TO_SKIP = from_conf("ARGO_WORKFLOWS_ENV_VARS_TO_SKIP", "")
 
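Like other knobs read through `from_conf`, the new setting can be supplied via a `METAFLOW_`-prefixed environment variable; the Tiltfile change further down writes exactly this variable into the generated dev config. A minimal sketch:

    # Sketch: setting the new knob before launching a flow. from_conf() falls back
    # to None when the variable is absent, which preserves the old behavior.
    import os
    os.environ["METAFLOW_KUBERNETES_CONDA_ARCH"] = "linux-aarch64"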
metaflow/metaflow_environment.py
CHANGED
@@ -6,7 +6,7 @@ from .util import get_username
 from . import metaflow_version
 from metaflow.exception import MetaflowException
 from metaflow.extension_support import dump_module_info
-from metaflow.mflog import BASH_MFLOG
+from metaflow.mflog import BASH_MFLOG, BASH_FLUSH_LOGS
 from . import R
 
 
@@ -159,6 +159,7 @@ class MetaflowEnvironment(object):
     def get_package_commands(self, code_package_url, datastore_type):
         cmds = [
             BASH_MFLOG,
+            BASH_FLUSH_LOGS,
             "mflog 'Setting up task environment.'",
             self._get_install_dependencies_cmd(datastore_type),
             "mkdir metaflow",
@@ -176,6 +177,7 @@ class MetaflowEnvironment(object):
             "fi" % code_package_url,
             "TAR_OPTIONS='--warning=no-timestamp' tar xf job.tar",
             "mflog 'Task is starting.'",
+            "flush_mflogs",
         ]
         return cmds
 
metaflow/mflog/__init__.py
CHANGED
@@ -44,6 +44,8 @@ BASH_MFLOG = (
 BASH_SAVE_LOGS_ARGS = ["python", "-m", "metaflow.mflog.save_logs"]
 BASH_SAVE_LOGS = " ".join(BASH_SAVE_LOGS_ARGS)
 
+BASH_FLUSH_LOGS = "flush_mflogs(){ " f"{BASH_SAVE_LOGS}; " "}"
+
 
 # this function returns a bash expression that redirects stdout
 # and stderr of the given bash expression to mflog.tee
@@ -63,7 +65,7 @@ def bash_capture_logs(bash_expr, var_transform=None):
 # update_delay determines how often logs should be uploaded to S3
 # as a function of the task execution time
 
-MIN_UPDATE_DELAY =
+MIN_UPDATE_DELAY = 0.25  # the most frequent update interval
 MAX_UPDATE_DELAY = 30.0  # the least frequent update interval
 
 
@@ -110,7 +112,6 @@ def export_mflog_env_vars(
 
 def tail_logs(prefix, stdout_tail, stderr_tail, echo, has_log_updates):
     def _available_logs(tail, stream, echo, should_persist=False):
-        # print the latest batch of lines
         try:
             for line in tail:
                 if should_persist:
@@ -128,7 +129,7 @@ def tail_logs(prefix, stdout_tail, stderr_tail, echo, has_log_updates):
 
     start_time = time.time()
     next_log_update = start_time
-    log_update_delay =
+    log_update_delay = update_delay(0)
     while has_log_updates():
         if time.time() > next_log_update:
             _available_logs(stdout_tail, "stdout", echo)
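Together with the `metaflow_environment.py` change above, this defines a `flush_mflogs` shell function and calls it at bootstrap milestones. The constant expands to a one-line bash function definition:

    # What BASH_FLUSH_LOGS evaluates to, reconstructed from the constants above.
    BASH_SAVE_LOGS_ARGS = ["python", "-m", "metaflow.mflog.save_logs"]
    BASH_SAVE_LOGS = " ".join(BASH_SAVE_LOGS_ARGS)
    BASH_FLUSH_LOGS = "flush_mflogs(){ " f"{BASH_SAVE_LOGS}; " "}"
    print(BASH_FLUSH_LOGS)  # flush_mflogs(){ python -m metaflow.mflog.save_logs; }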
metaflow/plugins/kubernetes/kubernetes_decorator.py
CHANGED
@@ -29,6 +29,7 @@ from metaflow.metaflow_config import (
     KUBERNETES_SHARED_MEMORY,
     KUBERNETES_TOLERATIONS,
     KUBERNETES_QOS,
+    KUBERNETES_CONDA_ARCH,
 )
 from metaflow.plugins.resources_decorator import ResourcesDecorator
 from metaflow.plugins.timeout_decorator import get_run_time_limit_for_task
@@ -158,7 +159,7 @@ class KubernetesDecorator(StepDecorator):
 
     # Conda environment support
     supports_conda_environment = True
-    target_platform = "linux-64"
+    target_platform = KUBERNETES_CONDA_ARCH or "linux-64"
 
     def init(self):
         super(KubernetesDecorator, self).init()
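Since `from_conf` yields None when the knob is unset, the `or` preserves the previous default:

    # Fallback semantics in miniature.
    KUBERNETES_CONDA_ARCH = None  # what from_conf("KUBERNETES_CONDA_ARCH") yields when unset
    print(KUBERNETES_CONDA_ARCH or "linux-64")  # linux-64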
metaflow/plugins/kubernetes/kubernetes_jobsets.py
CHANGED
@@ -319,6 +319,8 @@ class RunningJobSet(object):
     def kill(self):
         plural = "jobsets"
         client = self._client.get()
+        if not (self.is_running or self.is_waiting):
+            return
         try:
             # Killing the control pod will trigger the jobset to mark everything as failed.
             # Since jobsets have a successPolicy set to `All` which ensures that everything has
metaflow/plugins/pypi/bootstrap.py
CHANGED
@@ -8,6 +8,7 @@ import subprocess
 import sys
 import tarfile
 import time
+import platform
 from urllib.error import URLError
 from urllib.request import urlopen
 from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONDA_USE_FAST_INIT
@@ -36,29 +37,6 @@ def timer(func):
 
 
 if __name__ == "__main__":
-    # TODO: Detect architecture on the fly when dealing with arm architectures.
-    # ARCH=$(uname -m)
-    # OS=$(uname)
-
-    # if [[ "$OS" == "Linux" ]]; then
-    #     PLATFORM="linux"
-    #     if [[ "$ARCH" == "aarch64" ]]; then
-    #         ARCH="aarch64";
-    #     elif [[ $ARCH == "ppc64le" ]]; then
-    #         ARCH="ppc64le";
-    #     else
-    #         ARCH="64";
-    #     fi
-    # fi
-
-    # if [[ "$OS" == "Darwin" ]]; then
-    #     PLATFORM="osx";
-    #     if [[ "$ARCH" == "arm64" ]]; then
-    #         ARCH="arm64";
-    #     else
-    #         ARCH="64"
-    #     fi
-    # fi
 
     def run_cmd(cmd, stdin_str=None):
         result = subprocess.run(
@@ -350,12 +328,25 @@
         cmd = f"fast-initializer --prefix {prefix} --packages-dir {pkgs_dir}"
         run_cmd(cmd, all_package_urls)
 
-    if len(sys.argv) !=
-    print("Usage: bootstrap.py <flow_name> <id> <datastore_type>
+    if len(sys.argv) != 4:
+        print("Usage: bootstrap.py <flow_name> <id> <datastore_type>")
         sys.exit(1)
 
     try:
-        _, flow_name, id_, datastore_type
+        _, flow_name, id_, datastore_type = sys.argv
+
+        system = platform.system().lower()
+        arch_machine = platform.machine().lower()
+
+        if system == "darwin" and arch_machine == "arm64":
+            architecture = "osx-arm64"
+        elif system == "darwin":
+            architecture = "osx-64"
+        elif system == "linux" and arch_machine == "aarch64":
+            architecture = "linux-aarch64"
+        else:
+            # default fallback
+            architecture = "linux-64"
 
         prefix = os.path.join(os.getcwd(), architecture, id_)
         pkgs_dir = os.path.join(os.getcwd(), ".pkgs")
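The runtime detection replaces the commented-out bash sketch that was removed above; the same mapping, extracted for clarity:

    # The (system, machine) -> conda platform mapping introduced above.
    import platform

    def conda_arch(system: str, machine: str) -> str:
        system, machine = system.lower(), machine.lower()
        if system == "darwin" and machine == "arm64":
            return "osx-arm64"
        if system == "darwin":
            return "osx-64"
        if system == "linux" and machine == "aarch64":
            return "linux-aarch64"
        return "linux-64"  # default fallback

    print(conda_arch("Darwin", "arm64"))                      # osx-arm64
    print(conda_arch(platform.system(), platform.machine()))  # current host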
metaflow/plugins/pypi/conda_environment.py
CHANGED
@@ -190,7 +190,6 @@ class CondaEnvironment(MetaflowEnvironment):
         # 4. Start PyPI solves in parallel after each conda environment is created
         # 5. Download PyPI packages sequentially
         # 6. Create and cache PyPI environments in parallel
-
         with ThreadPoolExecutor() as executor:
             # Start all conda solves in parallel
             conda_futures = [
@@ -213,14 +212,14 @@ class CondaEnvironment(MetaflowEnvironment):
 
                 # Queue PyPI solve to start after conda create
                 if result[0] in pypi_envs:
+                    # solve pypi envs uniquely
+                    pypi_env = pypi_envs.pop(result[0])
 
                     def pypi_solve(env):
                         create_future.result()  # Wait for conda create
                         return solve(*env, "pypi")
 
-                    pypi_futures.append(
-                        executor.submit(pypi_solve, pypi_envs[result[0]])
-                    )
+                    pypi_futures.append(executor.submit(pypi_solve, pypi_env))
 
         # Process PyPI results sequentially for downloads
         for solve_future in pypi_futures:
@@ -242,7 +241,7 @@ class CondaEnvironment(MetaflowEnvironment):
         if id_:
             # bootstrap.py is responsible for ensuring the validity of this executable.
             # -s is important! Can otherwise leak packages to other environments.
-            return os.path.join("
+            return os.path.join("$MF_ARCH", id_, "bin/python -s")
         else:
             # for @conda/@pypi(disabled=True).
             return super().executable(step_name, default)
@@ -315,7 +314,6 @@ class CondaEnvironment(MetaflowEnvironment):
         # 5. All resolved packages (Conda or PyPI) are cached
         # 6. PyPI packages are only installed for local platform
 
-        # Resolve `linux-64` Conda environments if @batch or @kubernetes are in play
         target_platform = conda_platform()
         for decorator in step.decorators:
             # NOTE: Keep the list of supported decorator names for backward compatibility purposes.
@@ -329,7 +327,6 @@ class CondaEnvironment(MetaflowEnvironment):
                 "snowpark",
                 "slurm",
             ]:
-                # TODO: Support arm architectures
                 target_platform = getattr(decorator, "target_platform", "linux-64")
                 break
 
@@ -424,15 +421,18 @@ class CondaEnvironment(MetaflowEnvironment):
         if id_:
             return [
                 "echo 'Bootstrapping virtual environment...'",
+                "flush_mflogs",
                 # We have to prevent the tracing module from loading,
                 # as the bootstrapping process uses the internal S3 client which would fail to import tracing
                 # due to the required dependencies being bundled into the conda environment,
                 # which is yet to be initialized at this point.
-                'DISABLE_TRACING=True python -m metaflow.plugins.pypi.bootstrap "%s" %s "%s"
+                'DISABLE_TRACING=True python -m metaflow.plugins.pypi.bootstrap "%s" %s "%s"'
                 % (self.flow.name, id_, self.datastore_type),
                 "echo 'Environment bootstrapped.'",
+                "flush_mflogs",
                 # To avoid having to install micromamba in the PATH in micromamba.py, we add it to the PATH here.
                 "export PATH=$PATH:$(pwd)/micromamba/bin",
+                "export MF_ARCH=$(case $(uname)/$(uname -m) in Darwin/arm64)echo osx-arm64;;Darwin/*)echo osx-64;;Linux/aarch64)echo linux-aarch64;;*)echo linux-64;;esac)",
             ]
         else:
             # for @conda/@pypi(disabled=True).
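The `$MF_ARCH` variable exported by the bootstrap commands is what `executable()` now splices into the interpreter path; in effect:

    # Sketch of the path executable() returns; $MF_ARCH is expanded by the shell
    # at task runtime (e.g. to linux-aarch64). "abc123" is a hypothetical env id.
    import os
    print(os.path.join("$MF_ARCH", "abc123", "bin/python -s"))  # $MF_ARCH/abc123/bin/python -s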
metaflow/plugins/pypi/parsers.py
ADDED
@@ -0,0 +1,268 @@
+# this file can be overridden by extensions as is (e.g. metaflow-nflx-extensions)
+from metaflow.exception import MetaflowException
+
+
+class ParserValueError(MetaflowException):
+    headline = "Value error"
+
+
+def requirements_txt_parser(content: str):
+    """
+    Parse non-comment lines from a requirements.txt file as strictly valid
+    PEP 508 requirements.
+
+    Recognizes direct references (e.g. "my_lib @ git+https://..."), extras
+    (e.g. "requests[security]"), and version specifiers (e.g. "==2.0"). If
+    the package name is "python", its specifier is stored in the "python"
+    key instead of "packages".
+
+    Parameters
+    ----------
+    content : str
+        Contents of a requirements.txt file.
+
+    Returns
+    -------
+    dict
+        A dictionary with two keys:
+        - "packages": dict(str -> str)
+          Mapping from package name (plus optional extras/references) to a
+          version specifier string.
+        - "python": str or None
+          The Python version constraints if present, otherwise None.
+
+    Raises
+    ------
+    ParserValueError
+        If a requirement line is invalid PEP 508 or if environment markers are
+        detected, or if multiple Python constraints are specified.
+    """
+    import re
+    from metaflow._vendor.packaging.requirements import Requirement, InvalidRequirement
+
+    parsed = {"packages": {}, "python": None}
+
+    inline_comment_pattern = re.compile(r"\s+#.*$")
+    for line in content.splitlines():
+        line = line.strip()
+
+        # support Rye lockfiles by skipping lines not compliant with requirements
+        if line == "-e file:.":
+            continue
+
+        if not line or line.startswith("#"):
+            continue
+
+        line = inline_comment_pattern.sub("", line).strip()
+        if not line:
+            continue
+
+        try:
+            req = Requirement(line)
+        except InvalidRequirement:
+            raise ParserValueError(f"Not a valid PEP 508 requirement: '{line}'")
+
+        if req.marker is not None:
+            raise ParserValueError(
+                "Environment markers (e.g. 'platform_system==\"Linux\"') "
+                f"are not supported for line: '{line}'"
+            )
+
+        dep_key = req.name
+        if req.extras:
+            dep_key += f"[{','.join(req.extras)}]"
+        if req.url:
+            dep_key += f"@{req.url}"
+
+        dep_spec = str(req.specifier).lstrip(" =")
+
+        if req.name.lower() == "python":
+            if parsed["python"] is not None and dep_spec:
+                raise ParserValueError(
+                    f"Multiple Python version specs not allowed: '{line}'"
+                )
+            parsed["python"] = dep_spec or None
+        else:
+            parsed["packages"][dep_key] = dep_spec
+
+    return parsed
+
+
+def pyproject_toml_parser(content: str):
+    """
+    Parse a pyproject.toml file per PEP 621.
+
+    Reads the 'requires-python' and 'dependencies' fields from the "[project]" section.
+    Each dependency line must be a valid PEP 508 requirement. If the package name is
+    "python", its specifier is stored in the "python" key instead of "packages".
+
+    Parameters
+    ----------
+    content : str
+        Contents of a pyproject.toml file.
+
+    Returns
+    -------
+    dict
+        A dictionary with two keys:
+        - "packages": dict(str -> str)
+          Mapping from package name (plus optional extras/references) to a
+          version specifier string.
+        - "python": str or None
+          The Python version constraints if present, otherwise None.
+
+    Raises
+    ------
+    RuntimeError
+        If no TOML library (tomllib in Python 3.11+ or tomli in earlier versions) is found.
+    ParserValueError
+        If a dependency is not valid PEP 508, if environment markers are used, or if
+        multiple Python constraints are specified.
+    """
+    try:
+        import tomllib as toml  # Python 3.11+
+    except ImportError:
+        try:
+            import tomli as toml  # Python < 3.11 (requires "tomli" package)
+        except ImportError:
+            raise RuntimeError(
+                "Could not import a TOML library. For Python <3.11, please install 'tomli'."
+            )
+    from metaflow._vendor.packaging.requirements import Requirement, InvalidRequirement
+
+    data = toml.loads(content)
+
+    project = data.get("project", {})
+    requirements = project.get("dependencies", [])
+    requires_python = project.get("requires-python")
+
+    parsed = {"packages": {}, "python": None}
+
+    if requires_python is not None:
+        # If present, store verbatim; note that PEP 621 does not necessarily
+        # require "python" to be a dependency in the usual sense.
+        # Example: "requires-python" = ">=3.7,<4"
+        parsed["python"] = requires_python.lstrip("=").strip()
+
+    for dep_line in requirements:
+        dep_line_stripped = dep_line.strip()
+        try:
+            req = Requirement(dep_line_stripped)
+        except InvalidRequirement:
+            raise ParserValueError(
+                f"Not a valid PEP 508 requirement: '{dep_line_stripped}'"
+            )
+
+        if req.marker is not None:
+            raise ParserValueError(
+                f"Environment markers not supported for line: '{dep_line_stripped}'"
+            )
+
+        dep_key = req.name
+        if req.extras:
+            dep_key += f"[{','.join(req.extras)}]"
+        if req.url:
+            dep_key += f"@{req.url}"
+
+        dep_spec = str(req.specifier).lstrip("=")
+
+        if req.name.lower() == "python":
+            if parsed["python"] is not None and dep_spec:
+                raise ParserValueError(
+                    f"Multiple Python version specs not allowed: '{dep_line_stripped}'"
+                )
+            parsed["python"] = dep_spec or None
+        else:
+            parsed["packages"][dep_key] = dep_spec
+
+    return parsed
+
+
+def conda_environment_yml_parser(content: str):
+    """
+    Parse a minimal environment.yml file under strict assumptions.
+
+    The file must contain a 'dependencies:' line, after which each dependency line
+    appears with a '- ' prefix. Python can appear as 'python=3.9', etc.; other
+    packages as 'numpy=1.21.2' or simply 'numpy'. Non-compliant lines raise ParserValueError.
+
+    Parameters
+    ----------
+    content : str
+        Contents of a environment.yml file.
+
+    Returns
+    -------
+    dict
+        A dictionary with keys:
+        {
+            "packages": dict(str -> str),
+            "python": str or None
+        }
+
+    Raises
+    ------
+    ParserValueError
+        If the file has malformed lines or unsupported sections.
+    """
+    import re
+
+    packages = {}
+    python_version = None
+
+    inside_dependencies = False
+
+    # Basic pattern for lines like "numpy=1.21.2"
+    # Group 1: package name
+    # Group 2: optional operator + version (could be "=1.21.2", "==1.21.2", etc.)
+    line_regex = re.compile(r"^([A-Za-z0-9_\-\.]+)(\s*[=<>!~].+\s*)?$")
+    inline_comment_pattern = re.compile(r"\s+#.*$")
+
+    for line in content.splitlines():
+        line = line.strip()
+        if not line or line.startswith("#"):
+            continue
+
+        line = inline_comment_pattern.sub("", line).strip()
+        if not line:
+            continue
+
+        if line.lower().startswith("dependencies:"):
+            inside_dependencies = True
+            continue
+
+        if inside_dependencies and not line.startswith("-"):
+            inside_dependencies = False
+            continue
+
+        if not inside_dependencies:
+            continue
+
+        dep_line = line.lstrip("-").strip()
+        if dep_line.endswith(":"):
+            raise ParserValueError(
+                f"Unsupported subsection '{dep_line}' in environment.yml."
+            )
+
+        match = line_regex.match(dep_line)
+        if not match:
+            raise ParserValueError(
+                f"Line '{dep_line}' is not a valid conda package specifier."
+            )
+
+        pkg_name, pkg_version_part = match.groups()
+        version_spec = pkg_version_part.strip() if pkg_version_part else ""
+
+        if version_spec.startswith("="):
+            version_spec = version_spec.lstrip("=").strip()
+
+        if pkg_name.lower() == "python":
+            if python_version is not None and version_spec:
+                raise ParserValueError(
+                    f"Multiple Python version specs detected: '{dep_line}'"
+                )
+            python_version = version_spec
+        else:
+            packages[pkg_name] = version_spec
+
+    return {"packages": packages, "python": python_version}
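A quick, illustrative round-trip through two of the new parsers (the inputs below are made up; the output shapes follow the docstrings above):

    from metaflow.plugins.pypi.parsers import (
        requirements_txt_parser,
        conda_environment_yml_parser,
    )

    print(requirements_txt_parser("requests[security]==2.32.0\npython>=3.10\n"))
    # {'packages': {'requests[security]': '2.32.0'}, 'python': '>=3.10'}

    print(conda_environment_yml_parser("dependencies:\n  - python=3.10\n  - numpy=1.26.4\n"))
    # {'packages': {'numpy': '1.26.4'}, 'python': '3.10'}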
metaflow/plugins/pypi/utils.py
CHANGED
@@ -72,6 +72,24 @@ def pip_tags(python_version, mamba_platform):
             )
         ]
         platforms.append("linux_x86_64")
+    elif mamba_platform == "linux-aarch64":
+        platforms = [
+            "manylinux%s_aarch64" % s
+            for s in (
+                "2014",
+                "_2_17",
+                "_2_18",
+                "_2_19",
+                "_2_20",
+                "_2_21",
+                "_2_23",
+                "_2_24",
+                "_2_25",
+                "_2_26",
+                "_2_27",
+            )
+        ]
+        platforms.append("linux_aarch64")
     elif mamba_platform == "osx-64":
         platforms = tags.mac_platforms(arch="x86_64")
     elif mamba_platform == "osx-arm64":
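The new branch mirrors the existing x86_64 list with aarch64 manylinux tags; the resulting platform strings:

    # The platform list the new linux-aarch64 branch produces.
    platforms = [
        "manylinux%s_aarch64" % s
        for s in ("2014", "_2_17", "_2_18", "_2_19", "_2_20", "_2_21",
                  "_2_23", "_2_24", "_2_25", "_2_26", "_2_27")
    ]
    platforms.append("linux_aarch64")
    print(platforms[0], platforms[1], platforms[-1])
    # manylinux2014_aarch64 manylinux_2_17_aarch64 linux_aarch64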
metaflow/version.py
CHANGED
@@ -1 +1 @@
-metaflow_version = "2.15.0"
+metaflow_version = "2.15.2"
{metaflow-2.15.0.data → metaflow-2.15.2.data}/data/share/metaflow/devtools/Makefile
RENAMED
@@ -8,6 +8,7 @@ help:
 	@echo " ui - Open Metaflow UI"
 	@echo " dashboard - Open Minikube dashboard"
 	@echo " down - Stop and clean up the environment"
+	@echo " all-up - Start the development environment with all services"
 	@echo " help - Show this help message"
 
 HELM_VERSION := v3.14.0
@@ -24,10 +25,10 @@ MINIKUBE := $(MINIKUBE_DIR)/minikube
 TILT_DIR := $(DEVTOOLS_DIR)/tilt
 TILT := $(TILT_DIR)/tilt
 TILTFILE := $(MKFILE_DIR)/Tiltfile
-MAKE_CMD := $(MAKE) -
+MAKE_CMD := $(MAKE) -f "$(MKFILE_PATH)"
 
 MINIKUBE_CPUS ?= 4
-MINIKUBE_MEMORY ?=
+MINIKUBE_MEMORY ?= 6144
 MINIKUBE_DISK_SIZE ?= 20g
 
 ifeq ($(shell uname), Darwin)
@@ -129,7 +130,8 @@ setup-minikube:
 		--cpus $(MINIKUBE_CPUS) \
 		--memory $(MINIKUBE_MEMORY) \
 		--disk-size $(MINIKUBE_DISK_SIZE) \
-		--driver docker
+		--driver docker \
+		|| { echo "❌ Failed to start Minikube (check if Docker is running)"; exit 1; }; \
 	echo "🔌 Enabling metrics-server and dashboard (quietly)..."; \
 	$(MINIKUBE) addons enable metrics-server >/dev/null 2>&1; \
 	$(MINIKUBE) addons enable dashboard >/dev/null 2>&1; \
@@ -170,7 +172,6 @@ up: install-brew check-docker install-curl install-gum setup-minikube install-he
 	@echo 'trap "exit" INT TERM' >> $(DEVTOOLS_DIR)/start.sh
 	@echo 'trap "kill 0" EXIT' >> $(DEVTOOLS_DIR)/start.sh
 	@echo 'eval $$($(MINIKUBE) docker-env)' >> $(DEVTOOLS_DIR)/start.sh
-
 	@echo 'if [ -n "$$SERVICES_OVERRIDE" ]; then' >> "$(DEVTOOLS_DIR)/start.sh"
 	@echo ' echo "🌐 Using user-provided list of services: $$SERVICES_OVERRIDE"' >> "$(DEVTOOLS_DIR)/start.sh"
 	@echo ' SERVICES="$$SERVICES_OVERRIDE"' >> "$(DEVTOOLS_DIR)/start.sh"
@@ -186,11 +187,14 @@ up: install-brew check-docker install-curl install-gum setup-minikube install-he
 	@chmod +x $(DEVTOOLS_DIR)/start.sh
 	@$(DEVTOOLS_DIR)/start.sh
 
+all-up:
+	@echo "🚀 Starting up all services..."
+	SERVICES_OVERRIDE=all $(MAKE_CMD) up
+
 down:
 	@echo "🛑 Stopping all services..."
 	@-pkill -f "$(MINIKUBE) tunnel" 2>/dev/null || true
 	@echo "⏹️ Stopping Tilt..."
-	-PATH="$(MINIKUBE_DIR):$(TILT_DIR):$$PATH" tilt down -f $(TILTFILE)
 	@echo "🧹 Cleaning up Minikube..."
 	$(MAKE_CMD) teardown-minikube
 	@echo "🗑️ Removing Tilt binary and directory..."
@@ -201,7 +205,7 @@ down:
 
 shell: setup-tilt
 	@echo "⏳ Checking if development environment is up..."
-	@set -
+	@set -eu; \
 	for i in $$(seq 1 90); do \
 		if "$(TILT)" get session >/dev/null 2>&1; then \
 			found_session=1; \
@@ -210,7 +214,7 @@ shell: setup-tilt
 		sleep 2; \
 		fi; \
 	done; \
-	if [ -z "$${found_session}" ]; then \
+	if [ -z "$${found_session:-}" ]; then \
 		echo "❌ Development environment is not up."; \
 		echo " Please run 'metaflow-dev up' in another terminal, then re-run 'metaflow-dev shell'."; \
 		exit 1; \
@@ -220,7 +224,10 @@ shell: setup-tilt
 	"$(TILT)" get uiresource generate-configs >/dev/null 2>&1; \
 	status=$$?; \
 	if [ $$status -eq 0 ]; then \
-		"$(TILT)" wait --for=condition=Ready uiresource/generate-configs; \
+		if ! "$(TILT)" wait --for=condition=Ready uiresource/generate-configs --timeout=300s; then \
+			echo "❌ Timed out waiting for development environment to be ready."; \
+			exit 1; \
+		fi; \
 		break; \
 	elif [ $$status -eq 127 ]; then \
 		echo "❌ Development environment is not up."; \
@@ -250,8 +257,7 @@ shell: setup-tilt
 		"$$user_shell" -i; \
 	fi'
 
-
-#
+
 # @echo '$(MAKE_CMD) create-dev-shell' >> $(DEVTOOLS_DIR)/start.sh
 # @echo 'rm -f /tmp/metaflow-devshell-*' >> $(DEVTOOLS_DIR)/start.sh
 create-dev-shell: setup-tilt
@@ -268,7 +274,10 @@ create-dev-shell: setup-tilt
 	echo "fi" >> $$SHELL_PATH && \
 	echo "" >> $$SHELL_PATH && \
 	echo "echo \"⏳ Waiting for development environment to be ready...\"" >> $$SHELL_PATH && \
-	echo "$(TILT) wait --for=condition=Ready uiresource/generate-configs" >> $$SHELL_PATH && \
+	echo "if ! $(TILT) wait --for=condition=Ready uiresource/generate-configs --timeout=300s; then" >> $$SHELL_PATH && \
+	echo " echo \"❌ Timed out waiting for development environment to be ready.\"" >> $$SHELL_PATH && \
+	echo " exit 1" >> $$SHELL_PATH && \
+	echo "fi" >> $$SHELL_PATH && \
 	echo "" >> $$SHELL_PATH && \
 	echo "echo \"🔧 Starting a new shell for development environment...\"" >> $$SHELL_PATH && \
 	echo "if [ -n \"\$$SHELL\" ]; then" >> $$SHELL_PATH && \
@@ -318,6 +327,6 @@ ui: setup-tilt
 	@echo "🔗 Opening Metaflow UI at http://localhost:3000"
 	@open http://localhost:3000
 
-.PHONY: install-helm setup-minikube setup-tilt teardown-minikube tunnel up down check-docker install-curl install-gum install-brew up down dashboard shell ui help
+.PHONY: install-helm setup-minikube setup-tilt teardown-minikube tunnel up down check-docker install-curl install-gum install-brew up down dashboard shell ui all-up help
 
 .DEFAULT_GOAL := up
{metaflow-2.15.0.data → metaflow-2.15.2.data}/data/share/metaflow/devtools/Tiltfile
RENAMED
@@ -23,8 +23,13 @@ components = {
     "argo-events": ["argo-workflows"],
 }
 
-
-
+services_env = os.getenv("SERVICES", "").strip().lower()
+
+if services_env:
+    if services_env == "all":
+        requested_components = list(components.keys())
+    else:
+        requested_components = services_env.split(",")
 else:
     requested_components = list(components.keys())
 
@@ -78,7 +83,7 @@ for component in requested_components:
     if result not in enabled_components:
         enabled_components.append(result)
 
-# Print a friendly summary when running `tilt up`.
+# Print a friendly summary when running `tilt up`.
 if config.tilt_subcommand == 'up':
     print("\n📦 Components to install:")
     for component in enabled_components:
@@ -99,6 +104,7 @@ if "minio" in enabled_components:
         set=[
             'rootUser=rootuser',
             'rootPassword=rootpass123',
+            # TODO: perturb the bucket name to avoid conflicts
            'buckets[0].name=metaflow-test',
            'buckets[0].policy=none',
            'buckets[0].purge=false',
@@ -498,7 +504,7 @@ if "argo-events" in enabled_components:
         'argo-events-controller-manager',
         labels=['argo-events'],
     )
-
+
     metaflow_config["METAFLOW_ARGO_EVENTS_EVENT"] = "metaflow-event"
     metaflow_config["METAFLOW_ARGO_EVENTS_EVENT_BUS"] = "default"
     metaflow_config["METAFLOW_ARGO_EVENTS_EVENT_SOURCE"] = "argo-events-webhook"
@@ -600,8 +606,20 @@ if "ui" in enabled_components:
     config_resources.append('metaflow-ui')
     config_resources.append('metaflow-ui-static')
 
+    cmd = '''
+    ARCH=$(kubectl get nodes -o jsonpath='{.items[0].status.nodeInfo.architecture}')
+    case "$ARCH" in
+      arm64) echo linux-aarch64 ;;
+      amd64) echo linux-64 ;;
+      *) echo linux-64 ;;
+    esac
+    '''
+
+    # For @conda/@pypi emulation
+    metaflow_config["METAFLOW_KUBERNETES_CONDA_ARCH"] = str(local(cmd)).strip()
+
 local_resource(
     name="generate-configs",
     cmd=write_config_files(),
     resource_deps=config_resources,
-)
+)
{metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: metaflow
-Version: 2.15.0
+Version: 2.15.2
 Summary: Metaflow: More AI and ML, Less Engineering
 Author: Metaflow Developers
 Author-email: help@metaflow.org
@@ -26,7 +26,7 @@ License-File: LICENSE
 Requires-Dist: requests
 Requires-Dist: boto3
 Provides-Extra: stubs
-Requires-Dist: metaflow-stubs==2.15.0; extra == "stubs"
+Requires-Dist: metaflow-stubs==2.15.2; extra == "stubs"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
{metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/RECORD
RENAMED
@@ -1,5 +1,5 @@
 metaflow/R.py,sha256=CqVfIatvmjciuICNnoyyNGrwE7Va9iXfLdFbQa52hwA,3958
-metaflow/__init__.py,sha256=
+metaflow/__init__.py,sha256=6ws8LpYz5HwIrc1J4BK-2Hxs0okGLFQCm31bHFf2H8E,5978
 metaflow/cards.py,sha256=IbRmredvmFEU0V6JL7DR8wCESwVmmZJubr6x24bo7U4,442
 metaflow/cli.py,sha256=RU-yXpT-Lfl3xGyFNtL742e9KEqcRxEnQ-4mwXrXhvo,20928
 metaflow/cli_args.py,sha256=hDsdWdRmfXYifVGq6b6FDfgoWxtIG2nr_lU6EBV0Pnk,3584
@@ -16,10 +16,10 @@ metaflow/includefile.py,sha256=kWKDSlzVcRVNGG9PV5eB3o2ynrzqhVsfaLtkqjshn7Q,20948
 metaflow/info_file.py,sha256=wtf2_F0M6dgiUu74AFImM8lfy5RrUw5Yj7Rgs2swKRY,686
 metaflow/integrations.py,sha256=LlsaoePRg03DjENnmLxZDYto3NwWc9z_PtU6nJxLldg,1480
 metaflow/lint.py,sha256=x4p6tnRzYqNNniCGXyrUW0WuYfTUgnaOMRivxvnxask,11661
-metaflow/metaflow_config.py,sha256=
+metaflow/metaflow_config.py,sha256=oLbF4ZOfdejRBbemL_9NmFo2G2iAdTuUgbd7vNxV2lg,23567
 metaflow/metaflow_config_funcs.py,sha256=5GlvoafV6SxykwfL8D12WXSfwjBN_NsyuKE_Q3gjGVE,6738
 metaflow/metaflow_current.py,sha256=pfkXmkyHeMJhxIs6HBJNBEaBDpcl5kz9Wx5mW6F_3qo,7164
-metaflow/metaflow_environment.py,sha256=
+metaflow/metaflow_environment.py,sha256=e5BOkA7VdpjseI4HUkm_pR74NVJRNADL20LIQL4W1vU,8139
 metaflow/metaflow_profile.py,sha256=jKPEW-hmAQO-htSxb9hXaeloLacAh41A35rMZH6G8pA,418
 metaflow/metaflow_version.py,sha256=duhIzfKZtcxMVMs2uiBqBvUarSHJqyWDwMhaBOQd_g0,7491
 metaflow/monitor.py,sha256=T0NMaBPvXynlJAO_avKtk8OIIRMyEuMAyF8bIp79aZU,5323
@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
 metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
 metaflow/util.py,sha256=mJBkV5tShIyCsLDeM1zygQGeciQVMrVPm_qI8Oi33G0,14656
 metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
-metaflow/version.py,sha256=
+metaflow/version.py,sha256=7oX8gtpKJpYIdf69SUJaWUTLRweqR54oqHS_DrQYBXk,28
 metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
 metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
 metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -121,12 +121,13 @@ metaflow/client/core.py,sha256=Cca6HbK-UBO72aELfFJxsl85ylYHHlCAd-uJP-lEepQ,83689
 metaflow/client/filecache.py,sha256=Wy0yhhCqC1JZgebqi7z52GCwXYnkAqMZHTtxThvwBgM,15229
 metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/cmd/configure_cmd.py,sha256=o-DKnUf2FBo_HiMVyoyzQaGBSMtpbEPEdFTQZ0hkU-k,33396
-metaflow/cmd/main_cli.py,sha256=
-metaflow/cmd/make_wrapper.py,sha256=
+metaflow/cmd/main_cli.py,sha256=LSehmMjkWojAN1XTtqW6S51ZpGNAdW4_VK5S7qH8-Ts,2982
+metaflow/cmd/make_wrapper.py,sha256=N8L4u8QZAryH0sAjRsdEqG-gTj2S4LUsfDizOemrTR0,1604
 metaflow/cmd/tutorials_cmd.py,sha256=8FdlKkicTOhCIDKcBR5b0Oz6giDvS-EMY3o9skIrRqw,5156
 metaflow/cmd/util.py,sha256=jS_0rUjOnGGzPT65fzRLdGjrYAOOLA4jU2S0HJLV0oc,406
+metaflow/cmd/code/__init__.py,sha256=VO4dNM9M9LHYy5nTgEiJvCV1RBl8lpDlYGJm6GIcaBA,7413
 metaflow/cmd/develop/__init__.py,sha256=p1Sy8yU1MEKSrH5ttOWOZvNcI1qYu6J6jghdTHwPgOw,689
-metaflow/cmd/develop/stub_generator.py,sha256=
+metaflow/cmd/develop/stub_generator.py,sha256=bo2yWe0kvCZ-3arEFe9eAnPN-h8oNNPcQsDwsL350UM,65217
 metaflow/cmd/develop/stubs.py,sha256=JX2qNZDvG0upvPueAcLhoR_zyLtRranZMwY05tLdpRQ,11884
 metaflow/datastore/__init__.py,sha256=VxP6ddJt3rwiCkpiSfAhyVkUCOe1pgZZsytVEJzFmSQ,155
 metaflow/datastore/content_addressed_store.py,sha256=6T7tNqL29kpmecyMLHF35RhoSBOb-OZcExnsB65AvnI,7641
@@ -145,7 +146,7 @@ metaflow/metadata_provider/__init__.py,sha256=FZNSnz26VB_m18DQG8mup6-Gfl7r1U6lRM
 metaflow/metadata_provider/heartbeat.py,sha256=42mQo6wOHdFuaCh426uV6Kn8swe7e5I3gqA_G7cI_LA,3127
 metaflow/metadata_provider/metadata.py,sha256=meO4Fhxu7tbMUGwasYb9_AtL06fwrrXKKjIK7KRWZDs,27093
 metaflow/metadata_provider/util.py,sha256=lYoQKbqoTM1iZChgyVWN-gX-HyM9tt9bXEMJexY9XmM,1723
-metaflow/mflog/__init__.py,sha256=
+metaflow/mflog/__init__.py,sha256=TkR9ny_JYvNCWJTdLiHsbLSLc9cUvzAzpDuHLdG8nkA,6020
 metaflow/mflog/mflog.py,sha256=VebXxqitOtNAs7VJixnNfziO_i_urG7bsJ5JiB5IXgY,4370
 metaflow/mflog/save_logs.py,sha256=4p1OwozsHJBslOzAf0wUq2XPMNpEOZWM68MgWzh_jJY,2330
 metaflow/mflog/save_logs_periodically.py,sha256=2Uvk9hi-zlCqXxOQoXmmjH1SCugfw6eG6w70WgfI-ho,1256
@@ -292,23 +293,24 @@ metaflow/plugins/kubernetes/kube_utils.py,sha256=jdFMGbEmIow-oli26v31W9CmbZXigx0
 metaflow/plugins/kubernetes/kubernetes.py,sha256=g_E5jBhoMSDzGFnP5PDQiVPPllUr1wulVcG3tz247G8,29668
 metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=PY-aMaVelHddHq3jqEEu9cvNl7xEjT2lFFADN9dXMkw,13918
 metaflow/plugins/kubernetes/kubernetes_client.py,sha256=tuvXP-QKpdeSmzVolB2R_TaacOr5DIb0j642eKcjsiM,6491
-metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=
+metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=ydJ6WyatCSwUzq_xGcjG1nCp1yRNhSyh09z0Y1cwDNI,30896
 metaflow/plugins/kubernetes/kubernetes_job.py,sha256=pO9ExyAVCDoAoWFn9oFcos2aa0MQk4_D61O-T4E10E8,31826
-metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=
+metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=XjduAYY_H-jX3lGK6b-jYt6QnDvC2ac3qIAgr-iDsCQ,42543
 metaflow/plugins/kubernetes/spot_metadata_cli.py,sha256=an0nWCxgflmqIPBCBrlb4m3DereDFFJBLt-KKhqcHc8,1670
 metaflow/plugins/kubernetes/spot_monitor_sidecar.py,sha256=zrWU-smQwPnL6MBHmzTxWyEA00R6iKKQbhhy50xFwQ8,3832
 metaflow/plugins/metadata_providers/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 metaflow/plugins/metadata_providers/local.py,sha256=Z0CXaGZJbAkj4II3WspJi-uCCtShH64yaXZQ5i9Ym7g,24390
 metaflow/plugins/metadata_providers/service.py,sha256=K0Ym6lcmegX6wBC5uZbeAFQJSDFc8e6DzJiCB1VIqjc,22554
 metaflow/plugins/pypi/__init__.py,sha256=0YFZpXvX7HCkyBFglatual7XGifdA1RwC3U4kcizyak,1037
-metaflow/plugins/pypi/bootstrap.py,sha256=
+metaflow/plugins/pypi/bootstrap.py,sha256=SNONquX6QnTbu7htmhaQeVeZ2ofaFaUCDScRIrTTERc,14718
 metaflow/plugins/pypi/conda_decorator.py,sha256=piFcE4uGmWhhbGlxMK0GHd7BGEyqy6r9BFy8Mjoi80Q,15937
-metaflow/plugins/pypi/conda_environment.py,sha256=
+metaflow/plugins/pypi/conda_environment.py,sha256=yeTPGuu38EQ8aYzXUbc6cLc9b2NMLLEnxS-C73PcDHk,22250
 metaflow/plugins/pypi/micromamba.py,sha256=LLJ2dGGOEyld07W8iI6dtE01h2Y1PQnBhU-dMBssZ3c,16502
+metaflow/plugins/pypi/parsers.py,sha256=gpOOG2Ph95wI73MWCAi7XjpK0gYhv5k5YIGBs73QPuE,8556
 metaflow/plugins/pypi/pip.py,sha256=H0cIy8odpZ-JTn4SwF0b74tuC3uRU7X8TdAQJ2kODG8,13971
 metaflow/plugins/pypi/pypi_decorator.py,sha256=ybNgo-T5Z_0W2KNuED0pdjyI0qygZ4a1MXAzKqdHt_E,7250
 metaflow/plugins/pypi/pypi_environment.py,sha256=FYMg8kF3lXqcLfRYWD83a9zpVjcoo_TARqMGZ763rRk,230
-metaflow/plugins/pypi/utils.py,sha256=
+metaflow/plugins/pypi/utils.py,sha256=glfXN0Do8HleB8YE9LE9Pb1tIBivqdPGmS0sUtO1e7k,3451
 metaflow/plugins/secrets/__init__.py,sha256=mhJaN2eMS_ZZVewAMR2E-JdP5i0t3v9e6Dcwd-WpruE,310
 metaflow/plugins/secrets/inline_secrets_provider.py,sha256=EChmoBGA1i7qM3jtYwPpLZDBybXLergiDlN63E0u3x8,294
 metaflow/plugins/secrets/secrets_decorator.py,sha256=s-sFzPWOjahhpr5fMj-ZEaHkDYAPTO0isYXGvaUwlG8,11273
@@ -361,12 +363,12 @@ metaflow/user_configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
 metaflow/user_configs/config_decorators.py,sha256=qCKVAvd0NKgaCxQ2OThes5-DYHXq6A1HqURubYNeFdw,20481
 metaflow/user_configs/config_options.py,sha256=m6jccSpzI4qUJ7vyYkYBIf8G3V0Caunxg_k7zg4Zlqg,21067
 metaflow/user_configs/config_parameters.py,sha256=oeJGVKu1ao_YQX6Lg6P2FEv5k5-_F4sARLlVpTW9ezM,15502
-metaflow-2.15.0.data/data/share/metaflow/devtools/Makefile,sha256=
-metaflow-2.15.0.data/data/share/metaflow/devtools/Tiltfile,sha256=
-metaflow-2.15.0.data/data/share/metaflow/devtools/pick_services.sh,sha256=
-metaflow-2.15.0.dist-info/LICENSE,sha256=
-metaflow-2.15.0.dist-info/METADATA,sha256=
-metaflow-2.15.0.dist-info/WHEEL,sha256=
-metaflow-2.15.0.dist-info/entry_points.txt,sha256=
-metaflow-2.15.0.dist-info/top_level.txt,sha256=
-metaflow-2.15.0.dist-info/RECORD,,
+metaflow-2.15.2.data/data/share/metaflow/devtools/Makefile,sha256=uASGNuQaOrKWePYT8CmIuCvlOusm0P_HVWnGVzX74vs,13340
+metaflow-2.15.2.data/data/share/metaflow/devtools/Tiltfile,sha256=fPinb8a7KvRnYPFsjmIIqqOic3ROjsc_kryHqe-SHGw,20499
+metaflow-2.15.2.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
+metaflow-2.15.2.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+metaflow-2.15.2.dist-info/METADATA,sha256=M79nxx71YBOurt_Gu8akE_RP9mA8njbL_o49IfsCISY,6118
+metaflow-2.15.2.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+metaflow-2.15.2.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
+metaflow-2.15.2.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
+metaflow-2.15.2.dist-info/RECORD,,
{metaflow-2.15.0.data → metaflow-2.15.2.data}/data/share/metaflow/devtools/pick_services.sh
RENAMED
File without changes
{metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/LICENSE
RENAMED
File without changes
{metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/entry_points.txt
RENAMED
File without changes
{metaflow-2.15.0.dist-info → metaflow-2.15.2.dist-info}/top_level.txt
RENAMED
File without changes