ob-metaflow 2.15.0.1__py2.py3-none-any.whl → 2.15.3.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow has been flagged as potentially problematic; consult the package registry's advisory page for details.

Files changed (26)
  1. metaflow/__init__.py +6 -0
  2. metaflow/cmd/code/__init__.py +230 -0
  3. metaflow/cmd/develop/stub_generator.py +5 -2
  4. metaflow/cmd/main_cli.py +1 -0
  5. metaflow/cmd/make_wrapper.py +35 -3
  6. metaflow/extension_support/plugins.py +1 -0
  7. metaflow/metaflow_config.py +2 -0
  8. metaflow/metaflow_environment.py +3 -1
  9. metaflow/mflog/__init__.py +4 -3
  10. metaflow/plugins/__init__.py +14 -0
  11. metaflow/plugins/kubernetes/kubernetes_decorator.py +2 -1
  12. metaflow/plugins/kubernetes/kubernetes_jobsets.py +2 -0
  13. metaflow/plugins/pypi/bootstrap.py +17 -26
  14. metaflow/plugins/pypi/conda_environment.py +8 -8
  15. metaflow/plugins/pypi/parsers.py +268 -0
  16. metaflow/plugins/pypi/utils.py +18 -0
  17. metaflow/version.py +1 -1
  18. {ob_metaflow-2.15.0.1.data → ob_metaflow-2.15.3.1.data}/data/share/metaflow/devtools/Makefile +22 -13
  19. {ob_metaflow-2.15.0.1.data → ob_metaflow-2.15.3.1.data}/data/share/metaflow/devtools/Tiltfile +25 -6
  20. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/METADATA +2 -2
  21. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/RECORD +26 -24
  22. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/WHEEL +1 -1
  23. {ob_metaflow-2.15.0.1.data → ob_metaflow-2.15.3.1.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  24. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/LICENSE +0 -0
  25. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/entry_points.txt +0 -0
  26. {ob_metaflow-2.15.0.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/top_level.txt +0 -0
metaflow/__init__.py CHANGED
@@ -119,6 +119,12 @@ from .includefile import IncludeFile
119
119
  # Decorators
120
120
  from .decorators import step, _import_plugin_decorators
121
121
 
122
+
123
+ # Parsers (for configs) for now
124
+ from .plugins import _import_tl_plugins
125
+
126
+ _import_tl_plugins(globals())
127
+
122
128
  # this auto-generates decorator functions from Decorator objects
123
129
  # in the top-level metaflow namespace
124
130
  _import_plugin_decorators(globals())
@@ -0,0 +1,230 @@
1
+ import os
2
+ import shutil
3
+ import sys
4
+ from subprocess import PIPE, CompletedProcess, run
5
+ from tempfile import TemporaryDirectory
6
+ from typing import Any, Callable, List, Mapping, Optional, cast
7
+
8
+ from metaflow import Run
9
+ from metaflow._vendor import click
10
+ from metaflow.cli import echo_always
11
+
12
+
13
+ @click.group()
14
+ def cli():
15
+ pass
16
+
17
+
18
+ @cli.group(help="Access, compare, and manage code associated with Metaflow runs.")
19
+ def code():
20
+ pass
21
+
22
+
23
+ def echo(line: str) -> None:
24
+ echo_always(line, err=True, fg="magenta")
25
+
26
+
27
+ def extract_code_package(runspec: str) -> TemporaryDirectory:
28
+ try:
29
+ mf_run = Run(runspec, _namespace_check=False)
30
+ echo(f"✅ Run *{runspec}* found, downloading code..")
31
+ except Exception as e:
32
+ echo(f"❌ Run **{runspec}** not found")
33
+ raise e
34
+
35
+ if mf_run.code is None:
36
+ echo(
37
+ f"❌ Run **{runspec}** doesn't have a code package. Maybe it's a local run?"
38
+ )
39
+ raise RuntimeError("no code package found")
40
+
41
+ return mf_run.code.extract()
42
+
43
+
44
+ def perform_diff(
45
+ source_dir: str,
46
+ target_dir: Optional[str] = None,
47
+ output: bool = False,
48
+ **kwargs: Mapping[str, Any],
49
+ ) -> Optional[List[str]]:
50
+ if target_dir is None:
51
+ target_dir = os.getcwd()
52
+
53
+ diffs = []
54
+ for dirpath, dirnames, filenames in os.walk(source_dir, followlinks=True):
55
+ for fname in filenames:
56
+ # NOTE: the paths below need to be set up carefully
57
+ # for the `patch` command to work. Better not to touch
58
+ # the directories below. If you must, test that patches
59
+ # work after your changes.
60
+ #
61
+ # target_file is the git repo in the current working directory
62
+ rel = os.path.relpath(dirpath, source_dir)
63
+ target_file = os.path.join(rel, fname)
64
+ # source_file is the run file loaded in a tmp directory
65
+ source_file = os.path.join(dirpath, fname)
66
+
67
+ if sys.stdout.isatty() and not output:
68
+ color = ["--color"]
69
+ else:
70
+ color = ["--no-color"]
71
+
72
+ if os.path.exists(os.path.join(target_dir, target_file)):
73
+ cmd = (
74
+ ["git", "diff", "--no-index", "--exit-code"]
75
+ + color
76
+ + [
77
+ target_file,
78
+ source_file,
79
+ ]
80
+ )
81
+ result: CompletedProcess = run(
82
+ cmd, text=True, stdout=PIPE, cwd=target_dir
83
+ )
84
+ if result.returncode == 0:
85
+ if not output:
86
+ echo(f"✅ {target_file} is identical, skipping")
87
+ continue
88
+
89
+ if output:
90
+ diffs.append(result.stdout)
91
+ else:
92
+ run(["less", "-R"], input=result.stdout, text=True)
93
+ else:
94
+ if not output:
95
+ echo(f"❗ {target_file} not in the target directory, skipping")
96
+ return diffs if output else None
97
+
98
+
99
+ def run_op(
100
+ runspec: str, op: Callable[..., Optional[List[str]]], **op_args: Mapping[str, Any]
101
+ ) -> Optional[List[str]]:
102
+ tmp = None
103
+ try:
104
+ tmp = extract_code_package(runspec)
105
+ return op(tmp.name, **op_args)
106
+ finally:
107
+ if tmp and os.path.exists(tmp.name):
108
+ shutil.rmtree(tmp.name)
109
+
110
+
111
+ def run_op_diff_runs(
112
+ source_run_pathspec: str, target_run_pathspec: str, **op_args: Mapping[str, Any]
113
+ ) -> Optional[List[str]]:
114
+ source_tmp = None
115
+ target_tmp = None
116
+ try:
117
+ source_tmp = extract_code_package(source_run_pathspec)
118
+ target_tmp = extract_code_package(target_run_pathspec)
119
+ return perform_diff(source_tmp.name, target_tmp.name, **op_args)
120
+ finally:
121
+ for d in [source_tmp, target_tmp]:
122
+ if d and os.path.exists(d.name):
123
+ shutil.rmtree(d.name)
124
+
125
+
126
+ def op_diff(tmpdir: str, **kwargs: Mapping[str, Any]) -> Optional[List[str]]:
127
+ kwargs_dict = dict(kwargs)
128
+ target_dir = cast(Optional[str], kwargs_dict.pop("target_dir", None))
129
+ output: bool = bool(kwargs_dict.pop("output", False))
130
+ op_args: Mapping[str, Any] = {**kwargs_dict}
131
+ return perform_diff(tmpdir, target_dir=target_dir, output=output, **op_args)
132
+
133
+
134
+ def op_pull(tmpdir: str, dst: str, **op_args: Mapping[str, Any]) -> None:
135
+ if os.path.exists(dst):
136
+ echo(f"❌ Directory *{dst}* already exists")
137
+ else:
138
+ shutil.move(tmpdir, dst)
139
+ echo(f"Code downloaded to *{dst}*")
140
+
141
+
142
+ def op_patch(tmpdir: str, dst: str, **kwargs: Mapping[str, Any]) -> None:
143
+ diffs = perform_diff(tmpdir, output=True) or []
144
+ with open(dst, "w", encoding="utf-8") as f:
145
+ for out in diffs:
146
+ out = out.replace(tmpdir, "/.")
147
+ out = out.replace("+++ b/./", "+++ b/")
148
+ out = out.replace("--- b/./", "--- b/")
149
+ out = out.replace("--- a/./", "--- a/")
150
+ out = out.replace("+++ a/./", "+++ a/")
151
+ f.write(out)
152
+ echo(f"Patch saved in *{dst}*")
153
+ path = run(
154
+ ["git", "rev-parse", "--show-prefix"], text=True, stdout=PIPE
155
+ ).stdout.strip()
156
+ if path:
157
+ diropt = f" --directory={path.rstrip('/')}"
158
+ else:
159
+ diropt = ""
160
+ echo("Apply the patch by running:")
161
+ echo_always(
162
+ f"git apply --verbose{diropt} {dst}", highlight=True, bold=True, err=True
163
+ )
164
+
165
+
166
+ @code.command()
167
+ @click.argument("run_pathspec")
168
+ def diff(run_pathspec: str, **kwargs: Mapping[str, Any]) -> None:
169
+ """
170
+ Do a 'git diff' of the current directory and a Metaflow run.
171
+ """
172
+ _ = run_op(run_pathspec, op_diff, **kwargs)
173
+
174
+
175
+ @code.command()
176
+ @click.argument("source_run_pathspec")
177
+ @click.argument("target_run_pathspec")
178
+ def diff_runs(
179
+ source_run_pathspec: str, target_run_pathspec: str, **kwargs: Mapping[str, Any]
180
+ ) -> None:
181
+ """
182
+ Do a 'git diff' between two Metaflow runs.
183
+ """
184
+ _ = run_op_diff_runs(source_run_pathspec, target_run_pathspec, **kwargs)
185
+
186
+
187
+ @code.command()
188
+ @click.argument("run_pathspec")
189
+ @click.option(
190
+ "--dir", help="Destination directory (default: {run_pathspec}_code)", default=None
191
+ )
192
+ def pull(
193
+ run_pathspec: str, dir: Optional[str] = None, **kwargs: Mapping[str, Any]
194
+ ) -> None:
195
+ """
196
+ Pull the code of a Metaflow run.
197
+ """
198
+ if dir is None:
199
+ dir = run_pathspec.lower().replace("/", "_") + "_code"
200
+ op_args: Mapping[str, Any] = {**kwargs, "dst": dir}
201
+ run_op(run_pathspec, op_pull, **op_args)
202
+
203
+
204
+ @code.command()
205
+ @click.argument("run_pathspec")
206
+ @click.option(
207
+ "--file_path",
208
+ help="Patch file name. If not provided, defaults to a sanitized version of RUN_PATHSPEC "
209
+ "with slashes replaced by underscores, plus '.patch'.",
210
+ show_default=False,
211
+ )
212
+ @click.option(
213
+ "--overwrite", is_flag=True, help="Overwrite the patch file if it exists."
214
+ )
215
+ def patch(
216
+ run_pathspec: str,
217
+ file_path: Optional[str] = None,
218
+ overwrite: bool = False,
219
+ **kwargs: Mapping[str, Any],
220
+ ) -> None:
221
+ """
222
+ Create a patch by comparing current dir with a Metaflow run.
223
+ """
224
+ if file_path is None:
225
+ file_path = run_pathspec.lower().replace("/", "_") + ".patch"
226
+ if os.path.exists(file_path) and not overwrite:
227
+ echo(f"File *{file_path}* already exists. To overwrite, specify --overwrite.")
228
+ return
229
+ op_args: Mapping[str, Any] = {**kwargs, "dst": file_path}
230
+ run_op(run_pathspec, op_patch, **op_args)
@@ -1133,13 +1133,16 @@ class StubGenerator:
1133
1133
  result = result[1:]
1134
1134
  # Add doc to first and last overloads. Jedi uses the last one and pycharm
1135
1135
  # the first one. Go figure.
1136
+ result_docstring = docs["func_doc"]
1137
+ if docs["param_doc"]:
1138
+ result_docstring += "\nParameters\n----------\n" + docs["param_doc"]
1136
1139
  result[0] = (
1137
1140
  result[0][0],
1138
- docs["func_doc"] + "\nParameters\n----------\n" + docs["param_doc"],
1141
+ result_docstring,
1139
1142
  )
1140
1143
  result[-1] = (
1141
1144
  result[-1][0],
1142
- docs["func_doc"] + "\nParameters\n----------\n" + docs["param_doc"],
1145
+ result_docstring,
1143
1146
  )
1144
1147
  return result
1145
1148
 
metaflow/cmd/main_cli.py CHANGED
@@ -67,6 +67,7 @@ CMDS_DESC = [
67
67
  ("configure", ".configure_cmd.cli"),
68
68
  ("tutorials", ".tutorials_cmd.cli"),
69
69
  ("develop", ".develop.cli"),
70
+ ("code", ".code.cli"),
70
71
  ]
71
72
 
72
73
  process_cmds(globals())
@@ -2,15 +2,47 @@ import sys
2
2
  import subprocess
3
3
  from pathlib import Path
4
4
  import sysconfig
5
+ import site
6
+
7
+
8
+ def find_makefile():
9
+ possible_dirs = []
10
+
11
+ # 1) The standard sysconfig-based location
12
+ data_dir = sysconfig.get_paths()["data"]
13
+ possible_dirs.append(Path(data_dir) / "share" / "metaflow" / "devtools")
14
+
15
+ # 2) The user base (e.g. ~/.local on many systems)
16
+ user_base = site.getuserbase() # e.g. /home/runner/.local
17
+ possible_dirs.append(Path(user_base) / "share" / "metaflow" / "devtools")
18
+
19
+ # 3) site-packages can vary, we can guess share/.. near each site-packages
20
+ # (Works if pip actually placed devtools near site-packages.)
21
+ for p in site.getsitepackages():
22
+ possible_dirs.append(Path(p).parent / "share" / "metaflow" / "devtools")
23
+ user_site = site.getusersitepackages()
24
+ possible_dirs.append(Path(user_site).parent / "share" / "metaflow" / "devtools")
25
+
26
+ for candidate_dir in possible_dirs:
27
+ makefile_candidate = candidate_dir / "Makefile"
28
+ if makefile_candidate.is_file():
29
+ return makefile_candidate
30
+
31
+ return None
5
32
 
6
33
 
7
34
  def main():
8
- share_dir = Path(sysconfig.get_paths()["data"]) / "share" / "metaflow" / "devtools"
9
- makefile_path = share_dir / "Makefile"
35
+ makefile_path = find_makefile()
36
+ if not makefile_path:
37
+ print("ERROR: Could not find executable in any known location.")
38
+ sys.exit(1)
10
39
  cmd = ["make", "-f", str(makefile_path)] + sys.argv[1:]
11
- # subprocess.run(cmd, check=True)
40
+
12
41
  try:
13
42
  completed = subprocess.run(cmd, check=True)
14
43
  sys.exit(completed.returncode)
15
44
  except subprocess.CalledProcessError as ex:
16
45
  sys.exit(ex.returncode)
46
+ except KeyboardInterrupt:
47
+ print("Process interrupted by user. Exiting cleanly.")
48
+ sys.exit(1)
@@ -198,6 +198,7 @@ _plugin_categories = {
198
198
  list(x.commands)[0] if len(x.commands) == 1 else "too many commands"
199
199
  ),
200
200
  "runner_cli": lambda x: x.name,
201
+ "tl_plugin": None,
201
202
  }
202
203
 
203
204
 
@@ -382,6 +382,8 @@ KUBERNETES_DISK = from_conf("KUBERNETES_DISK", None)
382
382
  # Default kubernetes QoS class
383
383
  KUBERNETES_QOS = from_conf("KUBERNETES_QOS", "burstable")
384
384
 
385
+ # Architecture of kubernetes nodes - used for @conda/@pypi in metaflow-dev
386
+ KUBERNETES_CONDA_ARCH = from_conf("KUBERNETES_CONDA_ARCH")
385
387
  ARGO_WORKFLOWS_KUBERNETES_SECRETS = from_conf("ARGO_WORKFLOWS_KUBERNETES_SECRETS", "")
386
388
  ARGO_WORKFLOWS_ENV_VARS_TO_SKIP = from_conf("ARGO_WORKFLOWS_ENV_VARS_TO_SKIP", "")
387
389
 
@@ -6,7 +6,7 @@ from .util import get_username
6
6
  from . import metaflow_version
7
7
  from metaflow.exception import MetaflowException
8
8
  from metaflow.extension_support import dump_module_info
9
- from metaflow.mflog import BASH_MFLOG
9
+ from metaflow.mflog import BASH_MFLOG, BASH_FLUSH_LOGS
10
10
  from . import R
11
11
 
12
12
 
@@ -159,6 +159,7 @@ class MetaflowEnvironment(object):
159
159
  def get_package_commands(self, code_package_url, datastore_type):
160
160
  cmds = [
161
161
  BASH_MFLOG,
162
+ BASH_FLUSH_LOGS,
162
163
  "mflog 'Setting up task environment.'",
163
164
  self._get_install_dependencies_cmd(datastore_type),
164
165
  "mkdir metaflow",
@@ -176,6 +177,7 @@ class MetaflowEnvironment(object):
176
177
  "fi" % code_package_url,
177
178
  "TAR_OPTIONS='--warning=no-timestamp' tar xf job.tar",
178
179
  "mflog 'Task is starting.'",
180
+ "flush_mflogs",
179
181
  ]
180
182
  return cmds
181
183
 
@@ -44,6 +44,8 @@ BASH_MFLOG = (
44
44
  BASH_SAVE_LOGS_ARGS = ["python", "-m", "metaflow.mflog.save_logs"]
45
45
  BASH_SAVE_LOGS = " ".join(BASH_SAVE_LOGS_ARGS)
46
46
 
47
+ BASH_FLUSH_LOGS = "flush_mflogs(){ " f"{BASH_SAVE_LOGS}; " "}"
48
+
47
49
 
48
50
  # this function returns a bash expression that redirects stdout
49
51
  # and stderr of the given bash expression to mflog.tee
@@ -63,7 +65,7 @@ def bash_capture_logs(bash_expr, var_transform=None):
63
65
  # update_delay determines how often logs should be uploaded to S3
64
66
  # as a function of the task execution time
65
67
 
66
- MIN_UPDATE_DELAY = 1.0 # the most frequent update interval
68
+ MIN_UPDATE_DELAY = 0.25 # the most frequent update interval
67
69
  MAX_UPDATE_DELAY = 30.0 # the least frequent update interval
68
70
 
69
71
 
@@ -110,7 +112,6 @@ def export_mflog_env_vars(
110
112
 
111
113
  def tail_logs(prefix, stdout_tail, stderr_tail, echo, has_log_updates):
112
114
  def _available_logs(tail, stream, echo, should_persist=False):
113
- # print the latest batch of lines
114
115
  try:
115
116
  for line in tail:
116
117
  if should_persist:
@@ -128,7 +129,7 @@ def tail_logs(prefix, stdout_tail, stderr_tail, echo, has_log_updates):
128
129
 
129
130
  start_time = time.time()
130
131
  next_log_update = start_time
131
- log_update_delay = 1
132
+ log_update_delay = update_delay(0)
132
133
  while has_log_updates():
133
134
  if time.time() > next_log_update:
134
135
  _available_logs(stdout_tail, "stdout", echo)
@@ -167,6 +167,12 @@ DEPLOYER_IMPL_PROVIDERS_DESC = [
167
167
  ),
168
168
  ]
169
169
 
170
+ TL_PLUGINS_DESC = [
171
+ ("requirements_txt_parser", ".pypi.parsers.requirements_txt_parser"),
172
+ ("pyproject_toml_parser", ".pypi.parsers.pyproject_toml_parser"),
173
+ ("conda_environment_yml_parser", ".pypi.parsers.conda_environment_yml_parser"),
174
+ ]
175
+
170
176
  process_plugins(globals())
171
177
 
172
178
 
@@ -207,6 +213,8 @@ GCP_CLIENT_PROVIDERS = resolve_plugins("gcp_client_provider")
207
213
  if sys.version_info >= (3, 7):
208
214
  DEPLOYER_IMPL_PROVIDERS = resolve_plugins("deployer_impl_provider")
209
215
 
216
+ TL_PLUGINS = resolve_plugins("tl_plugin")
217
+
210
218
  from .cards.card_modules import MF_EXTERNAL_CARDS
211
219
 
212
220
  # Cards; due to the way cards were designed, it is harder to make them fit
@@ -251,3 +259,9 @@ CARDS = [
251
259
  TestRefreshComponentCard,
252
260
  ]
253
261
  merge_lists(CARDS, MF_EXTERNAL_CARDS, "type")
262
+
263
+
264
+ def _import_tl_plugins(globals_dict):
265
+
266
+ for name, p in TL_PLUGINS.items():
267
+ globals_dict[name] = p
@@ -29,6 +29,7 @@ from metaflow.metaflow_config import (
29
29
  KUBERNETES_SHARED_MEMORY,
30
30
  KUBERNETES_TOLERATIONS,
31
31
  KUBERNETES_QOS,
32
+ KUBERNETES_CONDA_ARCH,
32
33
  )
33
34
  from metaflow.plugins.resources_decorator import ResourcesDecorator
34
35
  from metaflow.plugins.timeout_decorator import get_run_time_limit_for_task
@@ -160,7 +161,7 @@ class KubernetesDecorator(StepDecorator):
160
161
 
161
162
  # Conda environment support
162
163
  supports_conda_environment = True
163
- target_platform = "linux-64"
164
+ target_platform = KUBERNETES_CONDA_ARCH or "linux-64"
164
165
 
165
166
  def init(self):
166
167
  super(KubernetesDecorator, self).init()
@@ -319,6 +319,8 @@ class RunningJobSet(object):
319
319
  def kill(self):
320
320
  plural = "jobsets"
321
321
  client = self._client.get()
322
+ if not (self.is_running or self.is_waiting):
323
+ return
322
324
  try:
323
325
  # Killing the control pod will trigger the jobset to mark everything as failed.
324
326
  # Since jobsets have a successPolicy set to `All` which ensures that everything has
@@ -8,6 +8,7 @@ import subprocess
8
8
  import sys
9
9
  import tarfile
10
10
  import time
11
+ import platform
11
12
  from urllib.error import URLError
12
13
  from urllib.request import urlopen
13
14
  from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONDA_USE_FAST_INIT
@@ -36,29 +37,6 @@ def timer(func):
36
37
 
37
38
 
38
39
  if __name__ == "__main__":
39
- # TODO: Detect architecture on the fly when dealing with arm architectures.
40
- # ARCH=$(uname -m)
41
- # OS=$(uname)
42
-
43
- # if [[ "$OS" == "Linux" ]]; then
44
- # PLATFORM="linux"
45
- # if [[ "$ARCH" == "aarch64" ]]; then
46
- # ARCH="aarch64";
47
- # elif [[ $ARCH == "ppc64le" ]]; then
48
- # ARCH="ppc64le";
49
- # else
50
- # ARCH="64";
51
- # fi
52
- # fi
53
-
54
- # if [[ "$OS" == "Darwin" ]]; then
55
- # PLATFORM="osx";
56
- # if [[ "$ARCH" == "arm64" ]]; then
57
- # ARCH="arm64";
58
- # else
59
- # ARCH="64"
60
- # fi
61
- # fi
62
40
 
63
41
  def run_cmd(cmd, stdin_str=None):
64
42
  result = subprocess.run(
@@ -350,12 +328,25 @@ if __name__ == "__main__":
350
328
  cmd = f"fast-initializer --prefix {prefix} --packages-dir {pkgs_dir}"
351
329
  run_cmd(cmd, all_package_urls)
352
330
 
353
- if len(sys.argv) != 5:
354
- print("Usage: bootstrap.py <flow_name> <id> <datastore_type> <architecture>")
331
+ if len(sys.argv) != 4:
332
+ print("Usage: bootstrap.py <flow_name> <id> <datastore_type>")
355
333
  sys.exit(1)
356
334
 
357
335
  try:
358
- _, flow_name, id_, datastore_type, architecture = sys.argv
336
+ _, flow_name, id_, datastore_type = sys.argv
337
+
338
+ system = platform.system().lower()
339
+ arch_machine = platform.machine().lower()
340
+
341
+ if system == "darwin" and arch_machine == "arm64":
342
+ architecture = "osx-arm64"
343
+ elif system == "darwin":
344
+ architecture = "osx-64"
345
+ elif system == "linux" and arch_machine == "aarch64":
346
+ architecture = "linux-aarch64"
347
+ else:
348
+ # default fallback
349
+ architecture = "linux-64"
359
350
 
360
351
  prefix = os.path.join(os.getcwd(), architecture, id_)
361
352
  pkgs_dir = os.path.join(os.getcwd(), ".pkgs")
@@ -190,7 +190,6 @@ class CondaEnvironment(MetaflowEnvironment):
190
190
  # 4. Start PyPI solves in parallel after each conda environment is created
191
191
  # 5. Download PyPI packages sequentially
192
192
  # 6. Create and cache PyPI environments in parallel
193
-
194
193
  with ThreadPoolExecutor() as executor:
195
194
  # Start all conda solves in parallel
196
195
  conda_futures = [
@@ -213,14 +212,14 @@ class CondaEnvironment(MetaflowEnvironment):
213
212
 
214
213
  # Queue PyPI solve to start after conda create
215
214
  if result[0] in pypi_envs:
215
+ # solve pypi envs uniquely
216
+ pypi_env = pypi_envs.pop(result[0])
216
217
 
217
218
  def pypi_solve(env):
218
219
  create_future.result() # Wait for conda create
219
220
  return solve(*env, "pypi")
220
221
 
221
- pypi_futures.append(
222
- executor.submit(pypi_solve, pypi_envs[result[0]])
223
- )
222
+ pypi_futures.append(executor.submit(pypi_solve, pypi_env))
224
223
 
225
224
  # Process PyPI results sequentially for downloads
226
225
  for solve_future in pypi_futures:
@@ -242,7 +241,7 @@ class CondaEnvironment(MetaflowEnvironment):
242
241
  if id_:
243
242
  # bootstrap.py is responsible for ensuring the validity of this executable.
244
243
  # -s is important! Can otherwise leak packages to other environments.
245
- return os.path.join("linux-64", id_, "bin/python -s")
244
+ return os.path.join("$MF_ARCH", id_, "bin/python -s")
246
245
  else:
247
246
  # for @conda/@pypi(disabled=True).
248
247
  return super().executable(step_name, default)
@@ -315,7 +314,6 @@ class CondaEnvironment(MetaflowEnvironment):
315
314
  # 5. All resolved packages (Conda or PyPI) are cached
316
315
  # 6. PyPI packages are only installed for local platform
317
316
 
318
- # Resolve `linux-64` Conda environments if @batch or @kubernetes are in play
319
317
  target_platform = conda_platform()
320
318
  for decorator in step.decorators:
321
319
  # NOTE: Keep the list of supported decorator names for backward compatibility purposes.
@@ -329,7 +327,6 @@ class CondaEnvironment(MetaflowEnvironment):
329
327
  "snowpark",
330
328
  "slurm",
331
329
  ]:
332
- # TODO: Support arm architectures
333
330
  target_platform = getattr(decorator, "target_platform", "linux-64")
334
331
  break
335
332
 
@@ -427,15 +424,18 @@ class CondaEnvironment(MetaflowEnvironment):
427
424
  if id_:
428
425
  return [
429
426
  "echo 'Bootstrapping virtual environment...'",
427
+ "flush_mflogs",
430
428
  # We have to prevent the tracing module from loading,
431
429
  # as the bootstrapping process uses the internal S3 client which would fail to import tracing
432
430
  # due to the required dependencies being bundled into the conda environment,
433
431
  # which is yet to be initialized at this point.
434
- 'DISABLE_TRACING=True python -m metaflow.plugins.pypi.bootstrap "%s" %s "%s" linux-64'
432
+ 'DISABLE_TRACING=True python -m metaflow.plugins.pypi.bootstrap "%s" %s "%s"'
435
433
  % (self.flow.name, id_, self.datastore_type),
436
434
  "echo 'Environment bootstrapped.'",
435
+ "flush_mflogs",
437
436
  # To avoid having to install micromamba in the PATH in micromamba.py, we add it to the PATH here.
438
437
  "export PATH=$PATH:$(pwd)/micromamba/bin",
438
+ "export MF_ARCH=$(case $(uname)/$(uname -m) in Darwin/arm64)echo osx-arm64;;Darwin/*)echo osx-64;;Linux/aarch64)echo linux-aarch64;;*)echo linux-64;;esac)",
439
439
  ]
440
440
  else:
441
441
  # for @conda/@pypi(disabled=True).
@@ -0,0 +1,268 @@
1
+ # this file can be overridden by extensions as is (e.g. metaflow-nflx-extensions)
2
+ from metaflow.exception import MetaflowException
3
+
4
+
5
+ class ParserValueError(MetaflowException):
6
+ headline = "Value error"
7
+
8
+
9
+ def requirements_txt_parser(content: str):
10
+ """
11
+ Parse non-comment lines from a requirements.txt file as strictly valid
12
+ PEP 508 requirements.
13
+
14
+ Recognizes direct references (e.g. "my_lib @ git+https://..."), extras
15
+ (e.g. "requests[security]"), and version specifiers (e.g. "==2.0"). If
16
+ the package name is "python", its specifier is stored in the "python"
17
+ key instead of "packages".
18
+
19
+ Parameters
20
+ ----------
21
+ content : str
22
+ Contents of a requirements.txt file.
23
+
24
+ Returns
25
+ -------
26
+ dict
27
+ A dictionary with two keys:
28
+ - "packages": dict(str -> str)
29
+ Mapping from package name (plus optional extras/references) to a
30
+ version specifier string.
31
+ - "python": str or None
32
+ The Python version constraints if present, otherwise None.
33
+
34
+ Raises
35
+ ------
36
+ ParserValueError
37
+ If a requirement line is invalid PEP 508 or if environment markers are
38
+ detected, or if multiple Python constraints are specified.
39
+ """
40
+ import re
41
+ from metaflow._vendor.packaging.requirements import Requirement, InvalidRequirement
42
+
43
+ parsed = {"packages": {}, "python": None}
44
+
45
+ inline_comment_pattern = re.compile(r"\s+#.*$")
46
+ for line in content.splitlines():
47
+ line = line.strip()
48
+
49
+ # support Rye lockfiles by skipping lines not compliant with requirements
50
+ if line == "-e file:.":
51
+ continue
52
+
53
+ if not line or line.startswith("#"):
54
+ continue
55
+
56
+ line = inline_comment_pattern.sub("", line).strip()
57
+ if not line:
58
+ continue
59
+
60
+ try:
61
+ req = Requirement(line)
62
+ except InvalidRequirement:
63
+ raise ParserValueError(f"Not a valid PEP 508 requirement: '{line}'")
64
+
65
+ if req.marker is not None:
66
+ raise ParserValueError(
67
+ "Environment markers (e.g. 'platform_system==\"Linux\"') "
68
+ f"are not supported for line: '{line}'"
69
+ )
70
+
71
+ dep_key = req.name
72
+ if req.extras:
73
+ dep_key += f"[{','.join(req.extras)}]"
74
+ if req.url:
75
+ dep_key += f"@{req.url}"
76
+
77
+ dep_spec = str(req.specifier).lstrip(" =")
78
+
79
+ if req.name.lower() == "python":
80
+ if parsed["python"] is not None and dep_spec:
81
+ raise ParserValueError(
82
+ f"Multiple Python version specs not allowed: '{line}'"
83
+ )
84
+ parsed["python"] = dep_spec or None
85
+ else:
86
+ parsed["packages"][dep_key] = dep_spec
87
+
88
+ return parsed
89
+
90
+
91
+ def pyproject_toml_parser(content: str):
92
+ """
93
+ Parse a pyproject.toml file per PEP 621.
94
+
95
+ Reads the 'requires-python' and 'dependencies' fields from the "[project]" section.
96
+ Each dependency line must be a valid PEP 508 requirement. If the package name is
97
+ "python", its specifier is stored in the "python" key instead of "packages".
98
+
99
+ Parameters
100
+ ----------
101
+ content : str
102
+ Contents of a pyproject.toml file.
103
+
104
+ Returns
105
+ -------
106
+ dict
107
+ A dictionary with two keys:
108
+ - "packages": dict(str -> str)
109
+ Mapping from package name (plus optional extras/references) to a
110
+ version specifier string.
111
+ - "python": str or None
112
+ The Python version constraints if present, otherwise None.
113
+
114
+ Raises
115
+ ------
116
+ RuntimeError
117
+ If no TOML library (tomllib in Python 3.11+ or tomli in earlier versions) is found.
118
+ ParserValueError
119
+ If a dependency is not valid PEP 508, if environment markers are used, or if
120
+ multiple Python constraints are specified.
121
+ """
122
+ try:
123
+ import tomllib as toml # Python 3.11+
124
+ except ImportError:
125
+ try:
126
+ import tomli as toml # Python < 3.11 (requires "tomli" package)
127
+ except ImportError:
128
+ raise RuntimeError(
129
+ "Could not import a TOML library. For Python <3.11, please install 'tomli'."
130
+ )
131
+ from metaflow._vendor.packaging.requirements import Requirement, InvalidRequirement
132
+
133
+ data = toml.loads(content)
134
+
135
+ project = data.get("project", {})
136
+ requirements = project.get("dependencies", [])
137
+ requires_python = project.get("requires-python")
138
+
139
+ parsed = {"packages": {}, "python": None}
140
+
141
+ if requires_python is not None:
142
+ # If present, store verbatim; note that PEP 621 does not necessarily
143
+ # require "python" to be a dependency in the usual sense.
144
+ # Example: "requires-python" = ">=3.7,<4"
145
+ parsed["python"] = requires_python.lstrip("=").strip()
146
+
147
+ for dep_line in requirements:
148
+ dep_line_stripped = dep_line.strip()
149
+ try:
150
+ req = Requirement(dep_line_stripped)
151
+ except InvalidRequirement:
152
+ raise ParserValueError(
153
+ f"Not a valid PEP 508 requirement: '{dep_line_stripped}'"
154
+ )
155
+
156
+ if req.marker is not None:
157
+ raise ParserValueError(
158
+ f"Environment markers not supported for line: '{dep_line_stripped}'"
159
+ )
160
+
161
+ dep_key = req.name
162
+ if req.extras:
163
+ dep_key += f"[{','.join(req.extras)}]"
164
+ if req.url:
165
+ dep_key += f"@{req.url}"
166
+
167
+ dep_spec = str(req.specifier).lstrip("=")
168
+
169
+ if req.name.lower() == "python":
170
+ if parsed["python"] is not None and dep_spec:
171
+ raise ParserValueError(
172
+ f"Multiple Python version specs not allowed: '{dep_line_stripped}'"
173
+ )
174
+ parsed["python"] = dep_spec or None
175
+ else:
176
+ parsed["packages"][dep_key] = dep_spec
177
+
178
+ return parsed
179
+
180
+
181
+ def conda_environment_yml_parser(content: str):
182
+ """
183
+ Parse a minimal environment.yml file under strict assumptions.
184
+
185
+ The file must contain a 'dependencies:' line, after which each dependency line
186
+ appears with a '- ' prefix. Python can appear as 'python=3.9', etc.; other
187
+ packages as 'numpy=1.21.2' or simply 'numpy'. Non-compliant lines raise ParserValueError.
188
+
189
+ Parameters
190
+ ----------
191
+ content : str
192
+ Contents of a environment.yml file.
193
+
194
+ Returns
195
+ -------
196
+ dict
197
+ A dictionary with keys:
198
+ {
199
+ "packages": dict(str -> str),
200
+ "python": str or None
201
+ }
202
+
203
+ Raises
204
+ ------
205
+ ParserValueError
206
+ If the file has malformed lines or unsupported sections.
207
+ """
208
+ import re
209
+
210
+ packages = {}
211
+ python_version = None
212
+
213
+ inside_dependencies = False
214
+
215
+ # Basic pattern for lines like "numpy=1.21.2"
216
+ # Group 1: package name
217
+ # Group 2: optional operator + version (could be "=1.21.2", "==1.21.2", etc.)
218
+ line_regex = re.compile(r"^([A-Za-z0-9_\-\.]+)(\s*[=<>!~].+\s*)?$")
219
+ inline_comment_pattern = re.compile(r"\s+#.*$")
220
+
221
+ for line in content.splitlines():
222
+ line = line.strip()
223
+ if not line or line.startswith("#"):
224
+ continue
225
+
226
+ line = inline_comment_pattern.sub("", line).strip()
227
+ if not line:
228
+ continue
229
+
230
+ if line.lower().startswith("dependencies:"):
231
+ inside_dependencies = True
232
+ continue
233
+
234
+ if inside_dependencies and not line.startswith("-"):
235
+ inside_dependencies = False
236
+ continue
237
+
238
+ if not inside_dependencies:
239
+ continue
240
+
241
+ dep_line = line.lstrip("-").strip()
242
+ if dep_line.endswith(":"):
243
+ raise ParserValueError(
244
+ f"Unsupported subsection '{dep_line}' in environment.yml."
245
+ )
246
+
247
+ match = line_regex.match(dep_line)
248
+ if not match:
249
+ raise ParserValueError(
250
+ f"Line '{dep_line}' is not a valid conda package specifier."
251
+ )
252
+
253
+ pkg_name, pkg_version_part = match.groups()
254
+ version_spec = pkg_version_part.strip() if pkg_version_part else ""
255
+
256
+ if version_spec.startswith("="):
257
+ version_spec = version_spec.lstrip("=").strip()
258
+
259
+ if pkg_name.lower() == "python":
260
+ if python_version is not None and version_spec:
261
+ raise ParserValueError(
262
+ f"Multiple Python version specs detected: '{dep_line}'"
263
+ )
264
+ python_version = version_spec
265
+ else:
266
+ packages[pkg_name] = version_spec
267
+
268
+ return {"packages": packages, "python": python_version}
@@ -72,6 +72,24 @@ def pip_tags(python_version, mamba_platform):
72
72
  )
73
73
  ]
74
74
  platforms.append("linux_x86_64")
75
+ elif mamba_platform == "linux-aarch64":
76
+ platforms = [
77
+ "manylinux%s_aarch64" % s
78
+ for s in (
79
+ "2014",
80
+ "_2_17",
81
+ "_2_18",
82
+ "_2_19",
83
+ "_2_20",
84
+ "_2_21",
85
+ "_2_23",
86
+ "_2_24",
87
+ "_2_25",
88
+ "_2_26",
89
+ "_2_27",
90
+ )
91
+ ]
92
+ platforms.append("linux_aarch64")
75
93
  elif mamba_platform == "osx-64":
76
94
  platforms = tags.mac_platforms(arch="x86_64")
77
95
  elif mamba_platform == "osx-arm64":
metaflow/version.py CHANGED
@@ -1 +1 @@
1
- metaflow_version = "2.15.0.1"
1
+ metaflow_version = "2.15.3.1"
@@ -8,6 +8,7 @@ help:
8
8
  @echo " ui - Open Metaflow UI"
9
9
  @echo " dashboard - Open Minikube dashboard"
10
10
  @echo " down - Stop and clean up the environment"
11
+ @echo " all-up - Start the development environment with all services"
11
12
  @echo " help - Show this help message"
12
13
 
13
14
  HELM_VERSION := v3.14.0
@@ -24,10 +25,10 @@ MINIKUBE := $(MINIKUBE_DIR)/minikube
24
25
  TILT_DIR := $(DEVTOOLS_DIR)/tilt
25
26
  TILT := $(TILT_DIR)/tilt
26
27
  TILTFILE := $(MKFILE_DIR)/Tiltfile
27
- MAKE_CMD := $(MAKE) -C "$(MKFILE_DIR)"
28
+ MAKE_CMD := $(MAKE) -f "$(MKFILE_PATH)"
28
29
 
29
30
  MINIKUBE_CPUS ?= 4
30
- MINIKUBE_MEMORY ?= 6000
31
+ MINIKUBE_MEMORY ?= 6144
31
32
  MINIKUBE_DISK_SIZE ?= 20g
32
33
 
33
34
  ifeq ($(shell uname), Darwin)
@@ -129,7 +130,8 @@ setup-minikube:
129
130
  --cpus $(MINIKUBE_CPUS) \
130
131
  --memory $(MINIKUBE_MEMORY) \
131
132
  --disk-size $(MINIKUBE_DISK_SIZE) \
132
- --driver docker; \
133
+ --driver docker \
134
+ || { echo "❌ Failed to start Minikube (check if Docker is running)"; exit 1; }; \
133
135
  echo "🔌 Enabling metrics-server and dashboard (quietly)..."; \
134
136
  $(MINIKUBE) addons enable metrics-server >/dev/null 2>&1; \
135
137
  $(MINIKUBE) addons enable dashboard >/dev/null 2>&1; \
@@ -170,7 +172,6 @@ up: install-brew check-docker install-curl install-gum setup-minikube install-he
170
172
  @echo 'trap "exit" INT TERM' >> $(DEVTOOLS_DIR)/start.sh
171
173
  @echo 'trap "kill 0" EXIT' >> $(DEVTOOLS_DIR)/start.sh
172
174
  @echo 'eval $$($(MINIKUBE) docker-env)' >> $(DEVTOOLS_DIR)/start.sh
173
-
174
175
  @echo 'if [ -n "$$SERVICES_OVERRIDE" ]; then' >> "$(DEVTOOLS_DIR)/start.sh"
175
176
  @echo ' echo "🌐 Using user-provided list of services: $$SERVICES_OVERRIDE"' >> "$(DEVTOOLS_DIR)/start.sh"
176
177
  @echo ' SERVICES="$$SERVICES_OVERRIDE"' >> "$(DEVTOOLS_DIR)/start.sh"
@@ -186,11 +187,14 @@ up: install-brew check-docker install-curl install-gum setup-minikube install-he
186
187
  @chmod +x $(DEVTOOLS_DIR)/start.sh
187
188
  @$(DEVTOOLS_DIR)/start.sh
188
189
 
190
+ all-up:
191
+ @echo "🚀 Starting up all services..."
192
+ SERVICES_OVERRIDE=all $(MAKE_CMD) up
193
+
189
194
  down:
190
195
  @echo "🛑 Stopping all services..."
191
196
  @-pkill -f "$(MINIKUBE) tunnel" 2>/dev/null || true
192
197
  @echo "⏹️ Stopping Tilt..."
193
- -PATH="$(MINIKUBE_DIR):$(TILT_DIR):$$PATH" tilt down -f $(TILTFILE)
194
198
  @echo "🧹 Cleaning up Minikube..."
195
199
  $(MAKE_CMD) teardown-minikube
196
200
  @echo "🗑️ Removing Tilt binary and directory..."
@@ -201,7 +205,7 @@ down:
201
205
 
202
206
  shell: setup-tilt
203
207
  @echo "⏳ Checking if development environment is up..."
204
- @set -e; \
208
+ @set -eu; \
205
209
  for i in $$(seq 1 90); do \
206
210
  if "$(TILT)" get session >/dev/null 2>&1; then \
207
211
  found_session=1; \
@@ -210,7 +214,7 @@ shell: setup-tilt
210
214
  sleep 2; \
211
215
  fi; \
212
216
  done; \
213
- if [ -z "$${found_session}" ]; then \
217
+ if [ -z "$${found_session:-}" ]; then \
214
218
  echo "❌ Development environment is not up."; \
215
219
  echo " Please run 'metaflow-dev up' in another terminal, then re-run 'metaflow-dev shell'."; \
216
220
  exit 1; \
@@ -220,7 +224,10 @@ shell: setup-tilt
220
224
  "$(TILT)" get uiresource generate-configs >/dev/null 2>&1; \
221
225
  status=$$?; \
222
226
  if [ $$status -eq 0 ]; then \
223
- "$(TILT)" wait --for=condition=Ready uiresource/generate-configs; \
227
+ if ! "$(TILT)" wait --for=condition=Ready uiresource/generate-configs --timeout=300s; then \
228
+ echo "❌ Timed out waiting for development environment to be ready."; \
229
+ exit 1; \
230
+ fi; \
224
231
  break; \
225
232
  elif [ $$status -eq 127 ]; then \
226
233
  echo "❌ Development environment is not up."; \
@@ -250,8 +257,7 @@ shell: setup-tilt
250
257
  "$$user_shell" -i; \
251
258
  fi'
252
259
 
253
- # TODO: This can be done away with in a while since we now have metaflow-dev command.
254
- #
260
+
255
261
  # @echo '$(MAKE_CMD) create-dev-shell' >> $(DEVTOOLS_DIR)/start.sh
256
262
  # @echo 'rm -f /tmp/metaflow-devshell-*' >> $(DEVTOOLS_DIR)/start.sh
257
263
  create-dev-shell: setup-tilt
@@ -268,7 +274,10 @@ create-dev-shell: setup-tilt
268
274
  echo "fi" >> $$SHELL_PATH && \
269
275
  echo "" >> $$SHELL_PATH && \
270
276
  echo "echo \"⏳ Waiting for development environment to be ready...\"" >> $$SHELL_PATH && \
271
- echo "$(TILT) wait --for=condition=Ready uiresource/generate-configs" >> $$SHELL_PATH && \
277
+ echo "if ! $(TILT) wait --for=condition=Ready uiresource/generate-configs --timeout=300s; then" >> $$SHELL_PATH && \
278
+ echo " echo \"❌ Timed out waiting for development environment to be ready.\"" >> $$SHELL_PATH && \
279
+ echo " exit 1" >> $$SHELL_PATH && \
280
+ echo "fi" >> $$SHELL_PATH && \
272
281
  echo "" >> $$SHELL_PATH && \
273
282
  echo "echo \"🔧 Starting a new shell for development environment...\"" >> $$SHELL_PATH && \
274
283
  echo "if [ -n \"\$$SHELL\" ]; then" >> $$SHELL_PATH && \
@@ -318,6 +327,6 @@ ui: setup-tilt
318
327
  @echo "🔗 Opening Metaflow UI at http://localhost:3000"
319
328
  @open http://localhost:3000
320
329
 
321
- .PHONY: install-helm setup-minikube setup-tilt teardown-minikube tunnel up down check-docker install-curl install-gum install-brew up down dashboard shell ui help
330
+ .PHONY: install-helm setup-minikube setup-tilt teardown-minikube tunnel up down check-docker install-curl install-gum install-brew up down dashboard shell ui all-up help
322
331
 
323
- .DEFAULT_GOAL := up
332
+ .DEFAULT_GOAL := help
@@ -23,8 +23,13 @@ components = {
23
23
  "argo-events": ["argo-workflows"],
24
24
  }
25
25
 
26
- if os.getenv("SERVICES", "").strip():
27
- requested_components = os.getenv("SERVICES", "").split(",")
26
+ services_env = os.getenv("SERVICES", "").strip().lower()
27
+
28
+ if services_env:
29
+ if services_env == "all":
30
+ requested_components = list(components.keys())
31
+ else:
32
+ requested_components = services_env.split(",")
28
33
  else:
29
34
  requested_components = list(components.keys())
30
35
 
@@ -78,7 +83,7 @@ for component in requested_components:
78
83
  if result not in enabled_components:
79
84
  enabled_components.append(result)
80
85
 
81
- # Print a friendly summary when running `tilt up`.
86
+ # Print a friendly summary when running `tilt up`.
82
87
  if config.tilt_subcommand == 'up':
83
88
  print("\n📦 Components to install:")
84
89
  for component in enabled_components:
@@ -99,6 +104,7 @@ if "minio" in enabled_components:
99
104
  set=[
100
105
  'rootUser=rootuser',
101
106
  'rootPassword=rootpass123',
107
+ # TODO: perturb the bucket name to avoid conflicts
102
108
  'buckets[0].name=metaflow-test',
103
109
  'buckets[0].policy=none',
104
110
  'buckets[0].purge=false',
@@ -498,13 +504,14 @@ if "argo-events" in enabled_components:
498
504
  'argo-events-controller-manager',
499
505
  labels=['argo-events'],
500
506
  )
501
-
507
+
502
508
  metaflow_config["METAFLOW_ARGO_EVENTS_EVENT"] = "metaflow-event"
503
509
  metaflow_config["METAFLOW_ARGO_EVENTS_EVENT_BUS"] = "default"
504
510
  metaflow_config["METAFLOW_ARGO_EVENTS_EVENT_SOURCE"] = "argo-events-webhook"
505
511
  metaflow_config["METAFLOW_ARGO_EVENTS_SERVICE_ACCOUNT"] = "operate-workflow-sa"
506
512
  metaflow_config["METAFLOW_ARGO_EVENTS_WEBHOOK_AUTH"] = "service"
507
- metaflow_config["METAFLOW_ARGO_EVENTS_WEBHOOK_URL"] = "http://argo-events-webhook-eventsource-svc:12000/metaflow-event"
513
+ metaflow_config["METAFLOW_ARGO_EVENTS_INTERNAL_WEBHOOK_URL"] = "http://argo-events-webhook-eventsource-svc:12000/metaflow-event"
514
+ metaflow_config["METAFLOW_ARGO_EVENTS_WEBHOOK_URL"] = "http://localhost:12000/metaflow-event"
508
515
 
509
516
  config_resources.append('argo-events-controller-manager')
510
517
  config_resources.append('argo-events-webhook-eventsource-svc')
@@ -600,8 +607,20 @@ if "ui" in enabled_components:
600
607
  config_resources.append('metaflow-ui')
601
608
  config_resources.append('metaflow-ui-static')
602
609
 
610
+ cmd = '''
611
+ ARCH=$(kubectl get nodes -o jsonpath='{.items[0].status.nodeInfo.architecture}')
612
+ case "$ARCH" in
613
+ arm64) echo linux-aarch64 ;;
614
+ amd64) echo linux-64 ;;
615
+ *) echo linux-64 ;;
616
+ esac
617
+ '''
618
+
619
+ # For @conda/@pypi emulation
620
+ metaflow_config["METAFLOW_KUBERNETES_CONDA_ARCH"] = str(local(cmd)).strip()
621
+
603
622
  local_resource(
604
623
  name="generate-configs",
605
624
  cmd=write_config_files(),
606
625
  resource_deps=config_resources,
607
- )
626
+ )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ob-metaflow
3
- Version: 2.15.0.1
3
+ Version: 2.15.3.1
4
4
  Summary: Metaflow: More AI and ML, Less Engineering
5
5
  Author: Netflix, Outerbounds & the Metaflow Community
6
6
  Author-email: help@outerbounds.co
@@ -12,7 +12,7 @@ Requires-Dist: boto3
12
12
  Requires-Dist: pylint
13
13
  Requires-Dist: kubernetes
14
14
  Provides-Extra: stubs
15
- Requires-Dist: metaflow-stubs==2.15.0.1; extra == "stubs"
15
+ Requires-Dist: metaflow-stubs==2.15.3.1; extra == "stubs"
16
16
  Dynamic: author
17
17
  Dynamic: author-email
18
18
  Dynamic: description
@@ -1,5 +1,5 @@
1
1
  metaflow/R.py,sha256=CqVfIatvmjciuICNnoyyNGrwE7Va9iXfLdFbQa52hwA,3958
2
- metaflow/__init__.py,sha256=XprEDmjLLNTyKHMMDyUglG6c3lEjPGxClROvx7iabxs,6201
2
+ metaflow/__init__.py,sha256=cuNzIhOJ9ivGXgTGMuaVfdhtWpUMXbnsINVahBk4O4E,6306
3
3
  metaflow/cards.py,sha256=IbRmredvmFEU0V6JL7DR8wCESwVmmZJubr6x24bo7U4,442
4
4
  metaflow/cli.py,sha256=RU-yXpT-Lfl3xGyFNtL742e9KEqcRxEnQ-4mwXrXhvo,20928
5
5
  metaflow/cli_args.py,sha256=hDsdWdRmfXYifVGq6b6FDfgoWxtIG2nr_lU6EBV0Pnk,3584
@@ -16,10 +16,10 @@ metaflow/includefile.py,sha256=kWKDSlzVcRVNGG9PV5eB3o2ynrzqhVsfaLtkqjshn7Q,20948
16
16
  metaflow/info_file.py,sha256=wtf2_F0M6dgiUu74AFImM8lfy5RrUw5Yj7Rgs2swKRY,686
17
17
  metaflow/integrations.py,sha256=LlsaoePRg03DjENnmLxZDYto3NwWc9z_PtU6nJxLldg,1480
18
18
  metaflow/lint.py,sha256=x4p6tnRzYqNNniCGXyrUW0WuYfTUgnaOMRivxvnxask,11661
19
- metaflow/metaflow_config.py,sha256=RPWT2JEhYUg1IW2965QfB2bhXi_fWei7yMP0Kcxe6JQ,23598
19
+ metaflow/metaflow_config.py,sha256=i-7nDYXWTF9fTgtlD_AavTFsNfUma-y4Z0DFKp6ybkM,23732
20
20
  metaflow/metaflow_config_funcs.py,sha256=5GlvoafV6SxykwfL8D12WXSfwjBN_NsyuKE_Q3gjGVE,6738
21
21
  metaflow/metaflow_current.py,sha256=pfkXmkyHeMJhxIs6HBJNBEaBDpcl5kz9Wx5mW6F_3qo,7164
22
- metaflow/metaflow_environment.py,sha256=gs_UpYpuOKEEfFM0z0tnwje7zAVsQ5Ck7Dp2M9_1utQ,8065
22
+ metaflow/metaflow_environment.py,sha256=e5BOkA7VdpjseI4HUkm_pR74NVJRNADL20LIQL4W1vU,8139
23
23
  metaflow/metaflow_profile.py,sha256=jKPEW-hmAQO-htSxb9hXaeloLacAh41A35rMZH6G8pA,418
24
24
  metaflow/metaflow_version.py,sha256=duhIzfKZtcxMVMs2uiBqBvUarSHJqyWDwMhaBOQd_g0,7491
25
25
  metaflow/monitor.py,sha256=T0NMaBPvXynlJAO_avKtk8OIIRMyEuMAyF8bIp79aZU,5323
@@ -36,7 +36,7 @@ metaflow/tuple_util.py,sha256=_G5YIEhuugwJ_f6rrZoelMFak3DqAR2tt_5CapS1XTY,830
36
36
  metaflow/unbounded_foreach.py,sha256=p184WMbrMJ3xKYHwewj27ZhRUsSj_kw1jlye5gA9xJk,387
37
37
  metaflow/util.py,sha256=mJBkV5tShIyCsLDeM1zygQGeciQVMrVPm_qI8Oi33G0,14656
38
38
  metaflow/vendor.py,sha256=FchtA9tH22JM-eEtJ2c9FpUdMn8sSb1VHuQS56EcdZk,5139
39
- metaflow/version.py,sha256=NQEH5O4Tqo4ZbEvQqIweiu-dx2DFvVXi22aZRw_Tsr8,30
39
+ metaflow/version.py,sha256=PRWe10Ts-pc4mffOZY75hEPcy8y3fVOIpZNMxJlbk9I,30
40
40
  metaflow/_vendor/__init__.py,sha256=y_CiwUD3l4eAKvTVDZeqgVujMy31cAM1qjAB-HfI-9s,353
41
41
  metaflow/_vendor/typing_extensions.py,sha256=0nUs5p1A_UrZigrAVBoOEM6TxU37zzPDUtiij1ZwpNc,110417
42
42
  metaflow/_vendor/zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
@@ -121,12 +121,13 @@ metaflow/client/core.py,sha256=Cca6HbK-UBO72aELfFJxsl85ylYHHlCAd-uJP-lEepQ,83689
121
121
  metaflow/client/filecache.py,sha256=Wy0yhhCqC1JZgebqi7z52GCwXYnkAqMZHTtxThvwBgM,15229
122
122
  metaflow/cmd/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
123
123
  metaflow/cmd/configure_cmd.py,sha256=o-DKnUf2FBo_HiMVyoyzQaGBSMtpbEPEdFTQZ0hkU-k,33396
124
- metaflow/cmd/main_cli.py,sha256=H0UC-jiZjThHZgQeMyNZh3raSDDyUTvMspYUqKFnNSU,2955
125
- metaflow/cmd/make_wrapper.py,sha256=NFpSdESs4Ks9xeurmYB5VUyYplhNcONDZJcUP2cf8-8,494
124
+ metaflow/cmd/main_cli.py,sha256=LSehmMjkWojAN1XTtqW6S51ZpGNAdW4_VK5S7qH8-Ts,2982
125
+ metaflow/cmd/make_wrapper.py,sha256=N8L4u8QZAryH0sAjRsdEqG-gTj2S4LUsfDizOemrTR0,1604
126
126
  metaflow/cmd/tutorials_cmd.py,sha256=8FdlKkicTOhCIDKcBR5b0Oz6giDvS-EMY3o9skIrRqw,5156
127
127
  metaflow/cmd/util.py,sha256=jS_0rUjOnGGzPT65fzRLdGjrYAOOLA4jU2S0HJLV0oc,406
128
+ metaflow/cmd/code/__init__.py,sha256=VO4dNM9M9LHYy5nTgEiJvCV1RBl8lpDlYGJm6GIcaBA,7413
128
129
  metaflow/cmd/develop/__init__.py,sha256=p1Sy8yU1MEKSrH5ttOWOZvNcI1qYu6J6jghdTHwPgOw,689
129
- metaflow/cmd/develop/stub_generator.py,sha256=vhhEAPkHN-9wZYW4rqnMjVsjwqm4Avn_inVx-wC6DKE,65164
130
+ metaflow/cmd/develop/stub_generator.py,sha256=bo2yWe0kvCZ-3arEFe9eAnPN-h8oNNPcQsDwsL350UM,65217
130
131
  metaflow/cmd/develop/stubs.py,sha256=JX2qNZDvG0upvPueAcLhoR_zyLtRranZMwY05tLdpRQ,11884
131
132
  metaflow/datastore/__init__.py,sha256=VxP6ddJt3rwiCkpiSfAhyVkUCOe1pgZZsytVEJzFmSQ,155
132
133
  metaflow/datastore/content_addressed_store.py,sha256=6T7tNqL29kpmecyMLHF35RhoSBOb-OZcExnsB65AvnI,7641
@@ -140,17 +141,17 @@ metaflow/extension_support/__init__.py,sha256=2z0c4R8zsVmEFOMGT2Jujsl6xveDVa9KLl
140
141
  metaflow/extension_support/_empty_file.py,sha256=HENjnM4uAfeNygxMB_feCCWORFoSat9n_QwzSx2oXPw,109
141
142
  metaflow/extension_support/cmd.py,sha256=hk8iBUUINqvKCDxInKgWpum8ThiRZtHSJP7qBASHzl8,5711
142
143
  metaflow/extension_support/integrations.py,sha256=AWAh-AZ-vo9IxuAVEjGw3s8p_NMm2DKHYx10oC51gPU,5506
143
- metaflow/extension_support/plugins.py,sha256=jgpNJU9q7V1vnattuH7LncTZezWk_VC4lS7Qn761h6A,11263
144
+ metaflow/extension_support/plugins.py,sha256=gl7NbIJLJyLTb5LELsj1D9paQip6t6Lqz6Rhmvqvyrw,11286
144
145
  metaflow/metadata_provider/__init__.py,sha256=FZNSnz26VB_m18DQG8mup6-Gfl7r1U6lRMljJBp3VAM,64
145
146
  metaflow/metadata_provider/heartbeat.py,sha256=42mQo6wOHdFuaCh426uV6Kn8swe7e5I3gqA_G7cI_LA,3127
146
147
  metaflow/metadata_provider/metadata.py,sha256=meO4Fhxu7tbMUGwasYb9_AtL06fwrrXKKjIK7KRWZDs,27093
147
148
  metaflow/metadata_provider/util.py,sha256=lYoQKbqoTM1iZChgyVWN-gX-HyM9tt9bXEMJexY9XmM,1723
148
- metaflow/mflog/__init__.py,sha256=9iMMn2xYB0oaDXXcInxa9AdDqeVBeiJeB3klnqGkyL0,5983
149
+ metaflow/mflog/__init__.py,sha256=TkR9ny_JYvNCWJTdLiHsbLSLc9cUvzAzpDuHLdG8nkA,6020
149
150
  metaflow/mflog/mflog.py,sha256=VebXxqitOtNAs7VJixnNfziO_i_urG7bsJ5JiB5IXgY,4370
150
151
  metaflow/mflog/save_logs.py,sha256=4p1OwozsHJBslOzAf0wUq2XPMNpEOZWM68MgWzh_jJY,2330
151
152
  metaflow/mflog/save_logs_periodically.py,sha256=2Uvk9hi-zlCqXxOQoXmmjH1SCugfw6eG6w70WgfI-ho,1256
152
153
  metaflow/mflog/tee.py,sha256=wTER15qeHuiRpCkOqo-bd-r3Gj-EVlf3IvWRCA4beW4,887
153
- metaflow/plugins/__init__.py,sha256=Lr7i7ssJI_-czorJYjMFcRhGspqArobNoXUl9T1p3MY,8055
154
+ metaflow/plugins/__init__.py,sha256=siqE9Zj_b9zKgMhll3f5L2m1gcAKxp_e4qMRTGJ65xY,8460
154
155
  metaflow/plugins/catch_decorator.py,sha256=UOM2taN_OL2RPpuJhwEOA9ZALm0-hHD0XS2Hn2GUev0,4061
155
156
  metaflow/plugins/debug_logger.py,sha256=mcF5HYzJ0NQmqCMjyVUk3iAP-heroHRIiVWQC6Ha2-I,879
156
157
  metaflow/plugins/debug_monitor.py,sha256=Md5X_sDOSssN9pt2D8YcaIjTK5JaQD55UAYTcF6xYF0,1099
@@ -292,23 +293,24 @@ metaflow/plugins/kubernetes/kube_utils.py,sha256=jdFMGbEmIow-oli26v31W9CmbZXigx0
292
293
  metaflow/plugins/kubernetes/kubernetes.py,sha256=4WHVs421w5JfFSRAdpiZ6X9w7xEK8UPYGNcc9e0JOFc,30420
293
294
  metaflow/plugins/kubernetes/kubernetes_cli.py,sha256=PY-aMaVelHddHq3jqEEu9cvNl7xEjT2lFFADN9dXMkw,13918
294
295
  metaflow/plugins/kubernetes/kubernetes_client.py,sha256=tuvXP-QKpdeSmzVolB2R_TaacOr5DIb0j642eKcjsiM,6491
295
- metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=Spz1vkIurf9Z9mENZEdXk2tLS3v2jqVJIF5wyYyqbJ4,31684
296
+ metaflow/plugins/kubernetes/kubernetes_decorator.py,sha256=vGzVmVsaid2sgM1w9NIWqrK2aCpBe9K4aYfr2r4KzIM,31736
296
297
  metaflow/plugins/kubernetes/kubernetes_job.py,sha256=0PjcVgJkmSqjBNEkibk8y0xxBrgu99D8ar9RsXnzCeY,32833
297
- metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=9kU43eE5IvIa7y-POzBdxnJOazWsedKhwQ51Tu1HN_A,42471
298
+ metaflow/plugins/kubernetes/kubernetes_jobsets.py,sha256=XjduAYY_H-jX3lGK6b-jYt6QnDvC2ac3qIAgr-iDsCQ,42543
298
299
  metaflow/plugins/kubernetes/spot_metadata_cli.py,sha256=an0nWCxgflmqIPBCBrlb4m3DereDFFJBLt-KKhqcHc8,1670
299
300
  metaflow/plugins/kubernetes/spot_monitor_sidecar.py,sha256=zrWU-smQwPnL6MBHmzTxWyEA00R6iKKQbhhy50xFwQ8,3832
300
301
  metaflow/plugins/metadata_providers/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
301
302
  metaflow/plugins/metadata_providers/local.py,sha256=Z0CXaGZJbAkj4II3WspJi-uCCtShH64yaXZQ5i9Ym7g,24390
302
303
  metaflow/plugins/metadata_providers/service.py,sha256=K0Ym6lcmegX6wBC5uZbeAFQJSDFc8e6DzJiCB1VIqjc,22554
303
304
  metaflow/plugins/pypi/__init__.py,sha256=0YFZpXvX7HCkyBFglatual7XGifdA1RwC3U4kcizyak,1037
304
- metaflow/plugins/pypi/bootstrap.py,sha256=XAz832qSLFxIXW6SP02N8PQ_7CKiqrCfirkE80Iwarc,14881
305
+ metaflow/plugins/pypi/bootstrap.py,sha256=SNONquX6QnTbu7htmhaQeVeZ2ofaFaUCDScRIrTTERc,14718
305
306
  metaflow/plugins/pypi/conda_decorator.py,sha256=piFcE4uGmWhhbGlxMK0GHd7BGEyqy6r9BFy8Mjoi80Q,15937
306
- metaflow/plugins/pypi/conda_environment.py,sha256=LYjbPNbV29SnspVtS-mHmpW4mt6I-aP3qDbXZSXaH4I,22322
307
+ metaflow/plugins/pypi/conda_environment.py,sha256=kQeX1cHJZk1oY4d-5bH8fhVrzNaFBvOCQgaszNAa5ew,22459
307
308
  metaflow/plugins/pypi/micromamba.py,sha256=LLJ2dGGOEyld07W8iI6dtE01h2Y1PQnBhU-dMBssZ3c,16502
309
+ metaflow/plugins/pypi/parsers.py,sha256=gpOOG2Ph95wI73MWCAi7XjpK0gYhv5k5YIGBs73QPuE,8556
308
310
  metaflow/plugins/pypi/pip.py,sha256=H0cIy8odpZ-JTn4SwF0b74tuC3uRU7X8TdAQJ2kODG8,13971
309
311
  metaflow/plugins/pypi/pypi_decorator.py,sha256=ybNgo-T5Z_0W2KNuED0pdjyI0qygZ4a1MXAzKqdHt_E,7250
310
312
  metaflow/plugins/pypi/pypi_environment.py,sha256=FYMg8kF3lXqcLfRYWD83a9zpVjcoo_TARqMGZ763rRk,230
311
- metaflow/plugins/pypi/utils.py,sha256=855aSATi-qPhFs5OFV5dl03RDNDr5tUrPebloygrWnU,2984
313
+ metaflow/plugins/pypi/utils.py,sha256=glfXN0Do8HleB8YE9LE9Pb1tIBivqdPGmS0sUtO1e7k,3451
312
314
  metaflow/plugins/secrets/__init__.py,sha256=mhJaN2eMS_ZZVewAMR2E-JdP5i0t3v9e6Dcwd-WpruE,310
313
315
  metaflow/plugins/secrets/inline_secrets_provider.py,sha256=EChmoBGA1i7qM3jtYwPpLZDBybXLergiDlN63E0u3x8,294
314
316
  metaflow/plugins/secrets/secrets_decorator.py,sha256=s-sFzPWOjahhpr5fMj-ZEaHkDYAPTO0isYXGvaUwlG8,11273
@@ -361,12 +363,12 @@ metaflow/user_configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
361
363
  metaflow/user_configs/config_decorators.py,sha256=qCKVAvd0NKgaCxQ2OThes5-DYHXq6A1HqURubYNeFdw,20481
362
364
  metaflow/user_configs/config_options.py,sha256=m6jccSpzI4qUJ7vyYkYBIf8G3V0Caunxg_k7zg4Zlqg,21067
363
365
  metaflow/user_configs/config_parameters.py,sha256=oeJGVKu1ao_YQX6Lg6P2FEv5k5-_F4sARLlVpTW9ezM,15502
364
- ob_metaflow-2.15.0.1.data/data/share/metaflow/devtools/Makefile,sha256=x9Q2FsScc9XQa0uVV2oNpA3VHwet_6oc8aQN0ztbM2Q,12907
365
- ob_metaflow-2.15.0.1.data/data/share/metaflow/devtools/Tiltfile,sha256=ednswaJXxyH4wRWPNQZMzb5Kg1TiukHUNXgUh_DP8mU,20016
366
- ob_metaflow-2.15.0.1.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
367
- ob_metaflow-2.15.0.1.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
368
- ob_metaflow-2.15.0.1.dist-info/METADATA,sha256=DF7rbN-OZVxyixtvenAUWQd7qn9nOqfsXf7YB5pptBU,5313
369
- ob_metaflow-2.15.0.1.dist-info/WHEEL,sha256=3HfeesdN7jshHPzN8HJ8UeFRlEd6ApplqndzbPTvPl8,109
370
- ob_metaflow-2.15.0.1.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
371
- ob_metaflow-2.15.0.1.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
372
- ob_metaflow-2.15.0.1.dist-info/RECORD,,
366
+ ob_metaflow-2.15.3.1.data/data/share/metaflow/devtools/Makefile,sha256=VOAcKq3Nn7tvJLs-T8KIvuNQxbGNMH5GEte_96QoT4Q,13342
367
+ ob_metaflow-2.15.3.1.data/data/share/metaflow/devtools/Tiltfile,sha256=6xZrL354f8B5PKeoPUJnwka0gHUXl7XSPzNGgf5mcRI,20606
368
+ ob_metaflow-2.15.3.1.data/data/share/metaflow/devtools/pick_services.sh,sha256=DCnrMXwtApfx3B4S-YiZESMyAFHbXa3VuNL0MxPLyiE,2196
369
+ ob_metaflow-2.15.3.1.dist-info/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
370
+ ob_metaflow-2.15.3.1.dist-info/METADATA,sha256=zHwB0DwIwgJly48FFXMbISgkHGJEgZCF2bEalHdpQbs,5313
371
+ ob_metaflow-2.15.3.1.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
372
+ ob_metaflow-2.15.3.1.dist-info/entry_points.txt,sha256=RvEq8VFlgGe_FfqGOZi0D7ze1hLD0pAtXeNyGfzc_Yc,103
373
+ ob_metaflow-2.15.3.1.dist-info/top_level.txt,sha256=v1pDHoWaSaKeuc5fKTRSfsXCKSdW1zvNVmvA-i0if3o,9
374
+ ob_metaflow-2.15.3.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.1)
2
+ Generator: setuptools (75.8.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py2-none-any
5
5
  Tag: py3-none-any