ob-metaflow 2.9.10.1__py2.py3-none-any.whl → 2.10.2.6__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow might be problematic.

Files changed (57)
  1. metaflow/_vendor/packaging/__init__.py +15 -0
  2. metaflow/_vendor/packaging/_elffile.py +108 -0
  3. metaflow/_vendor/packaging/_manylinux.py +238 -0
  4. metaflow/_vendor/packaging/_musllinux.py +80 -0
  5. metaflow/_vendor/packaging/_parser.py +328 -0
  6. metaflow/_vendor/packaging/_structures.py +61 -0
  7. metaflow/_vendor/packaging/_tokenizer.py +188 -0
  8. metaflow/_vendor/packaging/markers.py +245 -0
  9. metaflow/_vendor/packaging/requirements.py +95 -0
  10. metaflow/_vendor/packaging/specifiers.py +1005 -0
  11. metaflow/_vendor/packaging/tags.py +546 -0
  12. metaflow/_vendor/packaging/utils.py +141 -0
  13. metaflow/_vendor/packaging/version.py +563 -0
  14. metaflow/_vendor/v3_7/__init__.py +1 -0
  15. metaflow/_vendor/v3_7/zipp.py +329 -0
  16. metaflow/metaflow_config.py +2 -1
  17. metaflow/metaflow_environment.py +3 -1
  18. metaflow/mflog/mflog.py +7 -1
  19. metaflow/multicore_utils.py +12 -2
  20. metaflow/plugins/__init__.py +8 -3
  21. metaflow/plugins/airflow/airflow.py +13 -0
  22. metaflow/plugins/argo/argo_client.py +16 -0
  23. metaflow/plugins/argo/argo_events.py +7 -1
  24. metaflow/plugins/argo/argo_workflows.py +62 -0
  25. metaflow/plugins/argo/argo_workflows_cli.py +15 -0
  26. metaflow/plugins/aws/batch/batch.py +10 -0
  27. metaflow/plugins/aws/batch/batch_cli.py +1 -2
  28. metaflow/plugins/aws/batch/batch_decorator.py +2 -9
  29. metaflow/plugins/datatools/s3/s3.py +4 -0
  30. metaflow/plugins/env_escape/client.py +24 -3
  31. metaflow/plugins/env_escape/stub.py +2 -8
  32. metaflow/plugins/kubernetes/kubernetes.py +13 -0
  33. metaflow/plugins/kubernetes/kubernetes_cli.py +1 -2
  34. metaflow/plugins/kubernetes/kubernetes_decorator.py +9 -2
  35. metaflow/plugins/pypi/__init__.py +29 -0
  36. metaflow/plugins/pypi/bootstrap.py +131 -0
  37. metaflow/plugins/pypi/conda_decorator.py +335 -0
  38. metaflow/plugins/pypi/conda_environment.py +414 -0
  39. metaflow/plugins/pypi/micromamba.py +294 -0
  40. metaflow/plugins/pypi/pip.py +205 -0
  41. metaflow/plugins/pypi/pypi_decorator.py +130 -0
  42. metaflow/plugins/pypi/pypi_environment.py +7 -0
  43. metaflow/plugins/pypi/utils.py +75 -0
  44. metaflow/task.py +0 -3
  45. metaflow/vendor.py +1 -0
  46. {ob_metaflow-2.9.10.1.dist-info → ob_metaflow-2.10.2.6.dist-info}/METADATA +1 -1
  47. {ob_metaflow-2.9.10.1.dist-info → ob_metaflow-2.10.2.6.dist-info}/RECORD +51 -33
  48. {ob_metaflow-2.9.10.1.dist-info → ob_metaflow-2.10.2.6.dist-info}/WHEEL +1 -1
  49. metaflow/plugins/conda/__init__.py +0 -90
  50. metaflow/plugins/conda/batch_bootstrap.py +0 -104
  51. metaflow/plugins/conda/conda.py +0 -247
  52. metaflow/plugins/conda/conda_environment.py +0 -136
  53. metaflow/plugins/conda/conda_flow_decorator.py +0 -35
  54. metaflow/plugins/conda/conda_step_decorator.py +0 -416
  55. {ob_metaflow-2.9.10.1.dist-info → ob_metaflow-2.10.2.6.dist-info}/LICENSE +0 -0
  56. {ob_metaflow-2.9.10.1.dist-info → ob_metaflow-2.10.2.6.dist-info}/entry_points.txt +0 -0
  57. {ob_metaflow-2.9.10.1.dist-info → ob_metaflow-2.10.2.6.dist-info}/top_level.txt +0 -0
metaflow/_vendor/v3_7/zipp.py ADDED
@@ -0,0 +1,329 @@
+ import io
+ import posixpath
+ import zipfile
+ import itertools
+ import contextlib
+ import sys
+ import pathlib
+
+ if sys.version_info < (3, 7):
+     from collections import OrderedDict
+ else:
+     OrderedDict = dict
+
+
+ __all__ = ['Path']
+
+
+ def _parents(path):
+     """
+     Given a path with elements separated by
+     posixpath.sep, generate all parents of that path.
+
+     >>> list(_parents('b/d'))
+     ['b']
+     >>> list(_parents('/b/d/'))
+     ['/b']
+     >>> list(_parents('b/d/f/'))
+     ['b/d', 'b']
+     >>> list(_parents('b'))
+     []
+     >>> list(_parents(''))
+     []
+     """
+     return itertools.islice(_ancestry(path), 1, None)
+
+
+ def _ancestry(path):
+     """
+     Given a path with elements separated by
+     posixpath.sep, generate all elements of that path
+
+     >>> list(_ancestry('b/d'))
+     ['b/d', 'b']
+     >>> list(_ancestry('/b/d/'))
+     ['/b/d', '/b']
+     >>> list(_ancestry('b/d/f/'))
+     ['b/d/f', 'b/d', 'b']
+     >>> list(_ancestry('b'))
+     ['b']
+     >>> list(_ancestry(''))
+     []
+     """
+     path = path.rstrip(posixpath.sep)
+     while path and path != posixpath.sep:
+         yield path
+         path, tail = posixpath.split(path)
+
+
+ _dedupe = OrderedDict.fromkeys
+ """Deduplicate an iterable in original order"""
+
+
+ def _difference(minuend, subtrahend):
+     """
+     Return items in minuend not in subtrahend, retaining order
+     with O(1) lookup.
+     """
+     return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+ class CompleteDirs(zipfile.ZipFile):
+     """
+     A ZipFile subclass that ensures that implied directories
+     are always included in the namelist.
+     """
+
+     @staticmethod
+     def _implied_dirs(names):
+         parents = itertools.chain.from_iterable(map(_parents, names))
+         as_dirs = (p + posixpath.sep for p in parents)
+         return _dedupe(_difference(as_dirs, names))
+
+     def namelist(self):
+         names = super(CompleteDirs, self).namelist()
+         return names + list(self._implied_dirs(names))
+
+     def _name_set(self):
+         return set(self.namelist())
+
+     def resolve_dir(self, name):
+         """
+         If the name represents a directory, return that name
+         as a directory (with the trailing slash).
+         """
+         names = self._name_set()
+         dirname = name + '/'
+         dir_match = name not in names and dirname in names
+         return dirname if dir_match else name
+
+     @classmethod
+     def make(cls, source):
+         """
+         Given a source (filename or zipfile), return an
+         appropriate CompleteDirs subclass.
+         """
+         if isinstance(source, CompleteDirs):
+             return source
+
+         if not isinstance(source, zipfile.ZipFile):
+             return cls(_pathlib_compat(source))
+
+         # Only allow for FastLookup when supplied zipfile is read-only
+         if 'r' not in source.mode:
+             cls = CompleteDirs
+
+         source.__class__ = cls
+         return source
+
+
+ class FastLookup(CompleteDirs):
+     """
+     ZipFile subclass to ensure implicit
+     dirs exist and are resolved rapidly.
+     """
+
+     def namelist(self):
+         with contextlib.suppress(AttributeError):
+             return self.__names
+         self.__names = super(FastLookup, self).namelist()
+         return self.__names
+
+     def _name_set(self):
+         with contextlib.suppress(AttributeError):
+             return self.__lookup
+         self.__lookup = super(FastLookup, self)._name_set()
+         return self.__lookup
+
+
+ def _pathlib_compat(path):
+     """
+     For path-like objects, convert to a filename for compatibility
+     on Python 3.6.1 and earlier.
+     """
+     try:
+         return path.__fspath__()
+     except AttributeError:
+         return str(path)
+
+
+ class Path:
+     """
+     A pathlib-compatible interface for zip files.
+
+     Consider a zip file with this structure::
+
+         .
+         ├── a.txt
+         └── b
+             ├── c.txt
+             └── d
+                 └── e.txt
+
+     >>> data = io.BytesIO()
+     >>> zf = zipfile.ZipFile(data, 'w')
+     >>> zf.writestr('a.txt', 'content of a')
+     >>> zf.writestr('b/c.txt', 'content of c')
+     >>> zf.writestr('b/d/e.txt', 'content of e')
+     >>> zf.filename = 'mem/abcde.zip'
+
+     Path accepts the zipfile object itself or a filename
+
+     >>> root = Path(zf)
+
+     From there, several path operations are available.
+
+     Directory iteration (including the zip file itself):
+
+     >>> a, b = root.iterdir()
+     >>> a
+     Path('mem/abcde.zip', 'a.txt')
+     >>> b
+     Path('mem/abcde.zip', 'b/')
+
+     name property:
+
+     >>> b.name
+     'b'
+
+     join with divide operator:
+
+     >>> c = b / 'c.txt'
+     >>> c
+     Path('mem/abcde.zip', 'b/c.txt')
+     >>> c.name
+     'c.txt'
+
+     Read text:
+
+     >>> c.read_text()
+     'content of c'
+
+     existence:
+
+     >>> c.exists()
+     True
+     >>> (b / 'missing.txt').exists()
+     False
+
+     Coercion to string:
+
+     >>> import os
+     >>> str(c).replace(os.sep, posixpath.sep)
+     'mem/abcde.zip/b/c.txt'
+
+     At the root, ``name``, ``filename``, and ``parent``
+     resolve to the zipfile. Note these attributes are not
+     valid and will raise a ``ValueError`` if the zipfile
+     has no filename.
+
+     >>> root.name
+     'abcde.zip'
+     >>> str(root.filename).replace(os.sep, posixpath.sep)
+     'mem/abcde.zip'
+     >>> str(root.parent)
+     'mem'
+     """
+
+     __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+     def __init__(self, root, at=""):
+         """
+         Construct a Path from a ZipFile or filename.
+
+         Note: When the source is an existing ZipFile object,
+         its type (__class__) will be mutated to a
+         specialized type. If the caller wishes to retain the
+         original type, the caller should either create a
+         separate ZipFile object or pass a filename.
+         """
+         self.root = FastLookup.make(root)
+         self.at = at
+
+     def open(self, mode='r', *args, pwd=None, **kwargs):
+         """
+         Open this entry as text or binary following the semantics
+         of ``pathlib.Path.open()`` by passing arguments through
+         to io.TextIOWrapper().
+         """
+         if self.is_dir():
+             raise IsADirectoryError(self)
+         zip_mode = mode[0]
+         if not self.exists() and zip_mode == 'r':
+             raise FileNotFoundError(self)
+         stream = self.root.open(self.at, zip_mode, pwd=pwd)
+         if 'b' in mode:
+             if args or kwargs:
+                 raise ValueError("encoding args invalid for binary operation")
+             return stream
+         return io.TextIOWrapper(stream, *args, **kwargs)
+
+     @property
+     def name(self):
+         return pathlib.Path(self.at).name or self.filename.name
+
+     @property
+     def suffix(self):
+         return pathlib.Path(self.at).suffix or self.filename.suffix
+
+     @property
+     def suffixes(self):
+         return pathlib.Path(self.at).suffixes or self.filename.suffixes
+
+     @property
+     def stem(self):
+         return pathlib.Path(self.at).stem or self.filename.stem
+
+     @property
+     def filename(self):
+         return pathlib.Path(self.root.filename).joinpath(self.at)
+
+     def read_text(self, *args, **kwargs):
+         with self.open('r', *args, **kwargs) as strm:
+             return strm.read()
+
+     def read_bytes(self):
+         with self.open('rb') as strm:
+             return strm.read()
+
+     def _is_child(self, path):
+         return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+     def _next(self, at):
+         return self.__class__(self.root, at)
+
+     def is_dir(self):
+         return not self.at or self.at.endswith("/")
+
+     def is_file(self):
+         return self.exists() and not self.is_dir()
+
+     def exists(self):
+         return self.at in self.root._name_set()
+
+     def iterdir(self):
+         if not self.is_dir():
+             raise ValueError("Can't listdir a file")
+         subs = map(self._next, self.root.namelist())
+         return filter(self._is_child, subs)
+
+     def __str__(self):
+         return posixpath.join(self.root.filename, self.at)
+
+     def __repr__(self):
+         return self.__repr.format(self=self)
+
+     def joinpath(self, *other):
+         next = posixpath.join(self.at, *map(_pathlib_compat, other))
+         return self._next(self.root.resolve_dir(next))
+
+     __truediv__ = joinpath
+
+     @property
+     def parent(self):
+         if not self.at:
+             return self.filename.parent
+         parent_at = posixpath.dirname(self.at.rstrip('/'))
+         if parent_at:
+             parent_at += '/'
+         return self._next(parent_at)
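The file above is a vendored copy of the zipp backport, which gives a pathlib-style interface to zip archives. A minimal usage sketch, assuming the vendored import path shown in the file list (the archive contents are illustrative):

import io
import zipfile

from metaflow._vendor.v3_7 import zipp  # vendored module added in this release

data = io.BytesIO()
with zipfile.ZipFile(data, "w") as zf:
    zf.writestr("pkg/config.json", "{}")  # illustrative archive contents

root = zipp.Path(zipfile.ZipFile(data))
entry = root / "pkg" / "config.json"
print(entry.exists())     # True
print(entry.read_text())  # {}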
metaflow/metaflow_config.py CHANGED
@@ -312,6 +312,7 @@ ARGO_EVENTS_WEBHOOK_URL = from_conf("ARGO_EVENTS_WEBHOOK_URL")
  ARGO_EVENTS_INTERNAL_WEBHOOK_URL = from_conf(
      "ARGO_EVENTS_INTERNAL_WEBHOOK_URL", ARGO_EVENTS_WEBHOOK_URL
  )
+ ARGO_EVENTS_WEBHOOK_AUTH = from_conf("ARGO_EVENTS_WEBHOOK_AUTH", "none")
 
  ARGO_WORKFLOWS_UI_URL = from_conf("ARGO_WORKFLOWS_UI_URL")
 
@@ -470,7 +471,7 @@ try:
          DEBUG_OPTIONS.extend(o)
          for typ in o:
              vars()["DEBUG_%s" % typ.upper()] = from_conf(
-                 "DEBUG_%s" % typ.upper()
+                 "DEBUG_%s" % typ.upper(), False
              )
      elif n == "get_pinned_conda_libs":
 
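For context, from_conf looks a setting up in the Metaflow configuration (a METAFLOW_-prefixed environment variable or the profile JSON) and falls back to the supplied default, so these changes give ARGO_EVENTS_WEBHOOK_AUTH a default of "none" and the extension-provided DEBUG_* flags an explicit default of False. A simplified sketch of that resolution, assuming only environment-variable lookup:

import os

def from_conf(name, default=None):
    # Simplified sketch: the real helper also reads the Metaflow profile
    # JSON and applies type coercion.
    return os.environ.get("METAFLOW_%s" % name, default)

ARGO_EVENTS_WEBHOOK_AUTH = from_conf("ARGO_EVENTS_WEBHOOK_AUTH", "none")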
metaflow/metaflow_environment.py CHANGED
@@ -193,7 +193,9 @@ class MetaflowEnvironment(object):
              env[ext_key] = ext_val
          return env
 
-     def executable(self, step_name):
+     def executable(self, step_name, default=None):
+         if default is not None:
+             return default
          return self._python()
 
      def _python(self):
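The new default parameter lets callers pass a user-supplied interpreter override straight through instead of special-casing None themselves (see the matching simplification in batch_cli.py at the end of this diff). A runnable stand-in mirroring the new signature (not the real class):

class Env:
    def _python(self):
        return "python3"

    def executable(self, step_name, default=None):
        # New behavior: an explicit override wins over the environment default
        if default is not None:
            return default
        return self._python()

env = Env()
print(env.executable("train"))                          # python3
print(env.executable("train", "/usr/bin/python3.11"))   # the override wins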
metaflow/mflog/mflog.py CHANGED
@@ -135,7 +135,13 @@ def merge_logs(logs):
          missing.append(line)
      for line in missing:
          res = MFLogline(
-             False, None, MISSING_TIMESTAMP_STR, None, None, line, MISSING_TIMESTAMP
+             False,
+             None,
+             MISSING_TIMESTAMP_STR.encode("utf-8"),
+             None,
+             None,
+             line,
+             MISSING_TIMESTAMP,
          )
          yield res.utc_tstamp_str, res
 
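The substantive fix here is MISSING_TIMESTAMP_STR.encode("utf-8"): loglines parsed from raw output carry their fields as bytes, so the synthetic fallback line presumably must match to avoid mixing str and bytes downstream. A minimal illustration of the mismatch (the constant's value is an illustrative stand-in):

ts = "19700101000000"  # illustrative stand-in for MISSING_TIMESTAMP_STR
try:
    b"[" + ts + b"] lost log line"           # str mixed with bytes
except TypeError:
    pass                                     # the failure the .encode() avoids
print(b"[" + ts.encode("utf-8") + b"] lost log line")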
metaflow/multicore_utils.py CHANGED
@@ -4,6 +4,7 @@ import traceback
  from itertools import islice
  from multiprocessing import cpu_count
  from tempfile import NamedTemporaryFile
+ import time
  import metaflow.tracing as tracing
 
  try:
@@ -70,8 +71,17 @@ def parallel_imap_unordered(func, iterable, max_parallel=None, dir=None):
 
      while pids:
 
-         pid, output_file = pids.pop()
-         if os.waitpid(pid, 0)[1]:
+         for idx, pid_info in enumerate(pids):
+             pid, output_file = pid_info
+             pid, exit_code = os.waitpid(pid, os.WNOHANG)
+             if pid:
+                 pids.pop(idx)
+                 break
+         else:
+             time.sleep(0.1)  # Wait a bit before re-checking
+             continue
+
+         if exit_code:
              raise MulticoreException("Child failed")
 
          with open(output_file, "rb") as f:
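Instead of blocking on one specific child with os.waitpid(pid, 0), the loop now polls every child with os.WNOHANG and reaps whichever finishes first, so results are consumed in completion order. A self-contained sketch of the polling pattern (POSIX-only, independent of Metaflow):

import os
import time

def reap_any(pids):
    # Return (pid, status) for the first child to exit; poll without blocking.
    while True:
        for pid in list(pids):
            reaped, status = os.waitpid(pid, os.WNOHANG)
            if reaped:            # non-zero pid means this child has exited
                pids.remove(reaped)
                return reaped, status
        time.sleep(0.1)           # nothing exited yet; back off briefly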
metaflow/plugins/__init__.py CHANGED
@@ -41,10 +41,11 @@ STEP_DECORATORS_DESC = [
          "unbounded_test_foreach_internal",
          ".test_unbounded_foreach_decorator.InternalTestUnboundedForeachDecorator",
      ),
-     ("conda", ".conda.conda_step_decorator.CondaStepDecorator"),
      ("card", ".cards.card_decorator.CardDecorator"),
      ("pytorch_parallel", ".frameworks.pytorch.PytorchParallelDecorator"),
      ("airflow_internal", ".airflow.airflow_decorator.AirflowInternalDecorator"),
+     ("pypi", ".pypi.pypi_decorator.PyPIStepDecorator"),
+     ("conda", ".pypi.conda_decorator.CondaStepDecorator"),
  ]
 
  # Add new flow decorators here
@@ -53,15 +54,19 @@ STEP_DECORATORS_DESC = [
  # careful with the choice of name though - they become top-level
  # imports from the metaflow package.
  FLOW_DECORATORS_DESC = [
-     ("conda_base", ".conda.conda_flow_decorator.CondaFlowDecorator"),
      ("schedule", ".aws.step_functions.schedule_decorator.ScheduleDecorator"),
      ("project", ".project_decorator.ProjectDecorator"),
      ("trigger", ".events_decorator.TriggerDecorator"),
      ("trigger_on_finish", ".events_decorator.TriggerOnFinishDecorator"),
+     ("pypi_base", ".pypi.pypi_decorator.PyPIFlowDecorator"),
+     ("conda_base", ".pypi.conda_decorator.CondaFlowDecorator"),
  ]
 
  # Add environments here
- ENVIRONMENTS_DESC = [("conda", ".conda.conda_environment.CondaEnvironment")]
+ ENVIRONMENTS_DESC = [
+     ("conda", ".pypi.conda_environment.CondaEnvironment"),
+     ("pypi", ".pypi.pypi_environment.PyPIEnvironment"),
+ ]
 
  # Add metadata providers here
  METADATA_PROVIDERS_DESC = [
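User-facing decorator names stay the same for conda (@conda, @conda_base) but now resolve into the new pypi plugin package, and @pypi/@pypi_base are new; as the comment above notes, flow decorators become top-level imports from the metaflow package. A hedged sketch of a flow using the new flow-level decorator (package pins are illustrative):

from metaflow import FlowSpec, step, pypi_base

@pypi_base(python="3.10", packages={"requests": "2.31.0"})  # illustrative pins
class ExampleFlow(FlowSpec):
    @step
    def start(self):
        import requests  # resolved from the flow-level pypi environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ExampleFlow()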
metaflow/plugins/airflow/airflow.py CHANGED
@@ -32,6 +32,9 @@ from metaflow.metaflow_config import (
      SERVICE_HEADERS,
      SERVICE_INTERNAL_URL,
  )
+
+ from metaflow.metaflow_config_funcs import config_values
+
  from metaflow.parameters import (
      DelayedEvaluationParameter,
      JSONTypeClass,
@@ -335,6 +338,16 @@ class Airflow(object):
          metaflow_version["production_token"] = self.production_token
          env["METAFLOW_VERSION"] = json.dumps(metaflow_version)
 
+         # Temporary passing of *some* environment variables. Do not rely on this
+         # mechanism as it will be removed in the near future
+         env.update(
+             {
+                 k: v
+                 for k, v in config_values()
+                 if k.startswith("METAFLOW_CONDA_") or k.startswith("METAFLOW_DEBUG_")
+             }
+         )
+
          # Extract the k8s decorators for constructing the arguments of the K8s Pod Operator on Airflow.
          k8s_deco = [deco for deco in node.decorators if deco.name == "kubernetes"][0]
          user_code_retries, _ = self._get_retries(node)
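config_values() yields the resolved Metaflow configuration as (key, value) pairs; the comprehension forwards only conda- and debug-related settings into the task environment, and the same temporary pattern recurs below for Argo Workflows and AWS Batch. A sketch of the filter over a toy configuration:

def config_values():
    # Toy stand-in: the real function yields resolved METAFLOW_* settings
    return {
        "METAFLOW_CONDA_S3ROOT": "s3://bucket/conda",   # illustrative values
        "METAFLOW_DEBUG_CONDA": "1",
        "METAFLOW_SERVICE_URL": "https://example.com",
    }.items()

env = {}
env.update(
    {
        k: v
        for k, v in config_values()
        if k.startswith("METAFLOW_CONDA_") or k.startswith("METAFLOW_DEBUG_")
    }
)
# env now holds only the conda/debug keys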
metaflow/plugins/argo/argo_client.py CHANGED
@@ -52,6 +52,22 @@ class ArgoClient(object):
              json.loads(e.body)["message"] if e.body is not None else e.reason
          )
 
+     def get_workflow_templates(self):
+         client = self._client.get()
+         try:
+             return client.CustomObjectsApi().list_namespaced_custom_object(
+                 group=self._group,
+                 version=self._version,
+                 namespace=self._namespace,
+                 plural="workflowtemplates",
+             )["items"]
+         except client.rest.ApiException as e:
+             if e.status == 404:
+                 return None
+             raise ArgoClientException(
+                 json.loads(e.body)["message"] if e.body is not None else e.reason
+             )
+
      def register_workflow_template(self, name, workflow_template):
          # Unfortunately, Kubernetes client does not handle optimistic
          # concurrency control by itself unlike kubectl
metaflow/plugins/argo/argo_events.py CHANGED
@@ -7,7 +7,11 @@ import uuid
  from datetime import datetime
 
  from metaflow.exception import MetaflowException
- from metaflow.metaflow_config import ARGO_EVENTS_WEBHOOK_URL
+ from metaflow.metaflow_config import (
+     ARGO_EVENTS_WEBHOOK_URL,
+     ARGO_EVENTS_WEBHOOK_AUTH,
+     SERVICE_HEADERS,
+ )
 
 
  class ArgoEventException(MetaflowException):
@@ -99,6 +103,8 @@ class ArgoEvent(object):
          if self._access_token:
              # TODO: Test with bearer tokens
              headers = {"Authorization": "Bearer {}".format(self._access_token)}
+         if ARGO_EVENTS_WEBHOOK_AUTH == "service":
+             headers.update(SERVICE_HEADERS)
          # TODO: do we need to worry about certs?
 
          # Use urllib to avoid introducing any dependency in Metaflow
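Setting the new ARGO_EVENTS_WEBHOOK_AUTH option to "service" therefore makes every webhook POST carry the configured Metaflow service headers on top of any bearer token. A sketch of the resulting header assembly (header values are illustrative):

SERVICE_HEADERS = {"x-api-key": "example-key"}   # illustrative; comes from config
ARGO_EVENTS_WEBHOOK_AUTH = "service"

headers = {}
access_token = None                              # no bearer token in this sketch
if access_token:
    headers = {"Authorization": "Bearer {}".format(access_token)}
if ARGO_EVENTS_WEBHOOK_AUTH == "service":
    headers.update(SERVICE_HEADERS)
# headers == {"x-api-key": "example-key"}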
metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -18,6 +18,7 @@ from metaflow.metaflow_config import (
      ARGO_EVENTS_INTERNAL_WEBHOOK_URL,
      ARGO_WORKFLOWS_ENV_VARS_TO_SKIP,
      ARGO_WORKFLOWS_KUBERNETES_SECRETS,
+     ARGO_EVENTS_WEBHOOK_AUTH,
      AWS_SECRETS_MANAGER_DEFAULT_REGION,
      AZURE_STORAGE_BLOB_SERVICE_ENDPOINT,
      CARD_AZUREROOT,
@@ -42,6 +43,9 @@ from metaflow.metaflow_config import (
      UI_URL,
      ARGO_WORKFLOWS_UI_URL,
  )
+
+ from metaflow.metaflow_config_funcs import config_values
+
  from metaflow.mflog import BASH_SAVE_LOGS, bash_capture_logs, export_mflog_env_vars
  from metaflow.parameters import deploy_time_eval
  from metaflow.plugins.kubernetes.kubernetes import (
@@ -179,6 +183,25 @@ class ArgoWorkflows(object):
          # allowed by Metaflow - guaranteeing uniqueness.
          return name.replace("_", "-")
 
+     @staticmethod
+     def list_templates(flow_name, all=False):
+         client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
+
+         templates = client.get_workflow_templates()
+         if templates is None:
+             return []
+
+         template_names = [
+             template["metadata"]["name"]
+             for template in templates
+             if all
+             or flow_name
+             == template["metadata"]
+             .get("annotations", {})
+             .get("metaflow/flow_name", None)
+         ]
+         return template_names
+
      @staticmethod
      def delete(name):
          client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
@@ -583,6 +606,32 @@ class ArgoWorkflows(object):
              }
          )
 
+         # Some more annotations to populate the Argo UI nicely
+         if self.tags:
+             annotations.update({"metaflow/tags": json.dumps(self.tags)})
+         if self.notify_on_error:
+             annotations.update(
+                 {
+                     "metaflow/notify_on_error": json.dumps(
+                         {
+                             "slack": bool(self.notify_slack_webhook_url),
+                             "pager_duty": bool(self.notify_pager_duty_integration_key),
+                         }
+                     )
+                 }
+             )
+         if self.notify_on_success:
+             annotations.update(
+                 {
+                     "metaflow/notify_on_success": json.dumps(
+                         {
+                             "slack": bool(self.notify_slack_webhook_url),
+                             "pager_duty": bool(self.notify_pager_duty_integration_key),
+                         }
+                     )
+                 }
+             )
+
          return (
              WorkflowTemplate()
              .metadata(
@@ -1138,6 +1187,18 @@ class ArgoWorkflows(object):
                  0
              ].attributes["vars"]
          )
+
+         # Temporary passing of *some* environment variables. Do not rely on this
+         # mechanism as it will be removed in the near future
+         env.update(
+             {
+                 k: v
+                 for k, v in config_values()
+                 if k.startswith("METAFLOW_CONDA_")
+                 or k.startswith("METAFLOW_DEBUG_")
+             }
+         )
+
          env.update(
              {
                  **{
@@ -1167,6 +1228,7 @@ class ArgoWorkflows(object):
                      "METAFLOW_ARGO_EVENTS_EVENT_SOURCE": ARGO_EVENTS_EVENT_SOURCE,
                      "METAFLOW_ARGO_EVENTS_SERVICE_ACCOUNT": ARGO_EVENTS_SERVICE_ACCOUNT,
                      "METAFLOW_ARGO_EVENTS_WEBHOOK_URL": ARGO_EVENTS_INTERNAL_WEBHOOK_URL,
+                     "METAFLOW_ARGO_EVENTS_WEBHOOK_AUTH": ARGO_EVENTS_WEBHOOK_AUTH,
                  },
                  **{
                      # Some optional values for bookkeeping
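The notify_on_error/notify_on_success annotations added earlier in this file serialize to compact JSON on the template metadata; for a deployment with only a Slack webhook configured, the value would look like this:

import json

notify_slack_webhook_url = "https://hooks.slack.com/..."  # illustrative
notify_pager_duty_integration_key = None

annotation = json.dumps(
    {
        "slack": bool(notify_slack_webhook_url),
        "pager_duty": bool(notify_pager_duty_integration_key),
    }
)
print(annotation)  # {"slack": true, "pager_duty": false}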
metaflow/plugins/argo/argo_workflows_cli.py CHANGED
@@ -833,6 +833,21 @@ def terminate(obj, run_id, authorize=None):
      obj.echo("\nRun terminated.")
 
 
+ @argo_workflows.command(help="List Argo Workflow templates for the flow.")
+ @click.option(
+     "--all",
+     default=False,
+     is_flag=True,
+     type=bool,
+     help="list all Argo Workflow Templates (not just limited to this flow)",
+ )
+ @click.pass_obj
+ def list_workflow_templates(obj, all=None):
+     templates = ArgoWorkflows.list_templates(obj.flow.name, all)
+     for template_name in templates:
+         obj.echo_always(template_name)
+
+
  def validate_run_id(
      workflow_name, token_prefix, authorize, run_id, instructions_fn=None
  ):
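Together with ArgoWorkflows.list_templates above, this exposes template discovery both on the command line (e.g. python exampleflow.py argo-workflows list-workflow-templates --all, where the flow file name is hypothetical) and programmatically. A sketch of the programmatic path, assuming the plugin import path above and a reachable cluster with a configured namespace:

from metaflow.plugins.argo.argo_workflows import ArgoWorkflows

# Templates whose metaflow/flow_name annotation matches this flow
for name in ArgoWorkflows.list_templates("ExampleFlow"):
    print(name)

# All templates in the configured namespace
for name in ArgoWorkflows.list_templates("ExampleFlow", all=True):
    print(name)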
metaflow/plugins/aws/batch/batch.py CHANGED
@@ -23,6 +23,9 @@ from metaflow.metaflow_config import (
      AWS_SECRETS_MANAGER_DEFAULT_REGION,
      S3_SERVER_SIDE_ENCRYPTION,
  )
+
+ from metaflow.metaflow_config_funcs import config_values
+
  from metaflow.mflog import (
      export_mflog_env_vars,
      bash_capture_logs,
@@ -249,6 +252,13 @@ class Batch(object):
              .environment_variable("METAFLOW_CARD_S3ROOT", CARD_S3ROOT)
              .environment_variable("METAFLOW_RUNTIME_ENVIRONMENT", "aws-batch")
          )
+
+         # Temporary passing of *some* environment variables. Do not rely on this
+         # mechanism as it will be removed in the near future
+         for k, v in config_values():
+             if k.startswith("METAFLOW_CONDA_") or k.startswith("METAFLOW_DEBUG_"):
+                 job.environment_variable(k, v)
+
          if DEFAULT_SECRETS_BACKEND_TYPE is not None:
              job.environment_variable(
                  "METAFLOW_DEFAULT_SECRETS_BACKEND_TYPE", DEFAULT_SECRETS_BACKEND_TYPE
metaflow/plugins/aws/batch/batch_cli.py CHANGED
@@ -190,8 +190,7 @@ def step(
      if R.use_r():
          entrypoint = R.entrypoint()
      else:
-         if executable is None:
-             executable = ctx.obj.environment.executable(step_name)
+         executable = ctx.obj.environment.executable(step_name, executable)
          entrypoint = "%s -u %s" % (executable, os.path.basename(sys.argv[0]))
 
      top_args = " ".join(util.dict_to_cli_options(ctx.parent.parent.params))