metaflow 2.15.21__py2.py3-none-any.whl → 2.16.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. metaflow/__init__.py +7 -1
  2. metaflow/cli.py +19 -1
  3. metaflow/cli_components/init_cmd.py +1 -0
  4. metaflow/cli_components/run_cmds.py +8 -2
  5. metaflow/client/core.py +22 -30
  6. metaflow/datastore/task_datastore.py +0 -1
  7. metaflow/debug.py +5 -0
  8. metaflow/decorators.py +236 -70
  9. metaflow/extension_support/__init__.py +15 -8
  10. metaflow/extension_support/_empty_file.py +2 -2
  11. metaflow/flowspec.py +92 -60
  12. metaflow/graph.py +24 -2
  13. metaflow/meta_files.py +13 -0
  14. metaflow/metadata_provider/metadata.py +7 -1
  15. metaflow/metaflow_config.py +5 -0
  16. metaflow/metaflow_environment.py +82 -25
  17. metaflow/metaflow_version.py +1 -1
  18. metaflow/package/__init__.py +664 -0
  19. metaflow/packaging_sys/__init__.py +870 -0
  20. metaflow/packaging_sys/backend.py +113 -0
  21. metaflow/packaging_sys/distribution_support.py +153 -0
  22. metaflow/packaging_sys/tar_backend.py +86 -0
  23. metaflow/packaging_sys/utils.py +91 -0
  24. metaflow/packaging_sys/v1.py +480 -0
  25. metaflow/plugins/airflow/airflow.py +5 -1
  26. metaflow/plugins/airflow/airflow_cli.py +16 -5
  27. metaflow/plugins/argo/argo_workflows.py +15 -4
  28. metaflow/plugins/argo/argo_workflows_cli.py +17 -4
  29. metaflow/plugins/aws/batch/batch.py +22 -3
  30. metaflow/plugins/aws/batch/batch_cli.py +3 -0
  31. metaflow/plugins/aws/batch/batch_decorator.py +13 -5
  32. metaflow/plugins/aws/step_functions/step_functions.py +4 -1
  33. metaflow/plugins/aws/step_functions/step_functions_cli.py +16 -4
  34. metaflow/plugins/cards/card_decorator.py +0 -5
  35. metaflow/plugins/kubernetes/kubernetes.py +8 -1
  36. metaflow/plugins/kubernetes/kubernetes_cli.py +3 -0
  37. metaflow/plugins/kubernetes/kubernetes_decorator.py +13 -5
  38. metaflow/plugins/package_cli.py +25 -23
  39. metaflow/plugins/parallel_decorator.py +4 -2
  40. metaflow/plugins/pypi/bootstrap.py +8 -2
  41. metaflow/plugins/pypi/conda_decorator.py +39 -82
  42. metaflow/plugins/pypi/conda_environment.py +6 -2
  43. metaflow/plugins/pypi/pypi_decorator.py +4 -4
  44. metaflow/plugins/test_unbounded_foreach_decorator.py +2 -2
  45. metaflow/plugins/timeout_decorator.py +0 -1
  46. metaflow/plugins/uv/bootstrap.py +12 -1
  47. metaflow/plugins/uv/uv_environment.py +4 -2
  48. metaflow/pylint_wrapper.py +5 -1
  49. metaflow/runner/click_api.py +5 -4
  50. metaflow/runner/subprocess_manager.py +14 -2
  51. metaflow/runtime.py +37 -11
  52. metaflow/task.py +92 -7
  53. metaflow/user_configs/config_options.py +13 -8
  54. metaflow/user_configs/config_parameters.py +0 -4
  55. metaflow/user_decorators/__init__.py +0 -0
  56. metaflow/user_decorators/common.py +144 -0
  57. metaflow/user_decorators/mutable_flow.py +499 -0
  58. metaflow/user_decorators/mutable_step.py +424 -0
  59. metaflow/user_decorators/user_flow_decorator.py +264 -0
  60. metaflow/user_decorators/user_step_decorator.py +712 -0
  61. metaflow/util.py +4 -1
  62. metaflow/version.py +1 -1
  63. {metaflow-2.15.21.dist-info → metaflow-2.16.1.dist-info}/METADATA +2 -2
  64. {metaflow-2.15.21.dist-info → metaflow-2.16.1.dist-info}/RECORD +71 -60
  65. metaflow/info_file.py +0 -25
  66. metaflow/package.py +0 -203
  67. metaflow/user_configs/config_decorators.py +0 -568
  68. {metaflow-2.15.21.data → metaflow-2.16.1.data}/data/share/metaflow/devtools/Makefile +0 -0
  69. {metaflow-2.15.21.data → metaflow-2.16.1.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  70. {metaflow-2.15.21.data → metaflow-2.16.1.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  71. {metaflow-2.15.21.dist-info → metaflow-2.16.1.dist-info}/WHEEL +0 -0
  72. {metaflow-2.15.21.dist-info → metaflow-2.16.1.dist-info}/entry_points.txt +0 -0
  73. {metaflow-2.15.21.dist-info → metaflow-2.16.1.dist-info}/licenses/LICENSE +0 -0
  74. {metaflow-2.15.21.dist-info → metaflow-2.16.1.dist-info}/top_level.txt +0 -0
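
For context, a file-level diff like the ones below can be reproduced locally from the two wheels. A minimal sketch, assuming both wheel files have been downloaded next to the script (for example with `pip download metaflow==2.15.21 --no-deps` and `pip download metaflow==2.16.1 --no-deps`); the helper below is illustrative, not part of either package:

# Sketch: diff the Python sources of two wheels with the standard library only.
import difflib
import zipfile

OLD = "metaflow-2.15.21-py2.py3-none-any.whl"  # assumed local file names
NEW = "metaflow-2.16.1-py2.py3-none-any.whl"

def wheel_texts(path):
    # Map archive member name -> list of text lines for every .py file in the wheel.
    with zipfile.ZipFile(path) as zf:
        return {
            name: zf.read(name).decode("utf-8", errors="replace").splitlines()
            for name in zf.namelist()
            if name.endswith(".py")
        }

old, new = wheel_texts(OLD), wheel_texts(NEW)
for name in sorted(set(old) | set(new)):
    for line in difflib.unified_diff(
        old.get(name, []),
        new.get(name, []),
        fromfile=f"2.15.21/{name}",
        tofile=f"2.16.1/{name}",
        lineterm="",
    ):
        print(line)
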
metaflow/packaging_sys/v1.py (new file)
@@ -0,0 +1,480 @@
+ import json
+ import os
+ import sys
+ from pathlib import Path
+ from types import ModuleType
+ from typing import Any, Callable, Dict, Generator, List, Optional, Set, Tuple, Union
+
+ from ..debug import debug
+ from ..extension_support import (
+     EXT_EXCLUDE_SUFFIXES,
+     extension_info,
+     package_mfext_all,
+     package_mfext_all_descriptions,
+ )
+ from ..exception import MetaflowException
+ from ..metaflow_version import get_version
+ from ..user_decorators.user_flow_decorator import FlowMutatorMeta
+ from ..user_decorators.user_step_decorator import UserStepDecoratorMeta
+ from ..util import get_metaflow_root
+ from . import ContentType, MFCONTENT_MARKER, MetaflowCodeContentV1Base
+ from .distribution_support import _ModuleInfo, modules_to_distributions
+ from .utils import suffix_filter, walk
+
+
+ class MetaflowCodeContentV1(MetaflowCodeContentV1Base):
+     METAFLOW_SUFFIXES_LIST = [".py", ".html", ".css", ".js"]
+
+     def __init__(
+         self,
+         code_dir: str = MetaflowCodeContentV1Base._code_dir,
+         other_dir: str = MetaflowCodeContentV1Base._other_dir,
+         criteria: Callable[[ModuleType], bool] = lambda x: True,
+     ):
+         super().__init__(code_dir, other_dir)
+
+         self._metaflow_root = get_metaflow_root()
+         self._metaflow_version = get_version()
+
+         self._criteria = criteria
+
+         # We try to find the modules we need to package. We will first look at all modules
+         # and apply the criteria to them. Then we will use the most parent module that
+         # fits the criteria as the module to package
+
+         # Make a copy since sys.modules could be modified while we load other
+         # modules. See https://github.com/Netflix/metaflow/issues/2489
+         all_modules = dict(sys.modules)
+         modules = filter(lambda x: criteria(x[1]), all_modules.items())
+         # Ensure that we see the parent modules first
+         modules = sorted(modules, key=lambda x: x[0])
+         if modules:
+             last_prefix = modules[0][0]
+             new_modules = [modules[0]]
+             for name, mod in modules[1:]:
+                 if name.startswith(last_prefix + "."):
+                     # This is a submodule of the last module, we can skip it
+                     continue
+                 # Otherwise, we have a new top-level module
+                 last_prefix = name
+                 new_modules.append((name, mod))
+         else:
+             new_modules = []
+
+         self._modules = {
+             name: _ModuleInfo(
+                 name,
+                 set(
+                     Path(p).resolve().as_posix()
+                     for p in getattr(mod, "__path__", [mod.__file__])
+                 ),
+                 mod,
+                 True,  # This is a Metaflow module (see filter below)
+             )
+             for (name, mod) in new_modules
+         }
+
+         # Filter the modules
+         self._modules = {
+             name: info for name, info in self._modules.items() if criteria(info.module)
+         }
+
+         # Contain metadata information regarding the distributions packaged.
+         # This allows Metaflow to "fake" distribution information when packaged
+         self._distmetainfo = {}  # type: Dict[str, Dict[str, str]]
+
+         # Maps an absolute path on the filesystem to the path of the file in the
+         # archive.
+         self._files = {}  # type: Dict[str, str]
+         self._files_from_modules = {}  # type: Dict[str, str]
+
+         self._other_files = {}  # type: Dict[str, str]
+         self._other_content = {}  # type: Dict[str, bytes]
+
+         debug.package_exec(f"Used system modules found: {str(self._modules)}")
+
+         # Populate with files from the third party modules
+         for k, v in self._modules.items():
+             self._files_from_modules.update(self._module_files(k, v.root_paths))
+
+         # Figure out the files to package for Metaflow and extensions
+         self._cached_metaflow_files = list(self._metaflow_distribution_files())
+         self._cached_metaflow_files.extend(list(self._metaflow_extension_files()))
+
+     def create_mfcontent_info(self) -> Dict[str, Any]:
+         return {"version": 1, "module_files": list(self._files_from_modules.values())}
+
+     def get_excluded_tl_entries(self) -> List[str]:
+         """
+         When packaging Metaflow from within an executing Metaflow flow, we need to
+         exclude the files that are inserted by this content from being packaged (possibly).
+
+         Use this function to return these files or top-level directories.
+
+         Returns
+         -------
+         List[str]
+             Files or directories to exclude
+         """
+         return [self._code_dir, self._other_dir]
+
+     def content_names(
+         self, content_types: Optional[int] = None
+     ) -> Generator[Tuple[str, str], None, None]:
+         """
+         Detailed list of the content of this MetaflowCodeContent. This will list all files
+         (or non files -- for the INFO or CONFIG data for example) present in the archive.
+
+         Parameters
+         ----------
+         content_types : Optional[int]
+             The type of content to get the names of. If None, all content is returned.
+
+         Yields
+         ------
+         Generator[Tuple[str, str], None, None]
+             Path on the filesystem and the name in the archive
+         """
+         yield from self._content(content_types, generate_value=False)
+
+     def contents(
+         self, content_types: Optional[int] = None
+     ) -> Generator[Tuple[Union[bytes, str], str], None, None]:
+         """
+         Very similar to content_names but returns the content of the non-files
+         as well as bytes. For files, identical output as content_names
+
+         Parameters
+         ----------
+         content_types : Optional[int]
+             The type of content to get the content of. If None, all content is returned.
+
+         Yields
+         ------
+         Generator[Tuple[Union[str, bytes], str], None, None]
+             Content of the MF content
+         """
+         yield from self._content(content_types, generate_value=True)
+
+     def show(self) -> str:
+         """
+         Returns a more human-readable string representation of the content of this
+         MetaflowCodeContent. This will not, for example, list all files but summarize what
+         is included at a more high level.
+
+         Returns
+         -------
+         str
+             A human-readable string representation of the content of this MetaflowCodeContent
+         """
+         all_user_step_decorators = {}
+         for k, v in UserStepDecoratorMeta.all_decorators().items():
+             all_user_step_decorators.setdefault(
+                 getattr(v, "_original_module", v.__module__), []
+             ).append(k)
+
+         all_user_flow_decorators = {}
+         for k, v in FlowMutatorMeta.all_decorators().items():
+             all_user_flow_decorators.setdefault(
+                 getattr(v, "_original_module", v.__module__), []
+             ).append(k)
+
+         result = []
+         if self._metaflow_version:
+             result.append(f"\nMetaflow version: {self._metaflow_version}")
+         ext_info = extension_info()
+         if ext_info["installed"]:
+             result.append("\nMetaflow extensions packaged:")
+             for ext_name, ext_info in ext_info["installed"].items():
+                 result.append(
+                     f" - {ext_name} ({ext_info['extension_name']}) @ {ext_info['dist_version']}"
+                 )
+
+         if self._modules:
+             mf_modules = []
+             other_modules = []
+             for name, info in self._modules.items():
+                 if info.metaflow_module:
+                     mf_modules.append(f" - {name} @ {', '.join(info.root_paths)}")
+                     module_user_step_decorators = [
+                         ", ".join(v)
+                         for k, v in all_user_step_decorators.items()
+                         if k == info.name or k.startswith(info.name + ".")
+                     ]
+                     module_user_flow_decorators = [
+                         ", ".join(v)
+                         for k, v in all_user_flow_decorators.items()
+                         if k == info.name or k.startswith(info.name + ".")
+                     ]
+                     if module_user_step_decorators:
+                         mf_modules.append(
+                             f" - Provides step decorators: {', '.join(module_user_step_decorators)}"
+                         )
+                     if module_user_flow_decorators:
+                         mf_modules.append(
+                             f" - Provides flow mutators: {', '.join(module_user_flow_decorators)}"
+                         )
+                 else:
+                     other_modules.append(f" - {name} @ {', '.join(info.root_paths)}")
+             if mf_modules:
+                 result.append("\nMetaflow modules:")
+                 result.extend(mf_modules)
+             if other_modules:
+                 result.append("\nNon-Metaflow packaged modules:")
+                 result.extend(other_modules)
+
+         return "\n".join(result)
+
+     def add_info(self, info: Dict[str, Any]) -> None:
+         """
+         Add the content of the INFO file to the Metaflow content
+
+         Parameters
+         ----------
+         info: Dict[str, Any]
+             The content of the INFO file
+         """
+         info_file_path = os.path.join(self._other_dir, self._info_file)
+         if info_file_path in self._other_content:
+             raise MetaflowException("INFO file already present in the MF environment")
+         self._other_content[info_file_path] = json.dumps(info).encode("utf-8")
+
+     def add_config(self, config: Dict[str, Any]) -> None:
+         """
+         Add the content of the CONFIG file to the Metaflow content
+
+         Parameters
+         ----------
+         config: Dict[str, Any]
+             The content of the CONFIG file
+         """
+         config_file_path = os.path.join(self._other_dir, self._config_file)
+         if config_file_path in self._other_content:
+             raise MetaflowException("CONFIG file already present in the MF environment")
+         self._other_content[config_file_path] = json.dumps(config).encode("utf-8")
+
+     def add_module(self, module: ModuleType) -> None:
+         """
+         Add a python module to the Metaflow content
+
+         Parameters
+         ----------
+         module_path: ModuleType
+             The module to add
+         """
+         name = module.__name__
+         debug.package_exec(f"Adding module {name} to the MF content")
+         # If the module is a single file, we handle this here by looking at __file__
+         # which will point to the single file. If it is an actual module, __path__
+         # will contain the path(s) to the module
+         self._modules[name] = _ModuleInfo(
+             name,
+             set(
+                 Path(p).resolve().as_posix()
+                 for p in getattr(module, "__path__", [module.__file__])
+             ),
+             module,
+             False,  # This is not a Metaflow module (added by the user manually)
+         )
+         self._files_from_modules.update(
+             self._module_files(name, self._modules[name].root_paths)
+         )
+
+     def add_code_file(self, file_path: str, file_name: str) -> None:
+         """
+         Add a code file to the Metaflow content
+
+         Parameters
+         ----------
+         file_path: str
+             The path to the code file to add (on the filesystem)
+         file_name: str
+             The path in the archive to add the code file to
+         """
+         file_path = os.path.realpath(file_path)
+         debug.package_exec(
+             f"Adding code file {file_path} as {file_name} to the MF content"
+         )
+
+         if file_path in self._files and self._files[file_path] != os.path.join(
+             self._code_dir, file_name.lstrip("/")
+         ):
+             raise MetaflowException(
+                 "File '%s' is already present in the MF content with a different name: '%s'"
+                 % (file_path, self._files[file_path])
+             )
+         self._files[file_path] = os.path.join(self._code_dir, file_name.lstrip("/"))
+
+     def add_other_file(self, file_path: str, file_name: str) -> None:
+         """
+         Add a non-python file to the Metaflow content
+
+         Parameters
+         ----------
+         file_path: str
+             The path to the file to add (on the filesystem)
+         file_name: str
+             The path in the archive to add the file to
+         """
+         file_path = os.path.realpath(file_path)
+         debug.package_exec(
+             f"Adding other file {file_path} as {file_name} to the MF content"
+         )
+         if file_path in self._other_files and self._other_files[
+             file_path
+         ] != os.path.join(self._other_dir, file_name.lstrip("/")):
+             raise MetaflowException(
+                 "File %s is already present in the MF content with a different name: %s"
+                 % (file_path, self._other_files[file_path])
+             )
+         self._other_files[file_path] = os.path.join(
+             self._other_dir, file_name.lstrip("/")
+         )
+
+     def _content(
+         self, content_types: Optional[int] = None, generate_value: bool = False
+     ) -> Generator[Tuple[Union[str, bytes], str], None, None]:
+         from ..package import MetaflowPackage  # Prevent circular dependency
+
+         if content_types is None:
+             content_types = ContentType.ALL_CONTENT.value
+
+         if content_types & ContentType.CODE_CONTENT.value:
+             yield from self._cached_metaflow_files
+             yield from self._files.items()
+         if content_types & ContentType.MODULE_CONTENT.value:
+             yield from self._files_from_modules.items()
+         if content_types & ContentType.OTHER_CONTENT.value:
+             yield from self._other_files.items()
+             if generate_value:
+                 for k, v in self._other_content.items():
+                     yield v, k
+                 # Include the distribution file too
+                 yield json.dumps(self._distmetainfo).encode("utf-8"), os.path.join(
+                     self._other_dir, self._dist_info_file
+                 )
+                 yield json.dumps(self.create_mfcontent_info()).encode(
+                     "utf-8"
+                 ), os.path.join(self._code_dir, MFCONTENT_MARKER)
+             else:
+                 for k in self._other_content.keys():
+                     yield "<generated %s content>" % (os.path.basename(k)), k
+                 yield "<generated %s content>" % (
+                     os.path.basename(self._dist_info_file)
+                 ), os.path.join(self._other_dir, self._dist_info_file)
+                 yield "<generated %s content>" % MFCONTENT_MARKER, os.path.join(
+                     self._code_dir, MFCONTENT_MARKER
+                 )
+
+     def _metaflow_distribution_files(self) -> Generator[Tuple[str, str], None, None]:
+         debug.package_exec("Including Metaflow from '%s'" % self._metaflow_root)
+         for path_tuple in walk(
+             os.path.join(self._metaflow_root, "metaflow"),
+             exclude_hidden=False,
+             file_filter=suffix_filter(self.METAFLOW_SUFFIXES_LIST),
+         ):
+             yield path_tuple[0], os.path.join(self._code_dir, path_tuple[1])
+
+     def _metaflow_extension_files(self) -> Generator[Tuple[str, str], None, None]:
+         # Metaflow extensions; for now, we package *all* extensions but this may change
+         # at a later date; it is possible to call `package_mfext_package` instead of
+         # `package_mfext_all` but in that case, make sure to also add a
+         # metaflow_extensions/__init__.py file to properly "close" the metaflow_extensions
+         # package and prevent other extensions from being loaded that may be
+         # present in the rest of the system
+         for path_tuple in package_mfext_all():
+             yield path_tuple[0], os.path.join(self._code_dir, path_tuple[1])
+         if debug.package:
+             ext_info = package_mfext_all_descriptions()
+             ext_info = {
+                 k: {k1: v1 for k1, v1 in v.items() if k1 in ("root_paths",)}
+                 for k, v in ext_info.items()
+             }
+             debug.package_exec(f"Metaflow extensions packaged: {ext_info}")
+
+     def _module_files(
+         self, name: str, paths: Set[str]
+     ) -> Generator[Tuple[str, str], None, None]:
+         debug.package_exec(
+             " Looking for distributions for module %s in %s" % (name, paths)
+         )
+         paths = set(paths)  # Do not modify external paths
+         has_init = False
+         distributions = modules_to_distributions().get(name)
+         prefix_parts = tuple(name.split("."))
+
+         seen_distributions = set()
+         if distributions:
+             for dist in distributions:
+                 dist_name = dist.metadata["Name"]  # dist.name not always present
+                 if dist_name in seen_distributions:
+                     continue
+                 # For some reason, sometimes the same distribution appears twice. We
+                 # don't need to process twice.
+                 seen_distributions.add(dist_name)
+                 debug.package_exec(
+                     " Including distribution '%s' for module '%s'"
+                     % (dist_name, name)
+                 )
+                 dist_root = str(dist.locate_file(name))
+                 if dist_root not in paths:
+                     # This is an error because it means that this distribution is
+                     # not contributing to the module.
+                     raise RuntimeError(
+                         "Distribution '%s' is not contributing to module '%s' as "
+                         "expected (got '%s' when expected one of %s)"
+                         % (dist.metadata["Name"], name, dist_root, paths)
+                     )
+                 paths.discard(dist_root)
+                 if dist_name not in self._distmetainfo:
+                     # Possible that a distribution contributes to multiple modules
+                     self._distmetainfo[dist_name] = {
+                         # We can add more if needed but these are likely the most
+                         # useful (captures, name, version, etc and files which can
+                         # be used to find non-python files in the distribution).
+                         "METADATA": dist.read_text("METADATA") or "",
+                         "RECORD": dist.read_text("RECORD") or "",
+                     }
+                 for file in dist.files or []:
+                     # Skip files that do not belong to this module (distribution may
+                     # provide multiple modules)
+                     if file.parts[: len(prefix_parts)] != prefix_parts:
+                         continue
+                     if file.parts[len(prefix_parts)] == "__init__.py":
+                         has_init = True
+                     yield str(
+                         dist.locate_file(file).resolve().as_posix()
+                     ), os.path.join(self._code_dir, *prefix_parts, *file.parts[1:])
+
+         # Now if there are more paths left in paths, it means there is a non-distribution
+         # component to this package which we also include.
+         debug.package_exec(
+             " Looking for non-distribution files for module '%s' in %s"
+             % (name, paths)
+         )
+         for path in paths:
+             if not Path(path).is_dir():
+                 # Single file for the module -- this will be something like <name>.py
+                 yield path, os.path.join(
+                     self._code_dir, *prefix_parts[:-1], f"{prefix_parts[-1]}.py"
+                 )
+                 has_init = True
+             else:
+                 for root, _, files in os.walk(path):
+                     for file in files:
+                         if any(file.endswith(x) for x in EXT_EXCLUDE_SUFFIXES):
+                             continue
+                         rel_path = os.path.relpath(os.path.join(root, file), path)
+                         if rel_path == "__init__.py":
+                             has_init = True
+                         yield os.path.join(root, file), os.path.join(
+                             self._code_dir,
+                             name,
+                             rel_path,
+                         )
+         # We now include an empty __init__.py file to close the module and prevent
+         # leaks from possible namespace packages
+         if not has_init:
+             yield os.path.join(
+                 self._metaflow_root, "metaflow", "extension_support", "_empty_file.py"
+             ), os.path.join(self._code_dir, *prefix_parts, "__init__.py")
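
The class above is the new packaging backend's view of what goes into the code package: it resolves which modules and distributions to include, fakes distribution metadata for the archive, and exposes the archive contents as (path, name) pairs. As a rough illustration of that API, a minimal sketch; the import paths mirror the new module layout shown in this diff but are internal, undocumented interfaces, so treat everything below as an assumption:

# Hedged sketch: driving MetaflowCodeContentV1 directly (internal API).
import metaflow  # noqa: F401  # ensure Metaflow's own modules are in sys.modules

from metaflow.packaging_sys import ContentType
from metaflow.packaging_sys.v1 import MetaflowCodeContentV1

# The criteria callable receives a module object; here we keep only Metaflow itself.
content = MetaflowCodeContentV1(
    criteria=lambda m: getattr(m, "__name__", "").startswith("metaflow")
)

# Code content only: yields (path on the filesystem, name inside the archive).
for fs_path, archive_name in content.content_names(ContentType.CODE_CONTENT.value):
    print(fs_path, "->", archive_name)

# Human-readable summary: Metaflow version, packaged extensions and modules.
print(content.show())
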
metaflow/plugins/airflow/airflow.py
@@ -66,6 +66,7 @@ class Airflow(object):
  name,
  graph,
  flow,
+ code_package_metadata,
  code_package_sha,
  code_package_url,
  metadata,
@@ -87,6 +88,7 @@ class Airflow(object):
  self.name = name
  self.graph = graph
  self.flow = flow
+ self.code_package_metadata = code_package_metadata
  self.code_package_sha = code_package_sha
  self.code_package_url = code_package_url
  self.metadata = metadata
@@ -372,6 +374,7 @@ class Airflow(object):
  # Technically the "user" is the stakeholder but should these labels be present.
  }
  additional_mf_variables = {
+ "METAFLOW_CODE_METADATA": self.code_package_metadata,
  "METAFLOW_CODE_SHA": self.code_package_sha,
  "METAFLOW_CODE_URL": self.code_package_url,
  "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
@@ -476,6 +479,7 @@ class Airflow(object):
  node.name,
  AIRFLOW_MACROS.create_task_id(self.contains_foreach),
  AIRFLOW_MACROS.ATTEMPT,
+ code_package_metadata=self.code_package_metadata,
  code_package_url=self.code_package_url,
  step_cmds=self._step_cli(
  node, input_paths, self.code_package_url, user_code_retries
@@ -534,7 +538,7 @@ class Airflow(object):
  "with": [
  decorator.make_decorator_spec()
  for decorator in node.decorators
- if not decorator.statically_defined
+ if not decorator.statically_defined and decorator.inserted_by is None
  ]
  }
  # FlowDecorators can define their own top-level options. They are
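
Each scheduler in this release gains a code_package_metadata field and exports it to tasks as METAFLOW_CODE_METADATA, alongside the existing METAFLOW_CODE_URL/SHA/DS variables. A minimal, illustrative view of the task side (the reading code below is an assumption for illustration, not Metaflow's actual bootstrap logic):

# Sketch: what the new environment variables look like from inside a scheduled task.
import os

code_metadata = os.environ.get("METAFLOW_CODE_METADATA")  # new in 2.16.x
code_url = os.environ.get("METAFLOW_CODE_URL")
code_sha = os.environ.get("METAFLOW_CODE_SHA")
code_ds = os.environ.get("METAFLOW_CODE_DS")

if code_url:
    print(f"code package in {code_ds} datastore: {code_url} (sha {code_sha})")
if code_metadata:
    # Describes how the packaging system (see packaging_sys above) laid out the archive.
    print(f"package metadata: {code_metadata}")
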
metaflow/plugins/airflow/airflow_cli.py
@@ -7,6 +7,7 @@ from hashlib import sha1
  from metaflow import current, decorators
  from metaflow._vendor import click
  from metaflow.exception import MetaflowException, MetaflowInternalError
+ from metaflow.metaflow_config import FEAT_ALWAYS_UPLOAD_CODE_PACKAGE
  from metaflow.package import MetaflowPackage
  from metaflow.plugins.aws.step_functions.production_token import (
  load_token,
@@ -288,20 +289,30 @@ def make_flow(
  decorators._init_step_decorators(
  obj.flow, obj.graph, obj.environment, obj.flow_datastore, obj.logger
  )
-
+ obj.graph = obj.flow._graph
  # Save the code package in the flow datastore so that both user code and
  # metaflow package can be retrieved during workflow execution.
  obj.package = MetaflowPackage(
- obj.flow, obj.environment, obj.echo, obj.package_suffixes
+ obj.flow,
+ obj.environment,
+ obj.echo,
+ suffixes=obj.package_suffixes,
+ flow_datastore=obj.flow_datastore if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE else None,
  )
- package_url, package_sha = obj.flow_datastore.save_data(
- [obj.package.blob], len_hint=1
- )[0]
+ # This blocks until the package is created
+ if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
+ package_url = obj.package.package_url()
+ package_sha = obj.package.package_sha()
+ else:
+ package_url, package_sha = obj.flow_datastore.save_data(
+ [obj.package.blob], len_hint=1
+ )[0]

  return Airflow(
  dag_name,
  obj.graph,
  obj.flow,
+ obj.package.package_metadata,
  package_sha,
  package_url,
  obj.metadata,
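
make_flow() now builds the code package in one of two modes, selected by FEAT_ALWAYS_UPLOAD_CODE_PACKAGE: either MetaflowPackage is handed the flow datastore and uploads itself (with package_url()/package_sha() blocking until the upload finishes), or the caller saves obj.package.blob explicitly as before. A condensed sketch of that control flow, paraphrasing the hunk above; the build_code_package helper and its parameter names are hypothetical stand-ins for the corresponding attributes of obj:

# Sketch: the two packaging/upload paths introduced in 2.16.x.
from metaflow.metaflow_config import FEAT_ALWAYS_UPLOAD_CODE_PACKAGE
from metaflow.package import MetaflowPackage

def build_code_package(flow, environment, echo, package_suffixes, flow_datastore):
    package = MetaflowPackage(
        flow,
        environment,
        echo,
        suffixes=package_suffixes,
        # With the feature flag on, the package knows where to upload itself.
        flow_datastore=flow_datastore if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE else None,
    )
    if FEAT_ALWAYS_UPLOAD_CODE_PACKAGE:
        # These calls block until the package has been created and stored.
        return package, package.package_url(), package.package_sha()
    # Legacy path: upload the serialized package blob explicitly.
    url, sha = flow_datastore.save_data([package.blob], len_hint=1)[0]
    return package, url, sha
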
metaflow/plugins/argo/argo_workflows.py
@@ -91,6 +91,7 @@ class ArgoWorkflows(object):
  name,
  graph: FlowGraph,
  flow,
+ code_package_metadata,
  code_package_sha,
  code_package_url,
  production_token,
@@ -143,6 +144,7 @@ class ArgoWorkflows(object):
  self.name = name
  self.graph = graph
  self.flow = flow
+ self.code_package_metadata = code_package_metadata
  self.code_package_sha = code_package_sha
  self.code_package_url = code_package_url
  self.production_token = production_token
@@ -551,7 +553,7 @@ class ArgoWorkflows(object):
  type=param_type,
  description=param.kwargs.get("help"),
  is_required=is_required,
- **extra_attrs
+ **extra_attrs,
  )
  return parameters

@@ -1495,7 +1497,9 @@ class ArgoWorkflows(object):
  mflog_expr,
  ]
  + self.environment.get_package_commands(
- self.code_package_url, self.flow_datastore.TYPE
+ self.code_package_url,
+ self.flow_datastore.TYPE,
+ self.code_package_metadata,
  )
  )
  step_cmds = self.environment.bootstrap_commands(
@@ -1507,6 +1511,7 @@ class ArgoWorkflows(object):
  decorator.make_decorator_spec()
  for decorator in node.decorators
  if not decorator.statically_defined
+ and decorator.inserted_by is None
  ]
  }
  # FlowDecorators can define their own top-level options. They are
@@ -1673,6 +1678,7 @@ class ArgoWorkflows(object):
  **{
  # These values are needed by Metaflow to set it's internal
  # state appropriately.
+ "METAFLOW_CODE_METADATA": self.code_package_metadata,
  "METAFLOW_CODE_URL": self.code_package_url,
  "METAFLOW_CODE_SHA": self.code_package_sha,
  "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
@@ -2476,7 +2482,9 @@ class ArgoWorkflows(object):
  mflog_expr,
  ]
  + self.environment.get_package_commands(
- self.code_package_url, self.flow_datastore.TYPE
+ self.code_package_url,
+ self.flow_datastore.TYPE,
+ self.code_package_metadata,
  )[:-1]
  # Replace the line 'Task in starting'
  # FIXME: this can be brittle.
@@ -2496,6 +2504,7 @@ class ArgoWorkflows(object):
  env = {
  # These values are needed by Metaflow to set it's internal
  # state appropriately.
+ "METAFLOW_CODE_METADATA": self.code_package_metadata,
  "METAFLOW_CODE_URL": self.code_package_url,
  "METAFLOW_CODE_SHA": self.code_package_sha,
  "METAFLOW_CODE_DS": self.flow_datastore.TYPE,
@@ -2952,7 +2961,8 @@ class ArgoWorkflows(object):
  mflog_expr,
  ]
  + self.environment.get_package_commands(
- self.code_package_url, self.flow_datastore.TYPE
+ self.code_package_url,
+ self.flow_datastore.TYPE,
  )[:-1]
  # Replace the line 'Task in starting'
  # FIXME: this can be brittle.
@@ -2967,6 +2977,7 @@ class ArgoWorkflows(object):
  env = {
  # These values are needed by Metaflow to set it's internal
  # state appropriately.
+ "METAFLOW_CODE_METADATA": self.code_package_metadata,
  "METAFLOW_CODE_URL": self.code_package_url,
  "METAFLOW_CODE_SHA": self.code_package_sha,
  "METAFLOW_CODE_DS": self.flow_datastore.TYPE,