metaflow 2.15.14__py2.py3-none-any.whl → 2.15.15__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. metaflow/__init__.py +2 -2
  2. metaflow/_vendor/click/core.py +4 -3
  3. metaflow/cmd/develop/stubs.py +9 -27
  4. metaflow/datastore/task_datastore.py +3 -3
  5. metaflow/decorators.py +3 -3
  6. metaflow/extension_support/__init__.py +25 -42
  7. metaflow/parameters.py +2 -2
  8. metaflow/plugins/argo/argo_workflows_cli.py +4 -4
  9. metaflow/plugins/cards/card_modules/chevron/renderer.py +1 -1
  10. metaflow/plugins/cards/card_modules/test_cards.py +6 -6
  11. metaflow/plugins/cards/component_serializer.py +1 -8
  12. metaflow/plugins/package_cli.py +12 -2
  13. metaflow/plugins/pypi/bootstrap.py +2 -2
  14. metaflow/plugins/uv/bootstrap.py +18 -1
  15. metaflow/plugins/uv/uv_environment.py +1 -1
  16. metaflow/runner/click_api.py +16 -9
  17. metaflow/runner/deployer_impl.py +17 -5
  18. metaflow/runner/metaflow_runner.py +40 -13
  19. metaflow/runner/subprocess_manager.py +1 -1
  20. metaflow/runner/utils.py +8 -0
  21. metaflow/user_configs/config_options.py +6 -6
  22. metaflow/user_configs/config_parameters.py +211 -45
  23. metaflow/util.py +2 -5
  24. metaflow/vendor.py +0 -1
  25. metaflow/version.py +1 -1
  26. {metaflow-2.15.14.dist-info → metaflow-2.15.15.dist-info}/METADATA +2 -2
  27. {metaflow-2.15.14.dist-info → metaflow-2.15.15.dist-info}/RECORD +34 -38
  28. {metaflow-2.15.14.dist-info → metaflow-2.15.15.dist-info}/WHEEL +1 -1
  29. metaflow/_vendor/v3_5/__init__.py +0 -1
  30. metaflow/_vendor/v3_5/importlib_metadata/__init__.py +0 -644
  31. metaflow/_vendor/v3_5/importlib_metadata/_compat.py +0 -152
  32. metaflow/_vendor/v3_5/zipp.py +0 -329
  33. {metaflow-2.15.14.data → metaflow-2.15.15.data}/data/share/metaflow/devtools/Makefile +0 -0
  34. {metaflow-2.15.14.data → metaflow-2.15.15.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  35. {metaflow-2.15.14.data → metaflow-2.15.15.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  36. {metaflow-2.15.14.dist-info → metaflow-2.15.15.dist-info}/entry_points.txt +0 -0
  37. {metaflow-2.15.14.dist-info → metaflow-2.15.15.dist-info}/licenses/LICENSE +0 -0
  38. {metaflow-2.15.14.dist-info → metaflow-2.15.15.dist-info}/top_level.txt +0 -0
metaflow/__init__.py CHANGED
@@ -128,8 +128,8 @@ _import_tl_plugins(globals())
128
128
  # this auto-generates decorator functions from Decorator objects
129
129
  # in the top-level metaflow namespace
130
130
  _import_plugin_decorators(globals())
131
- # Setting card import for only python 3.4
132
- if sys.version_info[0] >= 3 and sys.version_info[1] >= 4:
131
+ # Setting card import for only python 3.6
132
+ if sys.version_info[0] >= 3 and sys.version_info[1] >= 6:
133
133
  from . import cards
134
134
 
135
135
  # Client
@@ -719,7 +719,7 @@ class BaseCommand(object):
719
719
  prog_name=None,
720
720
  complete_var=None,
721
721
  standalone_mode=True,
722
- **extra
722
+ **extra,
723
723
  ):
724
724
  """This is the way to invoke a script with all the bells and
725
725
  whistles as a command line application. This will always terminate
@@ -1101,7 +1101,7 @@ class MultiCommand(Command):
1101
1101
  subcommand_metavar=None,
1102
1102
  chain=False,
1103
1103
  result_callback=None,
1104
- **attrs
1104
+ **attrs,
1105
1105
  ):
1106
1106
  Command.__init__(self, name, **attrs)
1107
1107
  if no_args_is_help is None:
@@ -1463,6 +1463,7 @@ class Parameter(object):
1463
1463
  parameter. The old callback format will still work, but it will
1464
1464
  raise a warning to give you a chance to migrate the code easier.
1465
1465
  """
1466
+
1466
1467
  param_type_name = "parameter"
1467
1468
 
1468
1469
  def __init__(
@@ -1708,7 +1709,7 @@ class Option(Parameter):
1708
1709
  hidden=False,
1709
1710
  show_choices=True,
1710
1711
  show_envvar=False,
1711
- **attrs
1712
+ **attrs,
1712
1713
  ):
1713
1714
  default_is_missing = attrs.get("default", _missing) is _missing
1714
1715
  Parameter.__init__(self, param_decls, type=type, **attrs)
@@ -12,25 +12,13 @@ from . import develop
12
12
  from .stub_generator import StubGenerator
13
13
 
14
14
  _py_ver = sys.version_info[:2]
15
- _metadata_package = None
16
15
 
17
-
18
- def _check_stubs_supported():
19
- global _metadata_package
20
- if _metadata_package is not None:
21
- return _metadata_package
22
- else:
23
- if _py_ver >= (3, 4):
24
- if _py_ver >= (3, 8):
25
- from importlib import metadata
26
- elif _py_ver >= (3, 7):
27
- from metaflow._vendor.v3_7 import importlib_metadata as metadata
28
- elif _py_ver >= (3, 6):
29
- from metaflow._vendor.v3_6 import importlib_metadata as metadata
30
- else:
31
- from metaflow._vendor.v3_5 import importlib_metadata as metadata
32
- _metadata_package = metadata
33
- return _metadata_package
16
+ if _py_ver >= (3, 8):
17
+ from importlib import metadata
18
+ elif _py_ver >= (3, 7):
19
+ from metaflow._vendor.v3_7 import importlib_metadata as metadata
20
+ else:
21
+ from metaflow._vendor.v3_6 import importlib_metadata as metadata
34
22
 
35
23
 
36
24
  @develop.group(short_help="Stubs management")
@@ -45,12 +33,6 @@ def stubs(ctx: Any):
45
33
  This CLI provides utilities to check and generate stubs for your current Metaflow
46
34
  installation.
47
35
  """
48
- if _check_stubs_supported() is None:
49
- raise click.UsageError(
50
- "Building and installing stubs are not supported on Python %d.%d "
51
- "(3.4 minimum required)" % _py_ver,
52
- ctx=ctx,
53
- )
54
36
 
55
37
 
56
38
  @stubs.command(short_help="Check validity of stubs")
@@ -187,7 +169,7 @@ setup(
187
169
  packages=find_namespace_packages(),
188
170
  package_data={{"metaflow-stubs": ["generated_for.txt", "py.typed", "**/*.pyi"]}},
189
171
  install_requires=["metaflow=={mf_version}"],
190
- python_requires=">=3.5.2",
172
+ python_requires=">=3.6.1",
191
173
  )
192
174
  """
193
175
  )
@@ -330,14 +312,14 @@ def get_packages_for_stubs() -> Tuple[List[Tuple[str, str]], List[str]]:
330
312
  # some reason it shows up multiple times.
331
313
  interesting_dists = [
332
314
  d
333
- for d in _metadata_package.distributions()
315
+ for d in metadata.distributions()
334
316
  if any(
335
317
  [
336
318
  p == "metaflow-stubs"
337
319
  for p in (d.read_text("top_level.txt") or "").split()
338
320
  ]
339
321
  )
340
- and isinstance(d, _metadata_package.PathDistribution)
322
+ and isinstance(d, metadata.PathDistribution)
341
323
  ]
342
324
 
343
325
  for dist in interesting_dists:
@@ -118,7 +118,7 @@ class TaskDataStore(object):
118
118
  # The GZIP encodings are for backward compatibility
119
119
  self._encodings = {"pickle-v2", "gzip+pickle-v2"}
120
120
  ver = sys.version_info[0] * 10 + sys.version_info[1]
121
- if ver >= 34:
121
+ if ver >= 36:
122
122
  self._encodings.add("pickle-v4")
123
123
  self._encodings.add("gzip+pickle-v4")
124
124
 
@@ -289,7 +289,7 @@ class TaskDataStore(object):
289
289
  except (SystemError, OverflowError) as e:
290
290
  raise DataException(
291
291
  "Artifact *%s* is very large (over 2GB). "
292
- "You need to use Python 3.4 or newer if you want to "
292
+ "You need to use Python 3.6 or newer if you want to "
293
293
  "serialize large objects." % name
294
294
  ) from e
295
295
  except TypeError as e:
@@ -352,7 +352,7 @@ class TaskDataStore(object):
352
352
  encode_type = "gzip+pickle-v2"
353
353
  if encode_type not in self._encodings:
354
354
  raise DataException(
355
- "Python 3.4 or later is required to load artifact '%s'" % name
355
+ "Python 3.6 or later is required to load artifact '%s'" % name
356
356
  )
357
357
  else:
358
358
  to_load[self._objects[name]].append(name)
metaflow/decorators.py CHANGED
@@ -152,7 +152,7 @@ class Decorator(object):
152
152
  # Note that by design, later values override previous ones.
153
153
  self.attributes, new_user_attributes = unpack_delayed_evaluator(self.attributes)
154
154
  self._user_defined_attributes.update(new_user_attributes)
155
- self.attributes = resolve_delayed_evaluator(self.attributes)
155
+ self.attributes = resolve_delayed_evaluator(self.attributes, to_dict=True)
156
156
 
157
157
  self._ran_init = True
158
158
 
@@ -638,7 +638,7 @@ StepFlag = NewType("StepFlag", bool)
638
638
 
639
639
  @overload
640
640
  def step(
641
- f: Callable[[FlowSpecDerived], None]
641
+ f: Callable[[FlowSpecDerived], None],
642
642
  ) -> Callable[[FlowSpecDerived, StepFlag], None]: ...
643
643
 
644
644
 
@@ -649,7 +649,7 @@ def step(
649
649
 
650
650
 
651
651
  def step(
652
- f: Union[Callable[[FlowSpecDerived], None], Callable[[FlowSpecDerived, Any], None]]
652
+ f: Union[Callable[[FlowSpecDerived], None], Callable[[FlowSpecDerived, Any], None]],
653
653
  ):
654
654
  """
655
655
  Marks a method in a FlowSpec as a Metaflow Step. Note that this
@@ -103,9 +103,6 @@ def load_module(module_name):
103
103
 
104
104
  def get_modules(extension_point):
105
105
  modules_to_load = []
106
- if not _mfext_supported:
107
- _ext_debug("Not supported for your Python version -- 3.4+ is needed")
108
- return []
109
106
  if extension_point not in _extension_points:
110
107
  raise RuntimeError(
111
108
  "Metaflow extension point '%s' not supported" % extension_point
@@ -146,9 +143,6 @@ def dump_module_info(all_packages=None, pkgs_per_extension_point=None):
146
143
 
147
144
 
148
145
  def get_extensions_in_dir(d):
149
- if not _mfext_supported:
150
- _ext_debug("Not supported for your Python version -- 3.4+ is needed")
151
- return None, None
152
146
  return _get_extension_packages(ignore_info_file=True, restrict_to_directories=[d])
153
147
 
154
148
 
@@ -312,21 +306,16 @@ def multiload_all(modules, extension_point, dst_globals):
312
306
 
313
307
 
314
308
  _py_ver = sys.version_info[:2]
315
- _mfext_supported = False
316
309
  _aliased_modules = []
317
310
 
318
- if _py_ver >= (3, 4):
319
- import importlib.util
311
+ import importlib.util
320
312
 
321
- if _py_ver >= (3, 8):
322
- from importlib import metadata
323
- elif _py_ver >= (3, 7):
324
- from metaflow._vendor.v3_7 import importlib_metadata as metadata
325
- elif _py_ver >= (3, 6):
326
- from metaflow._vendor.v3_6 import importlib_metadata as metadata
327
- else:
328
- from metaflow._vendor.v3_5 import importlib_metadata as metadata
329
- _mfext_supported = True
313
+ if _py_ver >= (3, 8):
314
+ from importlib import metadata
315
+ elif _py_ver >= (3, 7):
316
+ from metaflow._vendor.v3_7 import importlib_metadata as metadata
317
+ else:
318
+ from metaflow._vendor.v3_6 import importlib_metadata as metadata
330
319
 
331
320
  # Extension points are the directories that can be present in a EXT_PKG to
332
321
  # contribute to that extension point. For example, if you have
@@ -355,10 +344,6 @@ def _ext_debug(*args, **kwargs):
355
344
 
356
345
 
357
346
  def _get_extension_packages(ignore_info_file=False, restrict_to_directories=None):
358
- if not _mfext_supported:
359
- _ext_debug("Not supported for your Python version -- 3.4+ is needed")
360
- return {}, {}
361
-
362
347
  # If we have an INFO file with the appropriate information (if running from a saved
363
348
  # code package for example), we use that directly
364
349
  # Pre-compute on _extension_points
@@ -381,12 +366,11 @@ def _get_extension_packages(ignore_info_file=False, restrict_to_directories=None
381
366
  try:
382
367
  extensions_module = importlib.import_module(EXT_PKG)
383
368
  except ImportError as e:
384
- if _py_ver >= (3, 6):
385
- # e.name is set to the name of the package that fails to load
386
- # so don't error ONLY IF the error is importing this module (but do
387
- # error if there is a transitive import error)
388
- if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
389
- raise
369
+ # e.name is set to the name of the package that fails to load
370
+ # so don't error ONLY IF the error is importing this module (but do
371
+ # error if there is a transitive import error)
372
+ if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
373
+ raise
390
374
  return {}, {}
391
375
 
392
376
  if restrict_to_directories:
@@ -894,20 +878,19 @@ def _attempt_load_module(module_name):
894
878
  try:
895
879
  extension_module = importlib.import_module(module_name)
896
880
  except ImportError as e:
897
- if _py_ver >= (3, 6):
898
- # e.name is set to the name of the package that fails to load
899
- # so don't error ONLY IF the error is importing this module (but do
900
- # error if there is a transitive import error)
901
- errored_names = [EXT_PKG]
902
- parts = module_name.split(".")
903
- for p in parts[1:]:
904
- errored_names.append("%s.%s" % (errored_names[-1], p))
905
- if not (isinstance(e, ModuleNotFoundError) and e.name in errored_names):
906
- print(
907
- "The following exception occurred while trying to load '%s' ('%s')"
908
- % (EXT_PKG, module_name)
909
- )
910
- raise
881
+ # e.name is set to the name of the package that fails to load
882
+ # so don't error ONLY IF the error is importing this module (but do
883
+ # error if there is a transitive import error)
884
+ errored_names = [EXT_PKG]
885
+ parts = module_name.split(".")
886
+ for p in parts[1:]:
887
+ errored_names.append("%s.%s" % (errored_names[-1], p))
888
+ if not (isinstance(e, ModuleNotFoundError) and e.name in errored_names):
889
+ print(
890
+ "The following exception occurred while trying to load '%s' ('%s')"
891
+ % (EXT_PKG, module_name)
892
+ )
893
+ raise
911
894
  _ext_debug(" Unknown error when loading '%s': %s" % (module_name, e))
912
895
  return None
913
896
  else:
metaflow/parameters.py CHANGED
@@ -373,7 +373,7 @@ class Parameter(object):
373
373
  # Do it one item at a time so errors are ignored at that level (as opposed to
374
374
  # at the entire kwargs level)
375
375
  self.kwargs = {
376
- k: resolve_delayed_evaluator(v, ignore_errors=ignore_errors)
376
+ k: resolve_delayed_evaluator(v, ignore_errors=ignore_errors, to_dict=True)
377
377
  for k, v in self.kwargs.items()
378
378
  }
379
379
 
@@ -382,7 +382,7 @@ class Parameter(object):
382
382
  for key, value in self._override_kwargs.items():
383
383
  if value is not None:
384
384
  self.kwargs[key] = resolve_delayed_evaluator(
385
- value, ignore_errors=ignore_errors
385
+ value, ignore_errors=ignore_errors, to_dict=True
386
386
  )
387
387
  # Set two default values if no-one specified them
388
388
  self.kwargs.setdefault("required", False)
@@ -344,20 +344,20 @@ def create(
344
344
 
345
345
 
346
346
  def check_python_version(obj):
347
- # argo-workflows integration for Metaflow isn't supported for Py versions below 3.5.
347
+ # argo-workflows integration for Metaflow isn't supported for Py versions below 3.6.
348
348
  # This constraint can very well be lifted if desired.
349
- if sys.version_info < (3, 5):
349
+ if sys.version_info < (3, 6):
350
350
  obj.echo("")
351
351
  obj.echo(
352
352
  "Metaflow doesn't support Argo Workflows for Python %s right now."
353
353
  % platform.python_version()
354
354
  )
355
355
  obj.echo(
356
- "Please upgrade your Python interpreter to version 3.5 (or higher) or "
356
+ "Please upgrade your Python interpreter to version 3.6 (or higher) or "
357
357
  "reach out to us at slack.outerbounds.co for more help."
358
358
  )
359
359
  raise UnsupportedPythonVersion(
360
- "Try again with a more recent version of Python (>=3.5)."
360
+ "Try again with a more recent version of Python (>=3.6)."
361
361
  )
362
362
 
363
363
 
@@ -61,7 +61,7 @@ def _get_key(key, scopes, warn, keep, def_ldel, def_rdel):
61
61
  # Loop through the scopes
62
62
  for scope in scopes:
63
63
  try:
64
- # For every dot seperated key
64
+ # For every dot separated key
65
65
  for child in key.split("."):
66
66
  # Move into the scope
67
67
  try:
@@ -34,7 +34,7 @@ class TestPathSpecCard(MetaflowCard):
34
34
  class TestEditableCard(MetaflowCard):
35
35
  type = "test_editable_card"
36
36
 
37
- seperator = "$&#!!@*"
37
+ separator = "$&#!!@*"
38
38
 
39
39
  ALLOW_USER_COMPONENTS = True
40
40
 
@@ -42,13 +42,13 @@ class TestEditableCard(MetaflowCard):
42
42
  self._components = components
43
43
 
44
44
  def render(self, task):
45
- return self.seperator.join([str(comp) for comp in self._components])
45
+ return self.separator.join([str(comp) for comp in self._components])
46
46
 
47
47
 
48
48
  class TestEditableCard2(MetaflowCard):
49
49
  type = "test_editable_card_2"
50
50
 
51
- seperator = "$&#!!@*"
51
+ separator = "$&#!!@*"
52
52
 
53
53
  ALLOW_USER_COMPONENTS = True
54
54
 
@@ -56,19 +56,19 @@ class TestEditableCard2(MetaflowCard):
56
56
  self._components = components
57
57
 
58
58
  def render(self, task):
59
- return self.seperator.join([str(comp) for comp in self._components])
59
+ return self.separator.join([str(comp) for comp in self._components])
60
60
 
61
61
 
62
62
  class TestNonEditableCard(MetaflowCard):
63
63
  type = "test_non_editable_card"
64
64
 
65
- seperator = "$&#!!@*"
65
+ separator = "$&#!!@*"
66
66
 
67
67
  def __init__(self, components=[], **kwargs):
68
68
  self._components = components
69
69
 
70
70
  def render(self, task):
71
- return self.seperator.join([str(comp) for comp in self._components])
71
+ return self.separator.join([str(comp) for comp in self._components])
72
72
 
73
73
 
74
74
  class TestMockCard(MetaflowCard):
@@ -57,15 +57,8 @@ class ComponentStore:
57
57
  The `_component_map` attribute is supposed to be a dictionary so that we can access the components by their ids.
58
58
  But we also want to maintain order in which components are inserted since all of these components are going to be visible on a UI.
59
59
  Since python3.6 dictionaries are ordered by default so we can use the default python `dict`.
60
- For python3.5 and below we need to use an OrderedDict since `dict`'s are not ordered by default.
61
60
  """
62
- python_version = int(platform.python_version_tuple()[0]) * 10 + int(
63
- platform.python_version_tuple()[1]
64
- )
65
- if python_version < 36:
66
- self._component_map = OrderedDict()
67
- else:
68
- self._component_map = {}
61
+ self._component_map = {}
69
62
 
70
63
  def __init__(self, logger, card_type=None, components=None, user_set_id=None):
71
64
  self._logger = logger
@@ -38,14 +38,24 @@ def info(obj):
38
38
 
39
39
 
40
40
  @package.command(help="List files included in the code package.")
41
+ @click.option(
42
+ "--archive/--no-archive",
43
+ default=False,
44
+ help="If True, lists the file paths as present in the code package archive; "
45
+ "otherwise, lists the files on your filesystem included in the code package",
46
+ show_default=True,
47
+ )
41
48
  @click.pass_obj
42
- def list(obj):
49
+ def list(obj, archive=False):
43
50
  obj.echo(
44
51
  "Files included in the code package " "(change with --package-suffixes):",
45
52
  fg="magenta",
46
53
  bold=False,
47
54
  )
48
- obj.echo_always("\n".join(path for path, _ in obj.package.path_tuples()))
55
+ if archive:
56
+ obj.echo_always("\n".join(path for _, path in obj.package.path_tuples()))
57
+ else:
58
+ obj.echo_always("\n".join(path for path, _ in obj.package.path_tuples()))
49
59
 
50
60
 
51
61
  @package.command(help="Save the current code package in a tar file")
@@ -14,7 +14,7 @@ from urllib.request import urlopen
14
14
  from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONDA_USE_FAST_INIT
15
15
  from metaflow.plugins import DATASTORES
16
16
  from metaflow.plugins.pypi.utils import MICROMAMBA_MIRROR_URL, MICROMAMBA_URL
17
- from metaflow.util import which
17
+ from metaflow.util import which, get_metaflow_root
18
18
  from urllib.request import Request
19
19
  import warnings
20
20
 
@@ -366,7 +366,7 @@ if __name__ == "__main__":
366
366
  # Move MAGIC_FILE inside local datastore.
367
367
  os.makedirs(manifest_dir, exist_ok=True)
368
368
  shutil.move(
369
- os.path.join(os.getcwd(), MAGIC_FILE),
369
+ os.path.join(get_metaflow_root(), MAGIC_FILE),
370
370
  os.path.join(manifest_dir, MAGIC_FILE),
371
371
  )
372
372
  with open(os.path.join(manifest_dir, MAGIC_FILE)) as f:
@@ -4,6 +4,7 @@ import sys
4
4
  import time
5
5
 
6
6
  from metaflow.util import which
7
+ from metaflow.info_file import read_info_file
7
8
  from metaflow.metaflow_config import get_pinned_conda_libs
8
9
  from urllib.request import Request, urlopen
9
10
  from urllib.error import URLError
@@ -78,11 +79,27 @@ if __name__ == "__main__":
78
79
  # return only dependency names instead of pinned versions
79
80
  return pinned.keys()
80
81
 
82
+ def skip_metaflow_dependencies():
83
+ skip_pkgs = ["metaflow"]
84
+ info = read_info_file()
85
+ if info is not None:
86
+ try:
87
+ skip_pkgs.extend([ext_name for ext_name in info["ext_info"][0].keys()])
88
+ except Exception:
89
+ print(
90
+ "Failed to read INFO. Metaflow-related packages might get installed during runtime."
91
+ )
92
+
93
+ return skip_pkgs
94
+
81
95
  def sync_uv_project(datastore_type):
82
96
  print("Syncing uv project...")
83
97
  dependencies = " ".join(get_dependencies(datastore_type))
98
+ skip_pkgs = " ".join(
99
+ [f"--no-install-package {dep}" for dep in skip_metaflow_dependencies()]
100
+ )
84
101
  cmd = f"""set -e;
85
- uv sync --frozen --no-install-package metaflow;
102
+ uv sync --frozen {skip_pkgs};
86
103
  uv pip install {dependencies} --strict
87
104
  """
88
105
  run_cmd(cmd)
@@ -22,7 +22,7 @@ class UVEnvironment(MetaflowEnvironment):
22
22
  self.logger("Bootstrapping uv...")
23
23
 
24
24
  def executable(self, step_name, default=None):
25
- return "uv run python"
25
+ return "uv run --no-sync python"
26
26
 
27
27
  def add_to_package(self):
28
28
  # NOTE: We treat uv.lock and pyproject.toml as regular project assets and ship these along user code as part of the code package
@@ -467,9 +467,14 @@ class MetaflowAPI(object):
467
467
  config_file = defaults.get("config")
468
468
 
469
469
  if config_file:
470
- config_file = map(
471
- lambda x: (x[0], ConvertPath.convert_value(x[1], is_default)),
472
- config_file,
470
+ config_file = dict(
471
+ map(
472
+ lambda x: (
473
+ x[0],
474
+ ConvertPath.convert_value(x[1], is_default),
475
+ ),
476
+ config_file,
477
+ )
473
478
  )
474
479
 
475
480
  is_default = False
@@ -479,12 +484,14 @@ class MetaflowAPI(object):
479
484
  config_value = defaults.get("config_value")
480
485
 
481
486
  if config_value:
482
- config_value = map(
483
- lambda x: (
484
- x[0],
485
- ConvertDictOrStr.convert_value(x[1], is_default),
486
- ),
487
- config_value,
487
+ config_value = dict(
488
+ map(
489
+ lambda x: (
490
+ x[0],
491
+ ConvertDictOrStr.convert_value(x[1], is_default),
492
+ ),
493
+ config_value,
494
+ )
488
495
  )
489
496
 
490
497
  if (config_file is None) ^ (config_value is None):
@@ -5,8 +5,10 @@ import sys
5
5
 
6
6
  from typing import Any, ClassVar, Dict, Optional, TYPE_CHECKING, Type
7
7
 
8
+ from metaflow.metaflow_config import CLICK_API_PROCESS_CONFIG
9
+
8
10
  from .subprocess_manager import SubprocessManager
9
- from .utils import get_lower_level_group, handle_timeout, temporary_fifo
11
+ from .utils import get_lower_level_group, handle_timeout, temporary_fifo, with_dir
10
12
 
11
13
  if TYPE_CHECKING:
12
14
  import metaflow.runner.deployer
@@ -88,7 +90,7 @@ class DeployerImpl(object):
88
90
  self.show_output = show_output
89
91
  self.profile = profile
90
92
  self.env = env
91
- self.cwd = cwd
93
+ self.cwd = cwd or os.getcwd()
92
94
  self.file_read_timeout = file_read_timeout
93
95
 
94
96
  self.env_vars = os.environ.copy()
@@ -140,9 +142,19 @@ class DeployerImpl(object):
140
142
  ) -> "metaflow.runner.deployer.DeployedFlow":
141
143
  with temporary_fifo() as (attribute_file_path, attribute_file_fd):
142
144
  # every subclass needs to have `self.deployer_kwargs`
143
- command = get_lower_level_group(
144
- self.api, self.top_level_kwargs, self.TYPE, self.deployer_kwargs
145
- ).create(deployer_attribute_file=attribute_file_path, **kwargs)
145
+ # TODO: Get rid of CLICK_API_PROCESS_CONFIG in the near future
146
+ if CLICK_API_PROCESS_CONFIG:
147
+ # We need to run this in the cwd because configs depend on files
148
+ # that may be located in paths relative to the directory the user
149
+ # wants to run in
150
+ with with_dir(self.cwd):
151
+ command = get_lower_level_group(
152
+ self.api, self.top_level_kwargs, self.TYPE, self.deployer_kwargs
153
+ ).create(deployer_attribute_file=attribute_file_path, **kwargs)
154
+ else:
155
+ command = get_lower_level_group(
156
+ self.api, self.top_level_kwargs, self.TYPE, self.deployer_kwargs
157
+ ).create(deployer_attribute_file=attribute_file_path, **kwargs)
146
158
 
147
159
  pid = self.spm.run_command(
148
160
  [sys.executable, *command],