metaflow 2.15.14__py2.py3-none-any.whl → 2.15.16__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. metaflow/__init__.py +2 -2
  2. metaflow/_vendor/click/core.py +4 -3
  3. metaflow/cmd/develop/stub_generator.py +30 -16
  4. metaflow/cmd/develop/stubs.py +9 -27
  5. metaflow/datastore/task_datastore.py +3 -3
  6. metaflow/decorators.py +3 -3
  7. metaflow/extension_support/__init__.py +25 -42
  8. metaflow/parameters.py +2 -2
  9. metaflow/plugins/argo/argo_workflows_cli.py +4 -4
  10. metaflow/plugins/argo/argo_workflows_deployer_objects.py +6 -49
  11. metaflow/plugins/aws/aws_client.py +6 -0
  12. metaflow/plugins/cards/card_modules/chevron/renderer.py +1 -1
  13. metaflow/plugins/cards/card_modules/test_cards.py +6 -6
  14. metaflow/plugins/cards/component_serializer.py +1 -8
  15. metaflow/plugins/datatools/s3/s3op.py +1 -1
  16. metaflow/plugins/metadata_providers/service.py +12 -8
  17. metaflow/plugins/package_cli.py +12 -2
  18. metaflow/plugins/pypi/bootstrap.py +2 -2
  19. metaflow/plugins/uv/bootstrap.py +18 -1
  20. metaflow/plugins/uv/uv_environment.py +1 -1
  21. metaflow/runner/click_api.py +16 -9
  22. metaflow/runner/deployer.py +49 -0
  23. metaflow/runner/deployer_impl.py +17 -5
  24. metaflow/runner/metaflow_runner.py +40 -13
  25. metaflow/runner/subprocess_manager.py +1 -1
  26. metaflow/runner/utils.py +8 -0
  27. metaflow/user_configs/config_options.py +6 -6
  28. metaflow/user_configs/config_parameters.py +211 -45
  29. metaflow/util.py +2 -5
  30. metaflow/vendor.py +0 -1
  31. metaflow/version.py +1 -1
  32. {metaflow-2.15.14.dist-info → metaflow-2.15.16.dist-info}/METADATA +2 -2
  33. {metaflow-2.15.14.dist-info → metaflow-2.15.16.dist-info}/RECORD +40 -44
  34. {metaflow-2.15.14.dist-info → metaflow-2.15.16.dist-info}/WHEEL +1 -1
  35. metaflow/_vendor/v3_5/__init__.py +0 -1
  36. metaflow/_vendor/v3_5/importlib_metadata/__init__.py +0 -644
  37. metaflow/_vendor/v3_5/importlib_metadata/_compat.py +0 -152
  38. metaflow/_vendor/v3_5/zipp.py +0 -329
  39. {metaflow-2.15.14.data → metaflow-2.15.16.data}/data/share/metaflow/devtools/Makefile +0 -0
  40. {metaflow-2.15.14.data → metaflow-2.15.16.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  41. {metaflow-2.15.14.data → metaflow-2.15.16.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  42. {metaflow-2.15.14.dist-info → metaflow-2.15.16.dist-info}/entry_points.txt +0 -0
  43. {metaflow-2.15.14.dist-info → metaflow-2.15.16.dist-info}/licenses/LICENSE +0 -0
  44. {metaflow-2.15.14.dist-info → metaflow-2.15.16.dist-info}/top_level.txt +0 -0
metaflow/__init__.py CHANGED
@@ -128,8 +128,8 @@ _import_tl_plugins(globals())
  # this auto-generates decorator functions from Decorator objects
  # in the top-level metaflow namespace
  _import_plugin_decorators(globals())
- # Setting card import for only python 3.4
- if sys.version_info[0] >= 3 and sys.version_info[1] >= 4:
+ # Setting card import for only python 3.6
+ if sys.version_info[0] >= 3 and sys.version_info[1] >= 6:
  from . import cards

  # Client
metaflow/_vendor/click/core.py CHANGED
@@ -719,7 +719,7 @@ class BaseCommand(object):
  prog_name=None,
  complete_var=None,
  standalone_mode=True,
- **extra
+ **extra,
  ):
  """This is the way to invoke a script with all the bells and
  whistles as a command line application. This will always terminate
@@ -1101,7 +1101,7 @@ class MultiCommand(Command):
  subcommand_metavar=None,
  chain=False,
  result_callback=None,
- **attrs
+ **attrs,
  ):
  Command.__init__(self, name, **attrs)
  if no_args_is_help is None:
@@ -1463,6 +1463,7 @@ class Parameter(object):
  parameter. The old callback format will still work, but it will
  raise a warning to give you a chance to migrate the code easier.
  """
+
  param_type_name = "parameter"

  def __init__(
@@ -1708,7 +1709,7 @@ class Option(Parameter):
  hidden=False,
  show_choices=True,
  show_envvar=False,
- **attrs
+ **attrs,
  ):
  default_is_missing = attrs.get("default", _missing) is _missing
  Parameter.__init__(self, param_decls, type=type, **attrs)
metaflow/cmd/develop/stub_generator.py CHANGED
@@ -488,9 +488,6 @@ class StubGenerator:
  self._imports.add(name)

  def _add_to_typing_check(name, is_module=False):
- # if name != self._current_module_name:
- # self._typing_imports.add(name)
- #
  if name == "None":
  return
  if is_module:
@@ -504,6 +501,24 @@ class StubGenerator:
  # the current file
  self._typing_imports.add(splits[0])

+ def _format_qualified_class_name(cls: type) -> str:
+ """Helper to format a class with its qualified module name"""
+ # Special case for NoneType - return None
+ if cls.__name__ == "NoneType":
+ return "None"
+
+ module = inspect.getmodule(cls)
+ if (
+ module
+ and module.__name__ != "builtins"
+ and module.__name__ != "__main__"
+ ):
+ module_name = self._get_module_name_alias(module.__name__)
+ _add_to_typing_check(module_name, is_module=True)
+ return f"{module_name}.{cls.__name__}"
+ else:
+ return cls.__name__
+
  if isinstance(element, str):
  # Special case for self referential things (particularly in a class)
  if element == self._current_name:
@@ -557,19 +572,15 @@ class StubGenerator:
  return element.__name__
  elif isinstance(element, type(Ellipsis)):
  return "..."
- # elif (
- # isinstance(element, typing._GenericAlias)
- # and hasattr(element, "_name")
- # and element._name in ("List", "Tuple", "Dict", "Set")
- # ):
- # # 3.7 has these as _GenericAlias but they don't behave like the ones in 3.10
- # _add_to_import("typing")
- # return str(element)
  elif isinstance(element, typing._GenericAlias):
  # We need to check things recursively in __args__ if it exists
  args_str = []
  for arg in getattr(element, "__args__", []):
- args_str.append(self._get_element_name_with_module(arg))
+ # Special handling for class objects in type arguments
+ if isinstance(arg, type):
+ args_str.append(_format_qualified_class_name(arg))
+ else:
+ args_str.append(self._get_element_name_with_module(arg))

  _add_to_import("typing")
  if element._name:
@@ -584,12 +595,15 @@
  args_str = [call_args, args_str[-1]]
  return "typing.%s[%s]" % (element._name, ", ".join(args_str))
  else:
- return "%s[%s]" % (element.__origin__, ", ".join(args_str))
+ # Handle the case where we have a generic type without a _name
+ origin = element.__origin__
+ if isinstance(origin, type):
+ origin_str = _format_qualified_class_name(origin)
+ else:
+ origin_str = str(origin)
+ return "%s[%s]" % (origin_str, ", ".join(args_str))
  elif isinstance(element, ForwardRef):
  f_arg = self._get_module_name_alias(element.__forward_arg__)
- # if f_arg in ("Run", "Task"): # HACK -- forward references in current.py
- # _add_to_import("metaflow")
- # f_arg = "metaflow.%s" % f_arg
  _add_to_typing_check(f_arg)
  return '"%s"' % f_arg
  elif inspect.getmodule(element) == inspect.getmodule(typing):
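Note: a quick standalone sketch (not the StubGenerator code itself; names below are illustrative) of why the new _format_qualified_class_name helper matters — a class object appearing inside a typing generic must be rendered with its module-qualified name for the generated stub to resolve:

# Illustrative only; mirrors the qualification logic added above.
import inspect
import typing
import datetime

def qualified_name(cls: type) -> str:
    if cls.__name__ == "NoneType":
        return "None"  # NoneType renders as None in annotations
    module = inspect.getmodule(cls)
    if module and module.__name__ not in ("builtins", "__main__"):
        return f"{module.__name__}.{cls.__name__}"
    return cls.__name__

alias = typing.Dict[str, datetime.datetime]
rendered = ", ".join(
    qualified_name(a) if isinstance(a, type) else str(a)
    for a in alias.__args__
)
print(f"typing.Dict[{rendered}]")  # -> typing.Dict[str, datetime.datetime]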
metaflow/cmd/develop/stubs.py CHANGED
@@ -12,25 +12,13 @@ from . import develop
  from .stub_generator import StubGenerator

  _py_ver = sys.version_info[:2]
- _metadata_package = None

-
- def _check_stubs_supported():
- global _metadata_package
- if _metadata_package is not None:
- return _metadata_package
- else:
- if _py_ver >= (3, 4):
- if _py_ver >= (3, 8):
- from importlib import metadata
- elif _py_ver >= (3, 7):
- from metaflow._vendor.v3_7 import importlib_metadata as metadata
- elif _py_ver >= (3, 6):
- from metaflow._vendor.v3_6 import importlib_metadata as metadata
- else:
- from metaflow._vendor.v3_5 import importlib_metadata as metadata
- _metadata_package = metadata
- return _metadata_package
+ if _py_ver >= (3, 8):
+ from importlib import metadata
+ elif _py_ver >= (3, 7):
+ from metaflow._vendor.v3_7 import importlib_metadata as metadata
+ else:
+ from metaflow._vendor.v3_6 import importlib_metadata as metadata


  @develop.group(short_help="Stubs management")
@@ -45,12 +33,6 @@ def stubs(ctx: Any):
  This CLI provides utilities to check and generate stubs for your current Metaflow
  installation.
  """
- if _check_stubs_supported() is None:
- raise click.UsageError(
- "Building and installing stubs are not supported on Python %d.%d "
- "(3.4 minimum required)" % _py_ver,
- ctx=ctx,
- )


  @stubs.command(short_help="Check validity of stubs")
@@ -187,7 +169,7 @@ setup(
  packages=find_namespace_packages(),
  package_data={{"metaflow-stubs": ["generated_for.txt", "py.typed", "**/*.pyi"]}},
  install_requires=["metaflow=={mf_version}"],
- python_requires=">=3.5.2",
+ python_requires=">=3.6.1",
  )
  """
  )
@@ -330,14 +312,14 @@ def get_packages_for_stubs() -> Tuple[List[Tuple[str, str]], List[str]]:
  # some reason it shows up multiple times.
  interesting_dists = [
  d
- for d in _metadata_package.distributions()
+ for d in metadata.distributions()
  if any(
  [
  p == "metaflow-stubs"
  for p in (d.read_text("top_level.txt") or "").split()
  ]
  )
- and isinstance(d, _metadata_package.PathDistribution)
+ and isinstance(d, metadata.PathDistribution)
  ]

  for dist in interesting_dists:
metaflow/datastore/task_datastore.py CHANGED
@@ -118,7 +118,7 @@ class TaskDataStore(object):
  # The GZIP encodings are for backward compatibility
  self._encodings = {"pickle-v2", "gzip+pickle-v2"}
  ver = sys.version_info[0] * 10 + sys.version_info[1]
- if ver >= 34:
+ if ver >= 36:
  self._encodings.add("pickle-v4")
  self._encodings.add("gzip+pickle-v4")

@@ -289,7 +289,7 @@ class TaskDataStore(object):
  except (SystemError, OverflowError) as e:
  raise DataException(
  "Artifact *%s* is very large (over 2GB). "
- "You need to use Python 3.4 or newer if you want to "
+ "You need to use Python 3.6 or newer if you want to "
  "serialize large objects." % name
  ) from e
  except TypeError as e:
@@ -352,7 +352,7 @@ class TaskDataStore(object):
  encode_type = "gzip+pickle-v2"
  if encode_type not in self._encodings:
  raise DataException(
- "Python 3.4 or later is required to load artifact '%s'" % name
+ "Python 3.6 or later is required to load artifact '%s'" % name
  )
  else:
  to_load[self._objects[name]].append(name)
metaflow/decorators.py CHANGED
@@ -152,7 +152,7 @@ class Decorator(object):
  # Note that by design, later values override previous ones.
  self.attributes, new_user_attributes = unpack_delayed_evaluator(self.attributes)
  self._user_defined_attributes.update(new_user_attributes)
- self.attributes = resolve_delayed_evaluator(self.attributes)
+ self.attributes = resolve_delayed_evaluator(self.attributes, to_dict=True)

  self._ran_init = True

@@ -638,7 +638,7 @@ StepFlag = NewType("StepFlag", bool)

  @overload
  def step(
- f: Callable[[FlowSpecDerived], None]
+ f: Callable[[FlowSpecDerived], None],
  ) -> Callable[[FlowSpecDerived, StepFlag], None]: ...


@@ -649,7 +649,7 @@ def step(


  def step(
- f: Union[Callable[[FlowSpecDerived], None], Callable[[FlowSpecDerived, Any], None]]
+ f: Union[Callable[[FlowSpecDerived], None], Callable[[FlowSpecDerived, Any], None]],
  ):
  """
  Marks a method in a FlowSpec as a Metaflow Step. Note that this
metaflow/extension_support/__init__.py CHANGED
@@ -103,9 +103,6 @@ def load_module(module_name):

  def get_modules(extension_point):
  modules_to_load = []
- if not _mfext_supported:
- _ext_debug("Not supported for your Python version -- 3.4+ is needed")
- return []
  if extension_point not in _extension_points:
  raise RuntimeError(
  "Metaflow extension point '%s' not supported" % extension_point
@@ -146,9 +143,6 @@ def dump_module_info(all_packages=None, pkgs_per_extension_point=None):


  def get_extensions_in_dir(d):
- if not _mfext_supported:
- _ext_debug("Not supported for your Python version -- 3.4+ is needed")
- return None, None
  return _get_extension_packages(ignore_info_file=True, restrict_to_directories=[d])


@@ -312,21 +306,16 @@ def multiload_all(modules, extension_point, dst_globals):


  _py_ver = sys.version_info[:2]
- _mfext_supported = False
  _aliased_modules = []

- if _py_ver >= (3, 4):
- import importlib.util
+ import importlib.util

- if _py_ver >= (3, 8):
- from importlib import metadata
- elif _py_ver >= (3, 7):
- from metaflow._vendor.v3_7 import importlib_metadata as metadata
- elif _py_ver >= (3, 6):
- from metaflow._vendor.v3_6 import importlib_metadata as metadata
- else:
- from metaflow._vendor.v3_5 import importlib_metadata as metadata
- _mfext_supported = True
+ if _py_ver >= (3, 8):
+ from importlib import metadata
+ elif _py_ver >= (3, 7):
+ from metaflow._vendor.v3_7 import importlib_metadata as metadata
+ else:
+ from metaflow._vendor.v3_6 import importlib_metadata as metadata

  # Extension points are the directories that can be present in a EXT_PKG to
  # contribute to that extension point. For example, if you have
@@ -355,10 +344,6 @@ def _ext_debug(*args, **kwargs):


  def _get_extension_packages(ignore_info_file=False, restrict_to_directories=None):
- if not _mfext_supported:
- _ext_debug("Not supported for your Python version -- 3.4+ is needed")
- return {}, {}
-
  # If we have an INFO file with the appropriate information (if running from a saved
  # code package for example), we use that directly
  # Pre-compute on _extension_points
@@ -381,12 +366,11 @@ def _get_extension_packages(ignore_info_file=False, restrict_to_directories=None
  try:
  extensions_module = importlib.import_module(EXT_PKG)
  except ImportError as e:
- if _py_ver >= (3, 6):
- # e.name is set to the name of the package that fails to load
- # so don't error ONLY IF the error is importing this module (but do
- # error if there is a transitive import error)
- if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
- raise
+ # e.name is set to the name of the package that fails to load
+ # so don't error ONLY IF the error is importing this module (but do
+ # error if there is a transitive import error)
+ if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
+ raise
  return {}, {}

  if restrict_to_directories:
@@ -894,20 +878,19 @@ def _attempt_load_module(module_name):
  try:
  extension_module = importlib.import_module(module_name)
  except ImportError as e:
- if _py_ver >= (3, 6):
- # e.name is set to the name of the package that fails to load
- # so don't error ONLY IF the error is importing this module (but do
- # error if there is a transitive import error)
- errored_names = [EXT_PKG]
- parts = module_name.split(".")
- for p in parts[1:]:
- errored_names.append("%s.%s" % (errored_names[-1], p))
- if not (isinstance(e, ModuleNotFoundError) and e.name in errored_names):
- print(
- "The following exception occurred while trying to load '%s' ('%s')"
- % (EXT_PKG, module_name)
- )
- raise
+ # e.name is set to the name of the package that fails to load
+ # so don't error ONLY IF the error is importing this module (but do
+ # error if there is a transitive import error)
+ errored_names = [EXT_PKG]
+ parts = module_name.split(".")
+ for p in parts[1:]:
+ errored_names.append("%s.%s" % (errored_names[-1], p))
+ if not (isinstance(e, ModuleNotFoundError) and e.name in errored_names):
+ print(
+ "The following exception occurred while trying to load '%s' ('%s')"
+ % (EXT_PKG, module_name)
+ )
+ raise
  _ext_debug(" Unknown error when loading '%s': %s" % (module_name, e))
  return None
  else:
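Note: a standalone sketch of the ImportError-handling pattern retained above (illustrative, not the extension_support code itself): the error is swallowed only when the missing module is the extension package, and re-raised when something inside the extension fails to import.

import importlib

EXT_PKG = "metaflow_extensions"  # assumed to match Metaflow's EXT_PKG constant
try:
    importlib.import_module(EXT_PKG)
except ImportError as e:
    if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
        raise  # a transitive import inside the extension failed
    # the extension package simply is not installed -- safe to continue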
metaflow/parameters.py CHANGED
@@ -373,7 +373,7 @@ class Parameter(object):
  # Do it one item at a time so errors are ignored at that level (as opposed to
  # at the entire kwargs level)
  self.kwargs = {
- k: resolve_delayed_evaluator(v, ignore_errors=ignore_errors)
+ k: resolve_delayed_evaluator(v, ignore_errors=ignore_errors, to_dict=True)
  for k, v in self.kwargs.items()
  }

@@ -382,7 +382,7 @@ class Parameter(object):
  for key, value in self._override_kwargs.items():
  if value is not None:
  self.kwargs[key] = resolve_delayed_evaluator(
- value, ignore_errors=ignore_errors
+ value, ignore_errors=ignore_errors, to_dict=True
  )
  # Set two default values if no-one specified them
  self.kwargs.setdefault("required", False)
metaflow/plugins/argo/argo_workflows_cli.py CHANGED
@@ -344,20 +344,20 @@ def create(


  def check_python_version(obj):
- # argo-workflows integration for Metaflow isn't supported for Py versions below 3.5.
+ # argo-workflows integration for Metaflow isn't supported for Py versions below 3.6.
  # This constraint can very well be lifted if desired.
- if sys.version_info < (3, 5):
+ if sys.version_info < (3, 6):
  obj.echo("")
  obj.echo(
  "Metaflow doesn't support Argo Workflows for Python %s right now."
  % platform.python_version()
  )
  obj.echo(
- "Please upgrade your Python interpreter to version 3.5 (or higher) or "
+ "Please upgrade your Python interpreter to version 3.6 (or higher) or "
  "reach out to us at slack.outerbounds.co for more help."
  )
  raise UnsupportedPythonVersion(
- "Try again with a more recent version of Python (>=3.5)."
+ "Try again with a more recent version of Python (>=3.6)."
  )


metaflow/plugins/argo/argo_workflows_deployer_objects.py CHANGED
@@ -9,59 +9,16 @@ from metaflow.exception import MetaflowException
  from metaflow.plugins.argo.argo_client import ArgoClient
  from metaflow.metaflow_config import KUBERNETES_NAMESPACE
  from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
- from metaflow.runner.deployer import Deployer, DeployedFlow, TriggeredRun
+ from metaflow.runner.deployer import (
+ Deployer,
+ DeployedFlow,
+ TriggeredRun,
+ generate_fake_flow_file_contents,
+ )

  from metaflow.runner.utils import get_lower_level_group, handle_timeout, temporary_fifo


- def generate_fake_flow_file_contents(
- flow_name: str, param_info: dict, project_name: Optional[str] = None
- ):
- params_code = ""
- for _, param_details in param_info.items():
- param_python_var_name = param_details["python_var_name"]
- param_name = param_details["name"]
- param_type = param_details["type"]
- param_help = param_details["description"]
- param_required = param_details["is_required"]
-
- if param_type == "JSON":
- params_code += (
- f" {param_python_var_name} = Parameter('{param_name}', "
- f"type=JSONType, help='''{param_help}''', required={param_required})\n"
- )
- elif param_type == "FilePath":
- is_text = param_details.get("is_text", True)
- encoding = param_details.get("encoding", "utf-8")
- params_code += (
- f" {param_python_var_name} = IncludeFile('{param_name}', "
- f"is_text={is_text}, encoding='{encoding}', help='''{param_help}''', "
- f"required={param_required})\n"
- )
- else:
- params_code += (
- f" {param_python_var_name} = Parameter('{param_name}', "
- f"type={param_type}, help='''{param_help}''', required={param_required})\n"
- )
-
- project_decorator = f"@project(name='{project_name}')\n" if project_name else ""
-
- contents = f"""\
- from metaflow import FlowSpec, Parameter, IncludeFile, JSONType, step, project
- {project_decorator}class {flow_name}(FlowSpec):
- {params_code}
- @step
- def start(self):
- self.next(self.end)
- @step
- def end(self):
- pass
- if __name__ == '__main__':
- {flow_name}()
- """
- return contents
-
-
  class ArgoWorkflowsTriggeredRun(TriggeredRun):
  """
  A class representing a triggered Argo Workflow execution.
metaflow/plugins/aws/aws_client.py CHANGED
@@ -35,6 +35,12 @@ class Boto3ClientProvider(object):
  "Could not import module 'boto3'. Install boto3 first."
  )

+ # Convert dictionary config to Config object if needed
+ if "config" in client_params and not isinstance(
+ client_params["config"], Config
+ ):
+ client_params["config"] = Config(**client_params["config"])
+
  if module == "s3" and (
  "config" not in client_params or client_params["config"].retries is None
  ):
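Note: a minimal sketch of the new aws_client behavior, assuming boto3/botocore are installed — a plain dict supplied as "config" in the client parameters is now converted into a botocore Config object before the client is built:

from botocore.config import Config

client_params = {"config": {"retries": {"max_attempts": 5, "mode": "standard"}}}
if "config" in client_params and not isinstance(client_params["config"], Config):
    client_params["config"] = Config(**client_params["config"])
print(client_params["config"].retries)  # {'max_attempts': 5, 'mode': 'standard'}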
metaflow/plugins/cards/card_modules/chevron/renderer.py CHANGED
@@ -61,7 +61,7 @@ def _get_key(key, scopes, warn, keep, def_ldel, def_rdel):
  # Loop through the scopes
  for scope in scopes:
  try:
- # For every dot seperated key
+ # For every dot separated key
  for child in key.split("."):
  # Move into the scope
  try:
metaflow/plugins/cards/card_modules/test_cards.py CHANGED
@@ -34,7 +34,7 @@ class TestPathSpecCard(MetaflowCard):
  class TestEditableCard(MetaflowCard):
  type = "test_editable_card"

- seperator = "$&#!!@*"
+ separator = "$&#!!@*"

  ALLOW_USER_COMPONENTS = True

@@ -42,13 +42,13 @@ class TestEditableCard(MetaflowCard):
  self._components = components

  def render(self, task):
- return self.seperator.join([str(comp) for comp in self._components])
+ return self.separator.join([str(comp) for comp in self._components])


  class TestEditableCard2(MetaflowCard):
  type = "test_editable_card_2"

- seperator = "$&#!!@*"
+ separator = "$&#!!@*"

  ALLOW_USER_COMPONENTS = True

@@ -56,19 +56,19 @@ class TestEditableCard2(MetaflowCard):
  self._components = components

  def render(self, task):
- return self.seperator.join([str(comp) for comp in self._components])
+ return self.separator.join([str(comp) for comp in self._components])


  class TestNonEditableCard(MetaflowCard):
  type = "test_non_editable_card"

- seperator = "$&#!!@*"
+ separator = "$&#!!@*"

  def __init__(self, components=[], **kwargs):
  self._components = components

  def render(self, task):
- return self.seperator.join([str(comp) for comp in self._components])
+ return self.separator.join([str(comp) for comp in self._components])


  class TestMockCard(MetaflowCard):
metaflow/plugins/cards/component_serializer.py CHANGED
@@ -57,15 +57,8 @@ class ComponentStore:
  The `_component_map` attribute is supposed to be a dictionary so that we can access the components by their ids.
  But we also want to maintain order in which components are inserted since all of these components are going to be visible on a UI.
  Since python3.6 dictionaries are ordered by default so we can use the default python `dict`.
- For python3.5 and below we need to use an OrderedDict since `dict`'s are not ordered by default.
  """
- python_version = int(platform.python_version_tuple()[0]) * 10 + int(
- platform.python_version_tuple()[1]
- )
- if python_version < 36:
- self._component_map = OrderedDict()
- else:
- self._component_map = {}
+ self._component_map = {}

  def __init__(self, logger, card_type=None, components=None, user_set_id=None):
  self._logger = logger
metaflow/plugins/datatools/s3/s3op.py CHANGED
@@ -131,7 +131,7 @@ def normalize_client_error(err):
  except ValueError:
  if error_code in ("AccessDenied", "AllAccessDisabled", "InvalidAccessKeyId"):
  return 403
- if error_code == "NoSuchKey":
+ if error_code in ("NoSuchKey", "NoSuchBucket"):
  return 404
  if error_code == "InvalidRange":
  return 416
metaflow/plugins/metadata_providers/service.py CHANGED
@@ -72,14 +72,18 @@ class ServiceMetadataProvider(MetadataProvider):
  @classmethod
  def compute_info(cls, val):
  v = val.rstrip("/")
- try:
- resp = cls._session.get(
- os.path.join(v, "ping"), headers=SERVICE_HEADERS.copy()
- )
- resp.raise_for_status()
- except:  # noqa E722
- raise ValueError("Metaflow service [%s] unreachable." % v)
- return v
+ for i in range(SERVICE_RETRY_COUNT):
+ try:
+ resp = cls._session.get(
+ os.path.join(v, "ping"), headers=SERVICE_HEADERS.copy()
+ )
+ resp.raise_for_status()
+ except:  # noqa E722
+ time.sleep(2 ** (i - 1))
+ else:
+ return v
+
+ raise ValueError("Metaflow service [%s] unreachable." % v)

  @classmethod
  def default_info(cls):
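Note: the retry added above backs off exponentially between "ping" attempts. A small sketch of the sleep schedule (SERVICE_RETRY_COUNT is assumed to be 5 here purely for illustration):

SERVICE_RETRY_COUNT = 5  # assumed value, for illustration only
print([2 ** (i - 1) for i in range(SERVICE_RETRY_COUNT)])
# -> [0.5, 1, 2, 4, 8] seconds slept after each failed attempt before giving up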
metaflow/plugins/package_cli.py CHANGED
@@ -38,14 +38,24 @@ def info(obj):


  @package.command(help="List files included in the code package.")
+ @click.option(
+ "--archive/--no-archive",
+ default=False,
+ help="If True, lists the file paths as present in the code package archive; "
+ "otherwise, lists the files on your filesystem included in the code package",
+ show_default=True,
+ )
  @click.pass_obj
- def list(obj):
+ def list(obj, archive=False):
  obj.echo(
  "Files included in the code package " "(change with --package-suffixes):",
  fg="magenta",
  bold=False,
  )
- obj.echo_always("\n".join(path for path, _ in obj.package.path_tuples()))
+ if archive:
+ obj.echo_always("\n".join(path for _, path in obj.package.path_tuples()))
+ else:
+ obj.echo_always("\n".join(path for path, _ in obj.package.path_tuples()))


  @package.command(help="Save the current code package in a tar file")
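Note: an illustrative sketch of the new --archive flag, assuming path_tuples() yields (filesystem_path, archive_path) pairs as the help text above implies (the paths here are hypothetical):

path_tuples = [
    ("/home/user/flow.py", "flow.py"),
    ("/home/user/metaflow/version.py", "metaflow/version.py"),
]
print("\n".join(path for path, _ in path_tuples))  # default: paths on your filesystem
print("\n".join(path for _, path in path_tuples))  # --archive: paths inside the archive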
metaflow/plugins/pypi/bootstrap.py CHANGED
@@ -14,7 +14,7 @@ from urllib.request import urlopen
  from metaflow.metaflow_config import DATASTORE_LOCAL_DIR, CONDA_USE_FAST_INIT
  from metaflow.plugins import DATASTORES
  from metaflow.plugins.pypi.utils import MICROMAMBA_MIRROR_URL, MICROMAMBA_URL
- from metaflow.util import which
+ from metaflow.util import which, get_metaflow_root
  from urllib.request import Request
  import warnings

@@ -366,7 +366,7 @@ if __name__ == "__main__":
  # Move MAGIC_FILE inside local datastore.
  os.makedirs(manifest_dir, exist_ok=True)
  shutil.move(
- os.path.join(os.getcwd(), MAGIC_FILE),
+ os.path.join(get_metaflow_root(), MAGIC_FILE),
  os.path.join(manifest_dir, MAGIC_FILE),
  )
  with open(os.path.join(manifest_dir, MAGIC_FILE)) as f: