ob-metaflow 2.12.36.2__py2.py3-none-any.whl → 2.12.36.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (54)
  1. metaflow/__init__.py +0 -3
  2. metaflow/cli.py +697 -84
  3. metaflow/cli_args.py +0 -17
  4. metaflow/cmd/develop/stub_generator.py +2 -9
  5. metaflow/decorators.py +2 -63
  6. metaflow/extension_support/plugins.py +27 -41
  7. metaflow/flowspec.py +16 -156
  8. metaflow/includefile.py +22 -50
  9. metaflow/metaflow_config.py +1 -1
  10. metaflow/package.py +3 -17
  11. metaflow/parameters.py +23 -80
  12. metaflow/plugins/__init__.py +0 -4
  13. metaflow/plugins/airflow/airflow_cli.py +0 -1
  14. metaflow/plugins/argo/argo_workflows.py +1 -41
  15. metaflow/plugins/argo/argo_workflows_cli.py +0 -1
  16. metaflow/plugins/argo/argo_workflows_deployer_objects.py +1 -5
  17. metaflow/plugins/aws/batch/batch_decorator.py +2 -2
  18. metaflow/plugins/aws/step_functions/step_functions.py +0 -32
  19. metaflow/plugins/aws/step_functions/step_functions_cli.py +0 -1
  20. metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +0 -3
  21. metaflow/plugins/datatools/s3/s3op.py +3 -3
  22. metaflow/plugins/kubernetes/kubernetes_cli.py +1 -1
  23. metaflow/plugins/kubernetes/kubernetes_decorator.py +2 -2
  24. metaflow/plugins/pypi/conda_decorator.py +10 -20
  25. metaflow/plugins/pypi/pypi_decorator.py +9 -11
  26. metaflow/plugins/timeout_decorator.py +2 -2
  27. metaflow/runner/click_api.py +19 -73
  28. metaflow/runner/deployer.py +1 -1
  29. metaflow/runner/deployer_impl.py +2 -2
  30. metaflow/runner/metaflow_runner.py +1 -4
  31. metaflow/runner/nbdeploy.py +0 -2
  32. metaflow/runner/nbrun.py +1 -1
  33. metaflow/runner/subprocess_manager.py +1 -3
  34. metaflow/runner/utils.py +20 -37
  35. metaflow/runtime.py +73 -111
  36. metaflow/sidecar/sidecar_worker.py +1 -1
  37. metaflow/util.py +0 -17
  38. metaflow/version.py +1 -1
  39. {ob_metaflow-2.12.36.2.dist-info → ob_metaflow-2.12.36.3.dist-info}/METADATA +2 -3
  40. {ob_metaflow-2.12.36.2.dist-info → ob_metaflow-2.12.36.3.dist-info}/RECORD +44 -54
  41. metaflow/cli_components/__init__.py +0 -0
  42. metaflow/cli_components/dump_cmd.py +0 -96
  43. metaflow/cli_components/init_cmd.py +0 -51
  44. metaflow/cli_components/run_cmds.py +0 -358
  45. metaflow/cli_components/step_cmd.py +0 -189
  46. metaflow/cli_components/utils.py +0 -140
  47. metaflow/user_configs/__init__.py +0 -0
  48. metaflow/user_configs/config_decorators.py +0 -563
  49. metaflow/user_configs/config_options.py +0 -495
  50. metaflow/user_configs/config_parameters.py +0 -386
  51. {ob_metaflow-2.12.36.2.dist-info → ob_metaflow-2.12.36.3.dist-info}/LICENSE +0 -0
  52. {ob_metaflow-2.12.36.2.dist-info → ob_metaflow-2.12.36.3.dist-info}/WHEEL +0 -0
  53. {ob_metaflow-2.12.36.2.dist-info → ob_metaflow-2.12.36.3.dist-info}/entry_points.txt +0 -0
  54. {ob_metaflow-2.12.36.2.dist-info → ob_metaflow-2.12.36.3.dist-info}/top_level.txt +0 -0
metaflow/package.py CHANGED
@@ -6,7 +6,6 @@ import time
 import json
 from io import BytesIO
 
-from .user_configs.config_parameters import CONFIG_FILE, dump_config_values
 from .extension_support import EXT_PKG, package_mfext_all
 from .metaflow_config import DEFAULT_PACKAGE_SUFFIXES
 from .exception import MetaflowException
@@ -152,23 +151,11 @@ class MetaflowPackage(object):
         for path_tuple in self._walk(flowdir, suffixes=self.suffixes):
             yield path_tuple
 
-    def _add_configs(self, tar):
-        buf = BytesIO()
-        buf.write(json.dumps(dump_config_values(self._flow)).encode("utf-8"))
-        self._add_file(tar, os.path.basename(CONFIG_FILE), buf)
-
     def _add_info(self, tar):
+        info = tarfile.TarInfo(os.path.basename(INFO_FILE))
+        env = self.environment.get_environment_info(include_ext_info=True)
         buf = BytesIO()
-        buf.write(
-            json.dumps(
-                self.environment.get_environment_info(include_ext_info=True)
-            ).encode("utf-8")
-        )
-        self._add_file(tar, os.path.basename(INFO_FILE), buf)
-
-    @staticmethod
-    def _add_file(tar, filename, buf):
-        info = tarfile.TarInfo(filename)
+        buf.write(json.dumps(env).encode("utf-8"))
         buf.seek(0)
         info.size = len(buf.getvalue())
         # Setting this default to Dec 3, 2019
@@ -188,7 +175,6 @@ class MetaflowPackage(object):
             fileobj=buf, mode="w:gz", compresslevel=3, dereference=True
         ) as tar:
            self._add_info(tar)
-            self._add_configs(tar)
             for path, arcname in self.path_tuples():
                 tar.add(path, arcname=arcname, recursive=False, filter=no_mtime)
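Net effect of the hunks above, shown as a standalone sketch: the per-run config dump is no longer packaged, and the INFO entry is written inline rather than routed through the removed _add_file helper. INFO_FILE is a stand-in constant here, and the mtime value and addfile() tail are assumed from the truncated context:

    import json
    import os
    import tarfile
    from io import BytesIO

    INFO_FILE = "INFO"  # stand-in for metaflow's INFO_FILE constant


    def add_info(tar, environment_info):
        # Build the INFO tar entry inline (previously done by the
        # removed _add_file() helper).
        info = tarfile.TarInfo(os.path.basename(INFO_FILE))
        buf = BytesIO()
        buf.write(json.dumps(environment_info).encode("utf-8"))
        buf.seek(0)
        info.size = len(buf.getvalue())
        # A fixed mtime ("Dec 3, 2019" per the source comment) keeps archives
        # reproducible; the exact value and the addfile() call below are
        # assumed from the truncated context.
        info.mtime = 1575360000
        tar.addfile(info, buf)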
metaflow/parameters.py CHANGED
@@ -3,7 +3,7 @@ import json
 from contextlib import contextmanager
 from threading import local
 
-from typing import Any, Callable, Dict, NamedTuple, Optional, TYPE_CHECKING, Type, Union
+from typing import Any, Callable, Dict, NamedTuple, Optional, Type, Union
 
 from metaflow._vendor import click
 
@@ -14,9 +14,6 @@ from .exception import (
     MetaflowException,
 )
 
-if TYPE_CHECKING:
-    from .user_configs.config_parameters import ConfigValue
-
 try:
     # Python2
     strtype = basestring
@@ -35,7 +32,6 @@ ParameterContext = NamedTuple(
         ("parameter_name", str),
         ("logger", Callable[..., None]),
         ("ds_type", str),
-        ("configs", Optional["ConfigValue"]),
     ],
 )
 
@@ -76,16 +72,6 @@ def flow_context(flow_cls):
 context_proto = None
 
 
-def replace_flow_context(flow_cls):
-    """
-    Replace the current flow context with a new flow class. This is used
-    when we change the current flow class after having run user configuration functions
-    """
-    current_flow.flow_cls_stack = current_flow.flow_cls_stack[1:]
-    current_flow.flow_cls_stack.insert(0, flow_cls)
-    current_flow.flow_cls = current_flow.flow_cls_stack[0]
-
-
 class JSONTypeClass(click.ParamType):
     name = "JSON"
 
@@ -224,18 +210,12 @@ class DeployTimeField(object):
 def deploy_time_eval(value):
     if isinstance(value, DeployTimeField):
         return value(deploy_time=True)
-    elif isinstance(value, DelayedEvaluationParameter):
-        return value(return_str=True)
     else:
         return value
 
 
 # this is called by cli.main
-def set_parameter_context(flow_name, echo, datastore, configs):
-    from .user_configs.config_parameters import (
-        ConfigValue,
-    )  # Prevent circular dependency
-
+def set_parameter_context(flow_name, echo, datastore):
     global context_proto
     context_proto = ParameterContext(
         flow_name=flow_name,
@@ -243,7 +223,6 @@ def set_parameter_context(flow_name, echo, datastore):
         parameter_name=None,
         logger=echo,
         ds_type=datastore.TYPE,
-        configs=ConfigValue(dict(configs)),
     )
 
 
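With DelayedEvaluationParameter gone, deploy_time_eval only special-cases DeployTimeField again. A minimal runnable sketch of that dispatch, using a stand-in field class rather than Metaflow's real one:

    class DeployTimeFieldSketch:
        # Stand-in for metaflow.parameters.DeployTimeField: it wraps a
        # user-supplied callable that is evaluated when the flow is deployed.
        def __init__(self, fun):
            self.fun = fun

        def __call__(self, deploy_time=False):
            return self.fun()


    def deploy_time_eval_sketch(value):
        # Post-change dispatch: only deploy-time fields are evaluated; the
        # DelayedEvaluationParameter branch (config support) is removed.
        if isinstance(value, DeployTimeFieldSketch):
            return value(deploy_time=True)
        return value


    assert deploy_time_eval_sketch(42) == 42
    assert deploy_time_eval_sketch(DeployTimeFieldSketch(lambda: "resolved")) == "resolved"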
@@ -300,11 +279,7 @@ class Parameter(object):
     ----------
     name : str
         User-visible parameter name.
-    default : Union[str, float, int, bool, Dict[str, Any],
-        Callable[
-            [ParameterContext], Union[str, float, int, bool, Dict[str, Any]]
-        ],
-        ], optional, default None
+    default : str or float or int or bool or `JSONType` or a function.
         Default value for the parameter. Use a special `JSONType` class to
         indicate that the value must be a valid JSON object. A function
         implies that the parameter corresponds to a *deploy-time parameter*.
@@ -313,19 +288,15 @@ class Parameter(object):
         If `default` is not specified, define the parameter type. Specify
         one of `str`, `float`, `int`, `bool`, or `JSONType`. If None, defaults
         to the type of `default` or `str` if none specified.
-    help : str, optional, default None
+    help : str, optional
         Help text to show in `run --help`.
-    required : bool, optional, default None
-        Require that the user specified a value for the parameter. Note that if
-        a default is provide, the required flag is ignored.
-        A value of None is equivalent to False.
-    show_default : bool, optional, default None
-        If True, show the default value in the help text. A value of None is equivalent
-        to True.
+    required : bool, default False
+        Require that the user specified a value for the parameter.
+        `required=True` implies that the `default` is not used.
+    show_default : bool, default True
+        If True, show the default value in the help text.
     """
 
-    IS_CONFIG_PARAMETER = False
-
     def __init__(
         self,
         name: str,
@@ -336,53 +307,31 @@ class Parameter(object):
                 int,
                 bool,
                 Dict[str, Any],
-                Callable[
-                    [ParameterContext], Union[str, float, int, bool, Dict[str, Any]]
-                ],
+                Callable[[], Union[str, float, int, bool, Dict[str, Any]]],
             ]
         ] = None,
         type: Optional[
             Union[Type[str], Type[float], Type[int], Type[bool], JSONTypeClass]
         ] = None,
         help: Optional[str] = None,
-        required: Optional[bool] = None,
-        show_default: Optional[bool] = None,
+        required: bool = False,
+        show_default: bool = True,
         **kwargs: Dict[str, Any]
     ):
         self.name = name
         self.kwargs = kwargs
-        self._override_kwargs = {
+        for k, v in {
             "default": default,
             "type": type,
             "help": help,
             "required": required,
             "show_default": show_default,
-        }
-
-    def init(self):
-        # Prevent circular import
-        from .user_configs.config_parameters import (
-            resolve_delayed_evaluator,
-            unpack_delayed_evaluator,
-        )
-
-        # Resolve any value from configurations
-        self.kwargs = unpack_delayed_evaluator(self.kwargs)
-        self.kwargs = resolve_delayed_evaluator(self.kwargs)
-
-        # This was the behavior before configs: values specified in args would override
-        # stuff in kwargs which is what we implement here as well
-        for key, value in self._override_kwargs.items():
-            if value is not None:
-                self.kwargs[key] = value
-        # Set two default values if no-one specified them
-        self.kwargs.setdefault("required", False)
-        self.kwargs.setdefault("show_default", True)
-
-        # Continue processing kwargs free of any configuration values :)
+        }.items():
+            if v is not None:
+                self.kwargs[k] = v
 
         # TODO: check that the type is one of the supported types
-        param_type = self.kwargs["type"] = self._get_type(self.kwargs)
+        param_type = self.kwargs["type"] = self._get_type(kwargs)
 
         reserved_params = [
             "params",
@@ -407,27 +356,23 @@ class Parameter(object):
             raise MetaflowException(
                 "Parameter name '%s' is a reserved "
                 "word. Please use a different "
-                "name for your parameter." % (self.name)
+                "name for your parameter." % (name)
             )
 
         # make sure the user is not trying to pass a function in one of the
         # fields that don't support function-values yet
         for field in ("show_default", "separator", "required"):
-            if callable(self.kwargs.get(field)):
+            if callable(kwargs.get(field)):
                 raise MetaflowException(
                     "Parameter *%s*: Field '%s' cannot "
-                    "have a function as its value" % (self.name, field)
+                    "have a function as its value" % (name, field)
                 )
 
         # default can be defined as a function
         default_field = self.kwargs.get("default")
         if callable(default_field) and not isinstance(default_field, DeployTimeField):
             self.kwargs["default"] = DeployTimeField(
-                self.name,
-                param_type,
-                "default",
-                self.kwargs["default"],
-                return_str=True,
+                name, param_type, "default", self.kwargs["default"], return_str=True
             )
 
         # note that separator doesn't work with DeployTimeFields unless you
@@ -436,7 +381,7 @@ class Parameter(object):
         if self.separator and not self.is_string_type:
             raise MetaflowException(
                 "Parameter *%s*: Separator is only allowed "
-                "for string parameters." % self.name
+                "for string parameters." % name
             )
 
     def __repr__(self):
@@ -492,9 +437,7 @@ def add_custom_parameters(deploy_mode=False):
         flow_cls = getattr(current_flow, "flow_cls", None)
         if flow_cls is None:
             return cmd
-        parameters = [
-            p for _, p in flow_cls._get_parameters() if not p.IS_CONFIG_PARAMETER
-        ]
+        parameters = [p for _, p in flow_cls._get_parameters()]
        for arg in parameters[::-1]:
             kwargs = arg.option_kwargs(deploy_mode)
             cmd.params.insert(0, click.Option(("--" + arg.name,), **kwargs))
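Taken together, the Parameter hunks restore eager kwargs merging: explicit arguments override **kwargs at construction time, and required=False / show_default=True become plain signature defaults rather than values resolved later in an init() hook. A minimal sketch of the merge, with simplified names:

    def merge_parameter_kwargs(default=None, type=None, help=None,
                               required=False, show_default=True, **kwargs):
        # Explicit arguments override anything passed through **kwargs,
        # mirroring the restored Parameter.__init__ behavior.
        merged = dict(kwargs)
        for k, v in {
            "default": default,
            "type": type,
            "help": help,
            "required": required,
            "show_default": show_default,
        }.items():
            if v is not None:
                merged[k] = v
        return merged

    opts = merge_parameter_kwargs(default=3, help="alpha", separator=",")
    assert opts == {"separator": ",", "default": 3, "help": "alpha",
                    "required": False, "show_default": True}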
metaflow/plugins/__init__.py CHANGED
@@ -164,10 +164,6 @@ def get_plugin_cli():
     return resolve_plugins("cli")
 
 
-def get_plugin_cli_path():
-    return resolve_plugins("cli", path_only=True)
-
-
 STEP_DECORATORS = resolve_plugins("step_decorator")
 FLOW_DECORATORS = resolve_plugins("flow_decorator")
 ENVIRONMENTS = resolve_plugins("environment")
metaflow/plugins/argo/argo_workflows_cli.py CHANGED
@@ -283,7 +283,6 @@ def make_flow(
 ):
     # Attach @kubernetes.
     decorators._attach_decorators(obj.flow, [KubernetesDecorator.name])
-    decorators._init(obj.flow)
 
     decorators._init_step_decorators(
         obj.flow, obj.graph, obj.environment, obj.flow_datastore, obj.logger
metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -61,7 +61,6 @@ from metaflow.plugins.kubernetes.kubernetes import (
 )
 from metaflow.plugins.kubernetes.kubernetes_jobsets import KubernetesArgoJobSet
 from metaflow.unbounded_foreach import UBF_CONTROL, UBF_TASK
-from metaflow.user_configs.config_options import ConfigInput
 from metaflow.util import (
     compress_list,
     dict_to_cli_options,
@@ -170,7 +169,6 @@ class ArgoWorkflows(object):
         self.enable_heartbeat_daemon = enable_heartbeat_daemon
         self.enable_error_msg_capture = enable_error_msg_capture
         self.parameters = self._process_parameters()
-        self.config_parameters = self._process_config_parameters()
         self.triggers, self.trigger_options = self._process_triggers()
         self._schedule, self._timezone = self._get_schedule()
 
@@ -458,10 +456,6 @@ class ArgoWorkflows(object):
                     "case-insensitive." % param.name
                 )
             seen.add(norm)
-            # NOTE: We skip config parameters as these do not have dynamic values,
-            # and need to be treated differently.
-            if param.IS_CONFIG_PARAMETER:
-                continue
 
             extra_attrs = {}
             if param.kwargs.get("type") == JSONType:
@@ -495,7 +489,6 @@ class ArgoWorkflows(object):
             # execution - which needs to be fixed imminently.
             if not is_required or default_value is not None:
                 default_value = json.dumps(default_value)
-
             parameters[param.name] = dict(
                 name=param.name,
                 value=default_value,
@@ -506,27 +499,6 @@ class ArgoWorkflows(object):
             )
         return parameters
 
-    def _process_config_parameters(self):
-        parameters = []
-        seen = set()
-        for var, param in self.flow._get_parameters():
-            if not param.IS_CONFIG_PARAMETER:
-                continue
-            # Throw an exception if the parameter is specified twice.
-            norm = param.name.lower()
-            if norm in seen:
-                raise MetaflowException(
-                    "Parameter *%s* is specified twice. "
-                    "Note that parameter names are "
-                    "case-insensitive." % param.name
-                )
-            seen.add(norm)
-
-            parameters.append(
-                dict(name=param.name, kv_name=ConfigInput.make_key_name(param.name))
-            )
-        return parameters
-
     def _process_triggers(self):
         # Impute triggers for Argo Workflow Template specified through @trigger and
         # @trigger_on_finish decorators
@@ -549,13 +521,8 @@ class ArgoWorkflows(object):
         # convert them to lower case since Metaflow parameters are case
         # insensitive.
         seen = set()
-        # NOTE: We skip config parameters as their values can not be set through event payloads
         params = set(
-            [
-                param.name.lower()
-                for var, param in self.flow._get_parameters()
-                if not param.IS_CONFIG_PARAMETER
-            ]
+            [param.name.lower() for var, param in self.flow._get_parameters()]
         )
         trigger_deco = self.flow._flow_decorators.get("trigger")[0]
         trigger_deco.format_deploytime_value()
@@ -1754,13 +1721,6 @@ class ArgoWorkflows(object):
             metaflow_version["production_token"] = self.production_token
             env["METAFLOW_VERSION"] = json.dumps(metaflow_version)
 
-            # map config values
-            cfg_env = {
-                param["name"]: param["kv_name"] for param in self.config_parameters
-            }
-            if cfg_env:
-                env["METAFLOW_FLOW_CONFIG_VALUE"] = json.dumps(cfg_env)
-
             # Set the template inputs and outputs for passing state. Very simply,
             # the container template takes in input-paths as input and outputs
             # the task-id (which feeds in as input-paths to the subsequent task).
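The case-insensitive duplicate check kept by these hunks (and mirrored in the Step Functions hunks below) can be read in isolation; a minimal sketch:

    def check_unique_names(param_names):
        # Metaflow parameter names are case-insensitive, so "Alpha" and
        # "alpha" collide; this mirrors the seen-set check in the hunks above.
        seen = set()
        for name in param_names:
            norm = name.lower()
            if norm in seen:
                raise ValueError(
                    "Parameter *%s* is specified twice. "
                    "Note that parameter names are case-insensitive." % name
                )
            seen.add(norm)

    check_unique_names(["alpha", "beta"])     # ok
    # check_unique_names(["alpha", "Alpha"])  # would raise ValueError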
metaflow/plugins/airflow/airflow_cli.py CHANGED
@@ -470,7 +470,6 @@ def make_flow(
     decorators._attach_decorators(
         obj.flow, [KubernetesDecorator.name, EnvironmentDecorator.name]
     )
-    decorators._init(obj.flow)
 
     decorators._init_step_decorators(
         obj.flow, obj.graph, obj.environment, obj.flow_datastore, obj.logger
metaflow/plugins/argo/argo_workflows_deployer_objects.py CHANGED
@@ -97,7 +97,6 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
         )
 
         command_obj = self.deployer.spm.get(pid)
-        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
     def unsuspend(self, **kwargs) -> bool:
@@ -132,7 +131,6 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
         )
 
         command_obj = self.deployer.spm.get(pid)
-        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
     def terminate(self, **kwargs) -> bool:
@@ -167,7 +165,6 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
         )
 
         command_obj = self.deployer.spm.get(pid)
-        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
     @property
@@ -322,7 +319,6 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
         )
 
         command_obj = self.deployer.spm.get(pid)
-        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
     def trigger(self, **kwargs) -> ArgoWorkflowsTriggeredRun:
@@ -365,7 +361,7 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
         content = handle_timeout(
             attribute_file_fd, command_obj, self.deployer.file_read_timeout
         )
-        command_obj.sync_wait()
+
         if command_obj.process.returncode == 0:
             return ArgoWorkflowsTriggeredRun(
                 deployer=self.deployer, content=content
metaflow/plugins/aws/batch/batch_decorator.py CHANGED
@@ -138,8 +138,8 @@ class BatchDecorator(StepDecorator):
     supports_conda_environment = True
     target_platform = "linux-64"
 
-    def init(self):
-        super(BatchDecorator, self).init()
+    def __init__(self, attributes=None, statically_defined=False):
+        super(BatchDecorator, self).__init__(attributes, statically_defined)
 
         # If no docker image is explicitly specified, impute a default image.
         if not self.attributes["image"]:
metaflow/plugins/aws/step_functions/step_functions.py CHANGED
@@ -18,7 +18,6 @@ from metaflow.metaflow_config import (
     SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH,
 )
 from metaflow.parameters import deploy_time_eval
-from metaflow.user_configs.config_options import ConfigInput
 from metaflow.util import dict_to_cli_options, to_pascalcase
 
 from ..batch.batch import Batch
@@ -72,7 +71,6 @@ class StepFunctions(object):
         self.username = username
         self.max_workers = max_workers
         self.workflow_timeout = workflow_timeout
-        self.config_parameters = self._process_config_parameters()
 
         # https://aws.amazon.com/blogs/aws/step-functions-distributed-map-a-serverless-solution-for-large-scale-parallel-data-processing/
         self.use_distributed_map = use_distributed_map
@@ -487,10 +485,6 @@ class StepFunctions(object):
                     "case-insensitive." % param.name
                 )
             seen.add(norm)
-            # NOTE: We skip config parameters as these do not have dynamic values,
-            # and need to be treated differently.
-            if param.IS_CONFIG_PARAMETER:
-                continue
 
             is_required = param.kwargs.get("required", False)
             # Throw an exception if a schedule is set for a flow with required
@@ -507,27 +501,6 @@ class StepFunctions(object):
             parameters.append(dict(name=param.name, value=value))
         return parameters
 
-    def _process_config_parameters(self):
-        parameters = []
-        seen = set()
-        for var, param in self.flow._get_parameters():
-            if not param.IS_CONFIG_PARAMETER:
-                continue
-            # Throw an exception if the parameter is specified twice.
-            norm = param.name.lower()
-            if norm in seen:
-                raise MetaflowException(
-                    "Parameter *%s* is specified twice. "
-                    "Note that parameter names are "
-                    "case-insensitive." % param.name
-                )
-            seen.add(norm)
-
-            parameters.append(
-                dict(name=param.name, kv_name=ConfigInput.make_key_name(param.name))
-            )
-        return parameters
-
     def _batch(self, node):
         attrs = {
             # metaflow.user is only used for setting the AWS Job Name.
@@ -774,11 +747,6 @@ class StepFunctions(object):
         metaflow_version["production_token"] = self.production_token
         env["METAFLOW_VERSION"] = json.dumps(metaflow_version)
 
-        # map config values
-        cfg_env = {param["name"]: param["kv_name"] for param in self.config_parameters}
-        if cfg_env:
-            env["METAFLOW_FLOW_CONFIG_VALUE"] = json.dumps(cfg_env)
-
         # Set AWS DynamoDb Table Name for state tracking for for-eaches.
         # There are three instances when metaflow runtime directly interacts
         # with AWS DynamoDB.
metaflow/plugins/aws/step_functions/step_functions_cli.py CHANGED
@@ -326,7 +326,6 @@ def make_flow(
 
     # Attach AWS Batch decorator to the flow
     decorators._attach_decorators(obj.flow, [BatchDecorator.name])
-    decorators._init(obj.flow)
     decorators._init_step_decorators(
         obj.flow, obj.graph, obj.environment, obj.flow_datastore, obj.logger
     )
metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py CHANGED
@@ -46,7 +46,6 @@ class StepFunctionsTriggeredRun(TriggeredRun):
         )
 
         command_obj = self.deployer.spm.get(pid)
-        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
 
@@ -175,7 +174,6 @@ class StepFunctionsDeployedFlow(DeployedFlow):
         )
 
         command_obj = self.deployer.spm.get(pid)
-        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
     def trigger(self, **kwargs) -> StepFunctionsTriggeredRun:
@@ -219,7 +217,6 @@ class StepFunctionsDeployedFlow(DeployedFlow):
             attribute_file_fd, command_obj, self.deployer.file_read_timeout
         )
 
-        command_obj.sync_wait()
         if command_obj.process.returncode == 0:
             return StepFunctionsTriggeredRun(
                 deployer=self.deployer, content=content
metaflow/plugins/datatools/s3/s3op.py CHANGED
@@ -722,8 +722,8 @@ def cli():
     pass
 
 
-@cli.command("list", help="List S3 objects")
 @tracing.cli_entrypoint("s3op/list")
+@cli.command("list", help="List S3 objects")
 @click.option(
     "--recursive/--no-recursive",
     default=False,
@@ -782,8 +782,8 @@ def lst(
             print(format_result_line(idx, url.prefix, url.url, str(size)))
 
 
-@cli.command(help="Upload files to S3")
 @tracing.cli_entrypoint("s3op/put")
+@cli.command(help="Upload files to S3")
 @click.option(
     "--file",
     "files",
@@ -977,8 +977,8 @@ def _populate_prefixes(prefixes, inputs):
     return prefixes, is_transient_retry
 
 
-@cli.command(help="Download files from S3")
 @tracing.cli_entrypoint("s3op/get")
+@cli.command(help="Download files from S3")
 @click.option(
     "--recursive/--no-recursive",
     default=False,
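These three hunks move @tracing.cli_entrypoint above @cli.command. Python applies stacked decorators bottom-up, so the decorator nearest the function runs first, and flipping the order changes which layer wraps which. A generic runnable sketch, with a hypothetical traced decorator standing in for Metaflow's tracing helper:

    import functools

    def traced(name):
        # Hypothetical stand-in; metaflow's real tracing.cli_entrypoint
        # may behave differently.
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                print("enter %s" % name)
                try:
                    return func(*args, **kwargs)
                finally:
                    print("exit %s" % name)
            return wrapper
        return decorator

    # With @traced on top, the decorator below it (e.g. click's
    # @cli.command) receives the plain function first and @traced wraps
    # its result; reversing the order reverses the wrapping.
    @traced("s3op/list")
    def lst():
        print("listing")

    lst()  # prints: enter s3op/list / listing / exit s3op/list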
metaflow/plugins/kubernetes/kubernetes_cli.py CHANGED
@@ -33,12 +33,12 @@ def kubernetes():
     pass
 
 
+@tracing.cli_entrypoint("kubernetes/step")
 @kubernetes.command(
     help="Execute a single task on Kubernetes. This command calls the top-level step "
     "command inside a Kubernetes pod with the given options. Typically you do not call "
     "this command directly; it is used internally by Metaflow."
 )
-@tracing.cli_entrypoint("kubernetes/step")
 @click.argument("step-name")
 @click.argument("code-package-sha")
 @click.argument("code-package-url")
metaflow/plugins/kubernetes/kubernetes_decorator.py CHANGED
@@ -153,8 +153,8 @@ class KubernetesDecorator(StepDecorator):
     supports_conda_environment = True
     target_platform = "linux-64"
 
-    def init(self):
-        super(KubernetesDecorator, self).init()
+    def __init__(self, attributes=None, statically_defined=False):
+        super(KubernetesDecorator, self).__init__(attributes, statically_defined)
 
         if not self.attributes["namespace"]:
             self.attributes["namespace"] = KUBERNETES_NAMESPACE
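BatchDecorator and KubernetesDecorator both revert from a deferred init() hook to imputing defaults directly in __init__. A minimal sketch of the pattern; the base class and attribute names are simplified stand-ins:

    class Decorator:
        defaults = {"image": None}

        def __init__(self, attributes=None, statically_defined=False):
            self.statically_defined = statically_defined
            self.attributes = dict(self.defaults)
            if attributes:
                self.attributes.update(attributes)

    class KubernetesLikeDecorator(Decorator):
        defaults = {"image": None, "namespace": None}

        def __init__(self, attributes=None, statically_defined=False):
            super().__init__(attributes, statically_defined)
            # Defaults are imputed eagerly at construction time again,
            # rather than in a separate init() hook.
            if not self.attributes["namespace"]:
                self.attributes["namespace"] = "default"  # stand-in for KUBERNETES_NAMESPACE

    d = KubernetesLikeDecorator()
    assert d.attributes["namespace"] == "default"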
metaflow/plugins/pypi/conda_decorator.py CHANGED
@@ -50,26 +50,20 @@ class CondaStepDecorator(StepDecorator):
     # conda channels, users can specify channel::package as the package name.
 
     def __init__(self, attributes=None, statically_defined=False):
-        self._attributes_with_user_values = (
-            set(attributes.keys()) if attributes is not None else set()
+        self._user_defined_attributes = (
+            attributes.copy() if attributes is not None else {}
         )
-
         super(CondaStepDecorator, self).__init__(attributes, statically_defined)
 
-    def init(self):
-        super(CondaStepDecorator, self).init()
-
         # Support legacy 'libraries=' attribute for the decorator.
         self.attributes["packages"] = {
             **self.attributes["libraries"],
             **self.attributes["packages"],
         }
         del self.attributes["libraries"]
-        if self.attributes["packages"]:
-            self._attributes_with_user_values.add("packages")
 
     def is_attribute_user_defined(self, name):
-        return name in self._attributes_with_user_values
+        return name in self._user_defined_attributes
 
     def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
         # The init_environment hook for Environment creates the relevant virtual
@@ -89,10 +83,10 @@ class CondaStepDecorator(StepDecorator):
                 **super_attributes["packages"],
                 **self.attributes["packages"],
             }
-            self._attributes_with_user_values.update(
-                conda_base._attributes_with_user_values
-            )
-
+            self._user_defined_attributes = {
+                **self._user_defined_attributes,
+                **conda_base._user_defined_attributes,
+            }
             self.attributes["python"] = (
                 self.attributes["python"] or super_attributes["python"]
             )
@@ -339,15 +333,11 @@ class CondaFlowDecorator(FlowDecorator):
     }
 
     def __init__(self, attributes=None, statically_defined=False):
-        self._attributes_with_user_values = (
-            set(attributes.keys()) if attributes is not None else set()
+        self._user_defined_attributes = (
+            attributes.copy() if attributes is not None else {}
         )
-
         super(CondaFlowDecorator, self).__init__(attributes, statically_defined)
 
-    def init(self):
-        super(CondaFlowDecorator, self).init()
-
         # Support legacy 'libraries=' attribute for the decorator.
         self.attributes["packages"] = {
             **self.attributes["libraries"],
@@ -358,7 +348,7 @@ class CondaFlowDecorator(FlowDecorator):
             self.attributes["python"] = str(self.attributes["python"])
 
     def is_attribute_user_defined(self, name):
-        return name in self._attributes_with_user_values
+        return name in self._user_defined_attributes
 
     def flow_init(
         self, flow, graph, environment, flow_datastore, metadata, logger, echo, options
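The conda decorators revert to tracking user-supplied attributes as a dict copy rather than a set of keys, so is_attribute_user_defined becomes a key lookup on that copy. A condensed runnable sketch with a stand-in class:

    # CondaLikeDecorator is a simplified stand-in for CondaStepDecorator.
    class CondaLikeDecorator:
        defaults = {"packages": {}, "libraries": {}, "python": None}

        def __init__(self, attributes=None):
            # Keep a copy of exactly what the user passed in.
            self._user_defined_attributes = (
                attributes.copy() if attributes is not None else {}
            )
            self.attributes = {**self.defaults, **(attributes or {})}
            # Legacy 'libraries=' is folded into 'packages='.
            self.attributes["packages"] = {
                **self.attributes["libraries"],
                **self.attributes["packages"],
            }
            del self.attributes["libraries"]

        def is_attribute_user_defined(self, name):
            # Membership test against the copied dict's keys.
            return name in self._user_defined_attributes

    d = CondaLikeDecorator({"packages": {"numpy": "1.26.0"}})
    assert d.is_attribute_user_defined("packages")
    assert not d.is_attribute_user_defined("python")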