ob-metaflow 2.17.3.1__py2.py3-none-any.whl → 2.18.0.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow might be problematic; consult the package registry's advisory page for more details.

@@ -60,7 +60,7 @@ def list(obj, archive=False):
60
60
  @click.pass_obj
61
61
  def save(obj, path):
62
62
  with open(path, "wb") as f:
63
- f.write(obj.package.blob())
63
+ f.write(obj.package.blob)
64
64
  obj.echo(
65
65
  "Code package saved in *%s* with metadata: %s"
66
66
  % (path, obj.package.package_metadata),
metaflow/runtime.py CHANGED
@@ -15,11 +15,13 @@ import tempfile
15
15
  import time
16
16
  import subprocess
17
17
  from datetime import datetime
18
+ from enum import Enum
18
19
  from io import BytesIO
19
20
  from itertools import chain
20
21
  from functools import partial
21
22
  from concurrent import futures
22
23
 
24
+ from typing import Dict, Tuple
23
25
  from metaflow.datastore.exceptions import DataException
24
26
  from contextlib import contextmanager
25
27
 
@@ -60,6 +62,7 @@ PROGRESS_INTERVAL = 300 # s
60
62
  # leveraging the TaskDataStoreSet.
61
63
  PREFETCH_DATA_ARTIFACTS = [
62
64
  "_foreach_stack",
65
+ "_iteration_stack",
63
66
  "_task_ok",
64
67
  "_transition",
65
68
  "_control_mapper_tasks",
@@ -67,6 +70,14 @@ PREFETCH_DATA_ARTIFACTS = [
67
70
  ]
68
71
  RESUME_POLL_SECONDS = 60
69
72
 
73
+
74
+ class LoopBehavior(Enum):
75
+ NONE = "none"
76
+ ENTERING = "entering"
77
+ EXITING = "exiting"
78
+ LOOPING = "looping"
79
+
80
+
70
81
  # Runtime must use logsource=RUNTIME_LOG_SOURCE for all loglines that it
71
82
  # formats according to mflog. See a comment in mflog.__init__
72
83
  mflog_msg = partial(mflog.decorate, RUNTIME_LOG_SOURCE)
@@ -290,6 +301,7 @@ class NativeRuntime(object):
290
301
  pathspec_index,
291
302
  cloned_task_pathspec_index,
292
303
  finished_tuple,
304
+ iteration_tuple,
293
305
  ubf_context,
294
306
  generate_task_obj,
295
307
  verbose=False,
@@ -334,7 +346,7 @@ class NativeRuntime(object):
334
346
  self._metadata,
335
347
  origin_ds_set=self._origin_ds_set,
336
348
  )
337
- self._finished[(step_name, finished_tuple)] = task_pathspec
349
+ self._finished[(step_name, finished_tuple, iteration_tuple)] = task_pathspec
338
350
  self._is_cloned[task_pathspec] = True
339
351
  except Exception as e:
340
352
  self._logger(
@@ -415,6 +427,7 @@ class NativeRuntime(object):
415
427
  finished_tuple = tuple(
416
428
  [s._replace(value=0) for s in task_ds.get("_foreach_stack", ())]
417
429
  )
430
+ iteration_tuple = tuple(task_ds.get("_iteration_stack", ()))
418
431
  cloned_task_pathspec_index = pathspec_index.split("/")[1]
419
432
  if task_ds.get("_control_task_is_mapper_zero", False):
420
433
  # Replace None with index 0 for control task as it is part of the
@@ -440,6 +453,7 @@ class NativeRuntime(object):
440
453
  pathspec_index,
441
454
  cloned_task_pathspec_index,
442
455
  finished_tuple,
456
+ iteration_tuple,
443
457
  is_ubf_mapper_task,
444
458
  ubf_context,
445
459
  )
@@ -454,6 +468,7 @@ class NativeRuntime(object):
454
468
  pathspec_index,
455
469
  cloned_task_pathspec_index,
456
470
  finished_tuple,
471
+ iteration_tuple,
457
472
  ubf_context=ubf_context,
458
473
  generate_task_obj=generate_task_obj and (not is_ubf_mapper_task),
459
474
  verbose=verbose,
@@ -464,6 +479,7 @@ class NativeRuntime(object):
464
479
  pathspec_index,
465
480
  cloned_task_pathspec_index,
466
481
  finished_tuple,
482
+ iteration_tuple,
467
483
  is_ubf_mapper_task,
468
484
  ubf_context,
469
485
  ) in inputs
@@ -484,6 +500,7 @@ class NativeRuntime(object):
484
500
  self._queue_push("start", {"input_paths": [self._params_task.path]})
485
501
  else:
486
502
  self._queue_push("start", {})
503
+
487
504
  progress_tstamp = time.time()
488
505
  with tempfile.NamedTemporaryFile(mode="w", encoding="utf-8") as config_file:
489
506
  # Configurations are passed through a file to avoid overloading the
@@ -504,7 +521,74 @@ class NativeRuntime(object):
504
521
  ):
505
522
  # 1. are any of the current workers finished?
506
523
  if self._cloned_tasks:
507
- finished_tasks = self._cloned_tasks
524
+ finished_tasks = []
525
+
526
+ # For loops (right now just recursive steps), we need to find
527
+ # the exact frontier because if we queue all "successors" to all
528
+ # the finished iterations, we would incorrectly launch multiple
529
+ # successors. We therefore have to strip out all non-last
530
+ # iterations *per* foreach branch.
531
+ idx_per_finished_id = (
532
+ {}
533
+ ) # type: Dict[Tuple[str, Tuple[int, ...], Tuple[int, Tuple[int, ...]]]]
534
+ for task in self._cloned_tasks:
535
+ step_name, foreach_stack, iteration_stack = task.finished_id
536
+ existing_task_idx = idx_per_finished_id.get(
537
+ (step_name, foreach_stack), None
538
+ )
539
+ if existing_task_idx is not None:
540
+ len_diff = len(iteration_stack) - len(
541
+ existing_task_idx[1]
542
+ )
543
+ # In this case, we need to keep only the latest iteration
544
+ if (
545
+ len_diff == 0
546
+ and iteration_stack > existing_task_idx[1]
547
+ ) or len_diff == -1:
548
+ # We remove the one we currently have and replace
549
+ # by this one. The second option means that we are
550
+ # adding the finished iteration marker.
551
+ existing_task = finished_tasks[existing_task_idx[0]]
552
+ # These are the first two lines of _queue_tasks
553
+ # We still consider the tasks finished so we need
554
+ # to update state to be clean.
555
+ self._finished[existing_task.finished_id] = (
556
+ existing_task.path
557
+ )
558
+ self._is_cloned[existing_task.path] = (
559
+ existing_task.is_cloned
560
+ )
561
+
562
+ finished_tasks[existing_task_idx[0]] = task
563
+ idx_per_finished_id[(step_name, foreach_stack)] = (
564
+ existing_task_idx[0],
565
+ iteration_stack,
566
+ )
567
+ elif (
568
+ len_diff == 0
569
+ and iteration_stack < existing_task_idx[1]
570
+ ) or len_diff == 1:
571
+ # The second option is when we have already marked
572
+ # the end of the iteration in self._finished and
573
+ # are now seeing a previous iteration.
574
+ # We just mark the task as finished but we don't
575
+ # put it in the finished_tasks list to pass to
576
+ # the _queue_tasks function
577
+ self._finished[task.finished_id] = task.path
578
+ self._is_cloned[task.path] = task.is_cloned
579
+ else:
580
+ raise MetaflowInternalError(
581
+ "Unexpected recursive cloned tasks -- "
582
+ "this is a bug, please report it."
583
+ )
584
+ else:
585
+ # New entry
586
+ finished_tasks.append(task)
587
+ idx_per_finished_id[(step_name, foreach_stack)] = (
588
+ len(finished_tasks) - 1,
589
+ iteration_stack,
590
+ )
591
+
508
592
  # reset the list of cloned tasks and let poll_workers handle
509
593
  # the remaining transition
510
594
  self._cloned_tasks = []
@@ -578,7 +662,7 @@ class NativeRuntime(object):
578
662
  self._run_exit_hooks()
579
663
 
580
664
  # assert that end was executed and it was successful
581
- if ("end", ()) in self._finished:
665
+ if ("end", (), ()) in self._finished:
582
666
  if self._run_url:
583
667
  self._logger(
584
668
  "Done! See the run in the UI at %s" % self._run_url,
@@ -604,7 +688,7 @@ class NativeRuntime(object):
604
688
  if not exit_hook_decos:
605
689
  return
606
690
 
607
- successful = ("end", ()) in self._finished or self._clone_only
691
+ successful = ("end", (), ()) in self._finished or self._clone_only
608
692
  pathspec = f"{self._graph.name}/{self._run_id}"
609
693
  flow_file = self._environment.get_environment_info()["script"]
610
694
 
@@ -672,29 +756,60 @@ class NativeRuntime(object):
672
756
 
673
757
  # Given the current task information (task_index), the type of transition,
674
758
  # and the split index, return the new task index.
675
- def _translate_index(self, task, next_step, type, split_index=None):
676
- match = re.match(r"^(.+)\[(.*)\]$", task.task_index)
759
+ def _translate_index(
760
+ self, task, next_step, type, split_index=None, loop_mode=LoopBehavior.NONE
761
+ ):
762
+ match = re.match(r"^(.+)\[(.*)\]\[(.*)\]$", task.task_index)
677
763
  if match:
678
- _, foreach_index = match.groups()
764
+ _, foreach_index, iteration_index = match.groups()
679
765
  # Convert foreach_index to a list of integers
680
766
  if len(foreach_index) > 0:
681
767
  foreach_index = foreach_index.split(",")
682
768
  else:
683
769
  foreach_index = []
770
+ # Ditto for iteration_index
771
+ if len(iteration_index) > 0:
772
+ iteration_index = iteration_index.split(",")
773
+ else:
774
+ iteration_index = []
684
775
  else:
685
776
  raise ValueError(
686
- "Index not in the format of {run_id}/{step_name}[{foreach_index}]"
777
+ "Index not in the format of {run_id}/{step_name}[{foreach_index}][{iteration_index}]"
687
778
  )
779
+ if loop_mode == LoopBehavior.NONE:
780
+ # Check if we are entering a looping construct. Right now, only recursive
781
+ # steps are looping constructs
782
+ next_step_node = self._graph[next_step]
783
+ if (
784
+ next_step_node.type == "split-switch"
785
+ and next_step in next_step_node.out_funcs
786
+ ):
787
+ loop_mode = LoopBehavior.ENTERING
788
+
789
+ # Update iteration_index
790
+ if loop_mode == LoopBehavior.ENTERING:
791
+ # We are entering a loop, so we add a new iteration level
792
+ iteration_index.append("0")
793
+ elif loop_mode == LoopBehavior.EXITING:
794
+ iteration_index = iteration_index[:-1]
795
+ elif loop_mode == LoopBehavior.LOOPING:
796
+ if len(iteration_index) == 0:
797
+ raise MetaflowInternalError(
798
+ "In looping mode but there is no iteration index"
799
+ )
800
+ iteration_index[-1] = str(int(iteration_index[-1]) + 1)
801
+ iteration_index = ",".join(iteration_index)
802
+
688
803
  if type == "linear":
689
- return "%s[%s]" % (next_step, ",".join(foreach_index))
804
+ return "%s[%s][%s]" % (next_step, ",".join(foreach_index), iteration_index)
690
805
  elif type == "join":
691
806
  indices = []
692
807
  if len(foreach_index) > 0:
693
808
  indices = foreach_index[:-1]
694
- return "%s[%s]" % (next_step, ",".join(indices))
809
+ return "%s[%s][%s]" % (next_step, ",".join(indices), iteration_index)
695
810
  elif type == "split":
696
811
  foreach_index.append(str(split_index))
697
- return "%s[%s]" % (next_step, ",".join(foreach_index))
812
+ return "%s[%s][%s]" % (next_step, ",".join(foreach_index), iteration_index)
698
813
 
699
814
  # Store the parameters needed for task creation, so that pushing on items
700
815
  # onto the run_queue is an inexpensive operation.
@@ -778,17 +893,19 @@ class NativeRuntime(object):
778
893
  # tasks is incorrect and contains the pathspec of the *cloned* run
779
894
  # but we don't use it for anything. We could look to clean it up though
780
895
  if not task.is_cloned:
781
- _, foreach_stack = task.finished_id
896
+ _, foreach_stack, iteration_stack = task.finished_id
782
897
  top = foreach_stack[-1]
783
898
  bottom = list(foreach_stack[:-1])
784
899
  for i in range(num_splits):
785
900
  s = tuple(bottom + [top._replace(index=i)])
786
- self._finished[(task.step, s)] = mapper_tasks[i]
901
+ self._finished[(task.step, s, iteration_stack)] = mapper_tasks[
902
+ i
903
+ ]
787
904
  self._is_cloned[mapper_tasks[i]] = False
788
905
 
789
906
  # Find and check status of control task and retrieve its pathspec
790
907
  # for retrieving unbounded foreach cardinality.
791
- _, foreach_stack = task.finished_id
908
+ _, foreach_stack, iteration_stack = task.finished_id
792
909
  top = foreach_stack[-1]
793
910
  bottom = list(foreach_stack[:-1])
794
911
  s = tuple(bottom + [top._replace(index=None)])
@@ -797,7 +914,7 @@ class NativeRuntime(object):
797
914
  # it will have index=0 instead of index=None.
798
915
  if task.results.get("_control_task_is_mapper_zero", False):
799
916
  s = tuple(bottom + [top._replace(index=0)])
800
- control_path = self._finished.get((task.step, s))
917
+ control_path = self._finished.get((task.step, s, iteration_stack))
801
918
  if control_path:
802
919
  # Control task was successful.
803
920
  # Additionally check the state of (sibling) mapper tasks as well
@@ -806,7 +923,9 @@ class NativeRuntime(object):
806
923
  required_tasks = []
807
924
  for i in range(num_splits):
808
925
  s = tuple(bottom + [top._replace(index=i)])
809
- required_tasks.append(self._finished.get((task.step, s)))
926
+ required_tasks.append(
927
+ self._finished.get((task.step, s, iteration_stack))
928
+ )
810
929
 
811
930
  if all(required_tasks):
812
931
  index = self._translate_index(task, next_step, "join")
@@ -819,7 +938,7 @@ class NativeRuntime(object):
819
938
  else:
820
939
  # matching_split is the split-parent of the finished task
821
940
  matching_split = self._graph[self._graph[next_step].split_parents[-1]]
822
- _, foreach_stack = task.finished_id
941
+ _, foreach_stack, iteration_stack = task.finished_id
823
942
 
824
943
  direct_parents = set(self._graph[next_step].in_funcs)
825
944
 
@@ -837,7 +956,7 @@ class NativeRuntime(object):
837
956
  filter(
838
957
  lambda x: x is not None,
839
958
  [
840
- self._finished.get((p, s))
959
+ self._finished.get((p, s, iteration_stack))
841
960
  for p in direct_parents
842
961
  for s in siblings(foreach_stack)
843
962
  ],
@@ -852,11 +971,12 @@ class NativeRuntime(object):
852
971
  filter(
853
972
  lambda x: x is not None,
854
973
  [
855
- self._finished.get((p, foreach_stack))
974
+ self._finished.get((p, foreach_stack, iteration_stack))
856
975
  for p in direct_parents
857
976
  ],
858
977
  )
859
978
  )
979
+
860
980
  required_count = len(matching_split.out_funcs)
861
981
  join_type = "linear"
862
982
  index = self._translate_index(task, next_step, "linear")
@@ -868,9 +988,18 @@ class NativeRuntime(object):
868
988
  index,
869
989
  )
870
990
 
871
- def _queue_task_switch(self, task, next_steps):
991
+ def _queue_task_switch(self, task, next_steps, is_recursive):
872
992
  chosen_step = next_steps[0]
873
- index = self._translate_index(task, chosen_step, "linear")
993
+
994
+ loop_mode = LoopBehavior.NONE
995
+ if is_recursive:
996
+ if chosen_step != task.step:
997
+ # We are exiting a loop
998
+ loop_mode = LoopBehavior.EXITING
999
+ else:
1000
+ # We are staying in the loop
1001
+ loop_mode = LoopBehavior.LOOPING
1002
+ index = self._translate_index(task, chosen_step, "linear", None, loop_mode)
874
1003
  self._queue_push(chosen_step, {"input_paths": [task.path]}, index)
875
1004
 
876
1005
  def _queue_task_foreach(self, task, next_steps):
@@ -949,7 +1078,9 @@ class NativeRuntime(object):
949
1078
  next_steps = []
950
1079
  foreach = None
951
1080
  expected = self._graph[task.step].out_funcs
1081
+
952
1082
  if self._graph[task.step].type == "split-switch":
1083
+ is_recursive = task.step in self._graph[task.step].out_funcs
953
1084
  if len(next_steps) != 1:
954
1085
  msg = (
955
1086
  "Switch step *{step}* should transition to exactly "
@@ -970,6 +1101,15 @@ class NativeRuntime(object):
970
1101
  expected=", ".join(expected),
971
1102
  )
972
1103
  )
1104
+ # When exiting a recursive loop, we mark that the loop itself has
1105
+ # finished by adding a special entry in self._finished which has
1106
+ # an iteration stack that is shorter (ie: we are out of the loop) so
1107
+ # that we can then find it when looking at successor tasks to launch.
1108
+ if is_recursive and next_steps[0] != task.step:
1109
+ step_name, finished_tuple, iteration_tuple = task.finished_id
1110
+ self._finished[
1111
+ (step_name, finished_tuple, iteration_tuple[:-1])
1112
+ ] = task.path
973
1113
  elif next_steps != expected:
974
1114
  msg = (
975
1115
  "Based on static analysis of the code, step *{step}* "
@@ -995,8 +1135,8 @@ class NativeRuntime(object):
995
1135
  # Next step is a foreach child
996
1136
  self._queue_task_foreach(task, next_steps)
997
1137
  elif self._graph[task.step].type == "split-switch":
998
- # Next step is switch - queue the chosen step
999
- self._queue_task_switch(task, next_steps)
1138
+ # Current step is switch - queue the chosen step
1139
+ self._queue_task_switch(task, next_steps, is_recursive)
1000
1140
  else:
1001
1141
  # Next steps are normal linear steps
1002
1142
  for step in next_steps:
@@ -1537,13 +1677,13 @@ class Task(object):
1537
1677
  @property
1538
1678
  def finished_id(self):
1539
1679
  # note: id is not available before the task has finished.
1540
- # Index already identifies the task within the foreach,
1541
- # we will remove foreach value so that it is easier to
1680
+ # Index already identifies the task within the foreach and loop.
1681
+ # We will remove foreach value so that it is easier to
1542
1682
  # identify siblings within a foreach.
1543
1683
  foreach_stack_tuple = tuple(
1544
1684
  [s._replace(value=0) for s in self.results["_foreach_stack"]]
1545
1685
  )
1546
- return (self.step, foreach_stack_tuple)
1686
+ return (self.step, foreach_stack_tuple, tuple(self.results["_iteration_stack"]))
1547
1687
 
1548
1688
  @property
1549
1689
  def is_cloned(self):
metaflow/task.py CHANGED
@@ -117,11 +117,21 @@ class MetaflowTask(object):
117
117
 
118
118
  # We back out of the stack of generators
119
119
  for w in reversed(wrappers_stack):
120
- r = w.post_step(orig_step_func.name, self.flow, raised_exception)
121
- if r is None or isinstance(r, Exception):
120
+ try:
121
+ r = w.post_step(orig_step_func.name, self.flow, raised_exception)
122
+ except Exception as ex:
123
+ r = ex
124
+ if r is None:
122
125
  raised_exception = None
126
+ elif isinstance(r, Exception):
127
+ raised_exception = r
123
128
  elif isinstance(r, tuple):
124
- raised_exception, fake_next_call_args = r
129
+ if len(r) == 2:
130
+ raised_exception, fake_next_call_args = r
131
+ else:
132
+ # The last argument is an exception to be re-raised. Used in
133
+ # user_step_decorator's post_step
134
+ raise r[2]
125
135
  else:
126
136
  raise RuntimeError(
127
137
  "Invalid return value from a UserStepDecorator. Expected an"
@@ -239,6 +249,7 @@ class MetaflowTask(object):
239
249
  # Prefetch 'foreach' related artifacts to improve time taken by
240
250
  # _init_foreach.
241
251
  prefetch_data_artifacts = [
252
+ "_iteration_stack",
242
253
  "_foreach_stack",
243
254
  "_foreach_num_splits",
244
255
  "_foreach_var",
@@ -375,6 +386,56 @@ class MetaflowTask(object):
375
386
  elif "_foreach_stack" in inputs[0]:
376
387
  self.flow._foreach_stack = inputs[0]["_foreach_stack"]
377
388
 
389
+ def _init_iteration(self, step_name, inputs, is_recursive_step):
390
+ # We track the iteration "stack" for loops. At this time, we
391
+ # only support one type of "looping" which is a recursive step but
392
+ # this can generalize to arbitrary well-scoped loops in the future.
393
+
394
+ # _iteration_stack will contain the iteration count for each loop
395
+ # level. Currently, there will be only no elements (no loops) or
396
+ # a single element (a single recursive step).
397
+
398
+ # We just need to determine the rules to add a new looping level,
399
+ # increment the looping level or pop the looping level. In our
400
+ # current support for only recursive steps, this is pretty straightforward:
401
+ # 1) if is_recursive_step:
402
+ # - we are entering a loop -- we are either entering for the first time
403
+ # or we are continuing the loop. Note that a recursive step CANNOT
404
+ # be a join step so there is always a single input
405
+ # 1a) If inputs[0]["_iteration_stack"] contains an element, we are looping
406
+ # so we increment the count
407
+ # 1b) If inputs[0]["_iteration_stack"] is empty, this is the first time we
408
+ # are entering the loop so we set the iteration count to 0
409
+ # 2) if it is not a recursive step, we need to determine if this is the step
410
+ # *after* the recursive step. The easiest way to determine that is to
411
+ # look at all inputs (there can be multiple in case of a join) and pop
412
+ # _iteration_stack if it is set. However, since we know that non recursive
413
+ # steps are *never* part of an iteration, we can simplify and just set it
414
+ # to [] without even checking anything. We will have to revisit this if/when
415
+ # more complex loop structures are supported.
416
+
417
+ # Note that just like _foreach_stack, we need to set _iteration_stack to *something*
418
+ # so that it doesn't get clobbered weirdly by merge_artifacts.
419
+
420
+ if is_recursive_step:
421
+ # Case 1)
422
+ if len(inputs) != 1:
423
+ raise MetaflowInternalError(
424
+ "Step *%s* is a recursive step but got multiple inputs." % step_name
425
+ )
426
+ inp = inputs[0]
427
+ if "_iteration_stack" not in inp or not inp["_iteration_stack"]:
428
+ # Case 1b)
429
+ self.flow._iteration_stack = [0]
430
+ else:
431
+ # Case 1a)
432
+ stack = inp["_iteration_stack"]
433
+ stack[-1] += 1
434
+ self.flow._iteration_stack = stack
435
+ else:
436
+ # Case 2)
437
+ self.flow._iteration_stack = []
438
+
378
439
  def _clone_flow(self, datastore):
379
440
  x = self.flow.__class__(use_cli=False)
380
441
  x._set_datastore(datastore)
@@ -563,6 +624,12 @@ class MetaflowTask(object):
563
624
  # 3. initialize foreach state
564
625
  self._init_foreach(step_name, join_type, inputs, split_index)
565
626
 
627
+ # 4. initialize the iteration state
628
+ is_recursive_step = (
629
+ node.type == "split-switch" and step_name in node.out_funcs
630
+ )
631
+ self._init_iteration(step_name, inputs, is_recursive_step)
632
+
566
633
  # Add foreach stack to metadata of the task
567
634
 
568
635
  foreach_stack = (
@@ -271,6 +271,9 @@ class ConfigValue(collections.abc.Mapping, dict):
271
271
  v = obj
272
272
  return v
273
273
 
274
+ def __reduce__(self):
275
+ return (self.__class__, (self.to_dict(),))
276
+
274
277
 
275
278
  class DelayEvaluator(collections.abc.Mapping):
276
279
  """
@@ -484,7 +487,6 @@ class Config(Parameter, collections.abc.Mapping):
484
487
  parser: Optional[Union[str, Callable[[str], Dict[Any, Any]]]] = None,
485
488
  **kwargs: Dict[str, str]
486
489
  ):
487
-
488
490
  if default is not None and default_value is not None:
489
491
  raise MetaflowException(
490
492
  "For config '%s', you can only specify default or default_value, not both"
@@ -658,8 +658,14 @@ def user_step_decorator(*args, **kwargs):
658
658
  self._generator.send(None)
659
659
  except StopIteration as e:
660
660
  to_return = e.value
661
+ except Exception as e:
662
+ return e
661
663
  else:
662
- raise MetaflowException(" %s should only yield once" % self)
664
+ return (
665
+ None,
666
+ None,
667
+ MetaflowException(" %s should only yield once" % self),
668
+ )
663
669
  return to_return
664
670
 
665
671
  return WrapClass
metaflow/version.py CHANGED
@@ -1 +1 @@
1
- metaflow_version = "2.17.3.1"
1
+ metaflow_version = "2.18.0.1"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ob-metaflow
3
- Version: 2.17.3.1
3
+ Version: 2.18.0.1
4
4
  Summary: Metaflow: More AI and ML, Less Engineering
5
5
  Author: Netflix, Outerbounds & the Metaflow Community
6
6
  Author-email: help@outerbounds.co
@@ -12,7 +12,7 @@ Requires-Dist: boto3
12
12
  Requires-Dist: pylint
13
13
  Requires-Dist: kubernetes
14
14
  Provides-Extra: stubs
15
- Requires-Dist: metaflow-stubs==2.17.3.1; extra == "stubs"
15
+ Requires-Dist: metaflow-stubs==2.18.0.1; extra == "stubs"
16
16
  Dynamic: author
17
17
  Dynamic: author-email
18
18
  Dynamic: description