ddeutil-workflow 0.0.56__py3-none-any.whl → 0.0.58__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- __version__: str = "0.0.56"
+ __version__: str = "0.0.58"
@@ -502,10 +502,10 @@ class CronPart:
  except IndexError:
  next_value: int = -1
  if value != (next_value - 1):
- # NOTE: ``next_value`` is not the subsequent number
+ # NOTE: `next_value` is not the subsequent number
  if start_number is None:
  # NOTE:
- # The last number of the list ``self.values`` is not in a
+ # The last number of the list `self.values` is not in a
  # range.
  multi_dim_values.append(value)
  else:
@@ -703,11 +703,14 @@ class CronJob:
  *,
  tz: str | None = None,
  ) -> CronRunner:
- """Returns the schedule datetime runner with this cronjob. It would run
- ``next``, ``prev``, or ``reset`` to generate running date that you want.
+ """Returns CronRunner instance that be datetime runner with this
+ cronjob. It can use `next`, `prev`, or `reset` methods to generate
+ running date.

- :param date: An initial date that want to mark as the start point.
- :param tz: A string timezone that want to change on runner.
+ :param date: (datetime) An initial date that want to mark as the start
+ point. (Default is use the current datetime)
+ :param tz: (str) A string timezone that want to change on runner.
+ (Default is None)

  :rtype: CronRunner
  """
@@ -743,6 +746,10 @@ class CronJobYear(CronJob):
  class CronRunner:
  """Create an instance of Date Runner object for datetime generate with
  cron schedule object value.
+
+ :param cron: (CronJob | CronJobYear)
+ :param date: (datetime)
+ :param tz: (str)
  """

  shift_limit: ClassVar[int] = 25
@@ -761,11 +768,17 @@ class CronRunner:
  cron: CronJob | CronJobYear,
  date: datetime | None = None,
  *,
- tz: str | None = None,
+ tz: str | ZoneInfo | None = None,
  ) -> None:
- # NOTE: Prepare timezone if this value does not set, it will use UTC.
- self.tz: ZoneInfo = ZoneInfo("UTC")
+ self.tz: ZoneInfo | None = None
  if tz:
+ if isinstance(tz, ZoneInfo):
+ self.tz = tz
+ elif not isinstance(tz, str):
+ raise TypeError(
+ "Invalid type of `tz` parameter, it should be str or "
+ "ZoneInfo instance."
+ )
  try:
  self.tz = ZoneInfo(tz)
  except ZoneInfoNotFoundError as err:
@@ -777,9 +790,10 @@ class CronRunner:
  raise ValueError(
  "Input schedule start time is not a valid datetime object."
  )
- if tz is None:
- self.tz = date.tzinfo
- self.date: datetime = date.astimezone(self.tz)
+ if tz is not None:
+ self.date: datetime = date.astimezone(self.tz)
+ else:
+ self.date: datetime = date
  else:
  self.date: datetime = datetime.now(tz=self.tz)
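A sketch of the timezone validation this constructor now performs. The helper below is illustrative (not the package's API) and only mirrors the branching added above; the stdlib types are real:

    from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

    def coerce_tz(tz):
        # A ZoneInfo instance passes through; a str key is resolved; anything else fails fast.
        if isinstance(tz, ZoneInfo):
            return tz
        if not isinstance(tz, str):
            raise TypeError("Invalid type of `tz` parameter, it should be str or ZoneInfo instance.")
        try:
            return ZoneInfo(tz)
        except ZoneInfoNotFoundError as err:
            raise ValueError(f"Invalid timezone: {tz}") from err

    print(coerce_tz("Asia/Bangkok"))
    print(coerce_tz(ZoneInfo("UTC")))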
@@ -20,6 +20,7 @@ from typing import Any, Optional, TypedDict, Union

  from typing_extensions import Self

+ StrOrInt = Union[str, int]
  TupleStr = tuple[str, ...]
  DictData = dict[str, Any]
  DictStr = dict[str, str]
ddeutil/workflow/conf.py CHANGED
@@ -200,7 +200,10 @@ class APIConfig:
  return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))


- class BaseLoad(ABC):
+ class BaseLoad(ABC): # pragma: no cov
+ """Base Load object is the abstraction object for any Load object that
+ should to inherit from this base class.
+ """

  @classmethod
  @abstractmethod
@@ -215,7 +218,7 @@ class BaseLoad(ABC):

  class FileLoad(BaseLoad):
  """Base Load object that use to search config data by given some identity
- value like name of `Workflow` or `On` templates.
+ value like name of `Workflow` or `Crontab` templates.

  :param name: (str) A name of key of config data that read with YAML
  Environment object.
@@ -335,8 +338,13 @@ class FileLoad(BaseLoad):
  """
  excluded: list[str] = excluded or []
  path: Path = dynamic("conf_path", f=path, extras=extras)
+ paths: Optional[list[Path]] = paths or (extras or {}).get("conf_paths")
  if not paths:
  paths: list[Path] = [path]
+ elif not isinstance(paths, list):
+ raise TypeError(
+ f"Multi-config paths does not support for type: {type(paths)}"
+ )
  else:
  paths.append(path)

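An illustrative, standalone sketch of the new multi-path resolution above; the function name is invented for the example, and only the branching mirrors the diff:

    from pathlib import Path
    from typing import Optional

    def resolve_conf_paths(
        path: Path,
        paths: Optional[list[Path]] = None,
        extras: Optional[dict] = None,
    ) -> list[Path]:
        # The search paths may now also come from run-time extras under "conf_paths".
        paths = paths or (extras or {}).get("conf_paths")
        if not paths:
            paths = [path]
        elif not isinstance(paths, list):
            raise TypeError(f"Multi-config paths does not support for type: {type(paths)}")
        else:
            paths.append(path)
        return paths

    print(resolve_conf_paths(Path("./conf")))
    print(resolve_conf_paths(Path("./conf"), extras={"conf_paths": [Path("./conf-extra")]}))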
@@ -431,17 +439,21 @@ def dynamic(
  """Dynamic get config if extra value was passed at run-time.

  :param key: (str) A config key that get from Config object.
- :param f: An inner config function scope.
+ :param f: (T) An inner config function scope.
  :param extras: An extra values that pass at run-time.
+
+ :rtype: T
  """
- rsx: Optional[T] = extras[key] if extras and key in extras else None
- rs: Optional[T] = getattr(config, key, None) if f is None else f
- if rsx is not None and not isinstance(rsx, type(rs)):
+ extra: Optional[T] = (extras or {}).get(key, None)
+ conf: Optional[T] = getattr(config, key, None) if f is None else f
+ if extra is None:
+ return conf
+ if not isinstance(extra, type(conf)):
  raise TypeError(
- f"Type of config {key!r} from extras: {rsx!r} does not valid "
- f"as config {type(rs)}."
+ f"Type of config {key!r} from extras: {extra!r} does not valid "
+ f"as config {type(conf)}."
  )
- return rsx if rsx is not None else rs
+ return extra


  class Loader(Protocol): # pragma: no cov
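A standalone sketch of the precedence the reworked `dynamic` enforces: a run-time extras value wins over the config default, but only when its type matches. The key name and default below are illustrative, not real config keys:

    from typing import Any, Optional

    def dynamic_demo(key: str, default: Any, extras: Optional[dict] = None) -> Any:
        extra = (extras or {}).get(key)
        if extra is None:
            return default
        if not isinstance(extra, type(default)):
            raise TypeError(
                f"Type of config {key!r} from extras: {extra!r} does not valid "
                f"as config {type(default)}."
            )
        return extra

    print(dynamic_demo("max_parallel", 2))                              # 2, config default
    print(dynamic_demo("max_parallel", 2, extras={"max_parallel": 5}))  # 5, extras wins
    # dynamic_demo("max_parallel", 2, extras={"max_parallel": "5"})     # raises TypeError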
ddeutil/workflow/event.py CHANGED
@@ -3,8 +3,8 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- """Event module that store all event object. Now, it has only `On` and `OnYear`
- model these are schedule with crontab event.
+ """Event module that store all event object. Now, it has only `Crontab` and
+ `CrontabYear` model these are schedule with crontab event.
  """
  from __future__ import annotations

@@ -63,9 +63,9 @@ def interval2crontab(
  return f"{h} {m} {'1' if interval == 'monthly' else '*'} * {d}"


- class On(BaseModel):
- """On model (Warped crontab object by Pydantic model) to keep crontab value
- and generate CronRunner object from this crontab value.
+ class Crontab(BaseModel):
+ """Cron event model (Warped the CronJob object by Pydantic model) to keep
+ crontab value and generate CronRunner object from this crontab value.

  Methods:
  - generate: is the main use-case of this schedule object.
@@ -117,6 +117,7 @@ class On(BaseModel):
  passing["cronjob"] = interval2crontab(
  **{v: value[v] for v in value if v in ("interval", "day", "time")}
  )
+ print(passing)
  return cls(extras=extras | passing.pop("extras", {}), **passing)

  @classmethod
@@ -127,7 +128,7 @@ class On(BaseModel):
  extras: DictData | None = None,
  ) -> Self:
  """Constructor from the name of config loader that will use loader
- object for getting the `On` data.
+ object for getting the `Crontab` data.

  :param name: (str) A name of config that will get from loader.
  :param extras: (DictData) An extra parameter that use to override core
@@ -171,7 +172,7 @@ class On(BaseModel):
  def __prepare_values(cls, data: Any) -> Any:
  """Extract tz key from value and change name to timezone key.

- :param data: (DictData) A data that want to pass for create an On
+ :param data: (DictData) A data that want to pass for create an Crontab
  model.

  :rtype: DictData
@@ -264,9 +265,9 @@ class On(BaseModel):
  return runner


- class YearOn(On):
- """On with enhance Year Pydantic model for limit year matrix that use by
- some data schedule tools like AWS Glue.
+ class CrontabYear(Crontab):
+ """Cron event with enhance Year Pydantic model for limit year matrix that
+ use by some data schedule tools like AWS Glue.
  """

  model_config = ConfigDict(arbitrary_types_allowed=True)
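A hedged construction sketch for the renamed models. The field names (`cronjob`, `timezone`) are taken from the validator code shown above; the exact constructor and `generate` signature are assumptions, not shown in this diff:

    from ddeutil.workflow.event import Crontab, CrontabYear

    schedule = Crontab.model_validate(
        {"cronjob": "30 1 * * *", "timezone": "Asia/Bangkok"}
    )
    runner = schedule.generate("2025-01-01 00:00:00")  # assumed entry point -> CronRunner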
@@ -9,16 +9,16 @@ annotate for handle error only.
  """
  from __future__ import annotations

- from typing import TypedDict
+ from typing import Literal, TypedDict, overload

- ErrorData = TypedDict(
- "ErrorData",
- {
- "class": Exception,
- "name": str,
- "message": str,
- },
- )
+
+ class ErrorData(TypedDict):
+ """Error data type dict for typing necessary keys of return of to_dict func
+ and method.
+ """
+
+ name: str
+ message: str


  def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
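The functional TypedDict form was only needed because the old "class" key is not a valid identifier; with that key (and the live exception object it carried) dropped, the class syntax works and callers get plain strings only. A small sketch of the resulting shape:

    from typing import TypedDict

    class ErrorData(TypedDict):
        name: str
        message: str

    def to_dict(exception: Exception) -> ErrorData:
        # Only the class name and message survive; the exception instance is not stored.
        return {"name": exception.__class__.__name__, "message": str(exception)}

    print(to_dict(ValueError("boom")))  # {'name': 'ValueError', 'message': 'boom'}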
@@ -29,20 +29,41 @@ def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
  :rtype: ErrorData
  """
  return {
- "class": exception,
  "name": exception.__class__.__name__,
  "message": str(exception),
  }


  class BaseWorkflowException(Exception):
+ """Base Workflow exception class will implement the `refs` argument for
+ making an error context to the result context.
+ """
+
+ def __init__(self, message: str, *, refs: str | None = None):
+ super().__init__(message)
+ self.refs: str | None = refs
+
+ @overload
+ def to_dict(
+ self, with_refs: Literal[True] = ...
+ ) -> dict[str, ErrorData]: ... # pragma: no cov
+
+ @overload
+ def to_dict(
+ self, with_refs: Literal[False] = ...
+ ) -> ErrorData: ... # pragma: no cov

- def to_dict(self) -> ErrorData:
+ def to_dict(
+ self, with_refs: bool = False
+ ) -> ErrorData | dict[str, ErrorData]:
  """Return ErrorData data from the current exception object.

  :rtype: ErrorData
  """
- return to_dict(self)
+ data: ErrorData = to_dict(self)
+ if with_refs and (self.refs is not None and self.refs != "EMPTY"):
+ return {self.refs: data}
+ return data


  class UtilException(BaseWorkflowException): ...
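A short usage sketch of the new `refs` argument, assuming the concrete subclasses (such as `JobException` used later in this diff) inherit this constructor unchanged; the refs value is an invented strategy id:

    from ddeutil.workflow.exceptions import JobException

    err = JobException("Strategy break because stage 'hello' return `FAILED` status.",
                       refs="2150810470")
    print(err.to_dict())
    # {'name': 'JobException', 'message': "Strategy break because stage 'hello' ..."}
    print(err.to_dict(with_refs=True))
    # {'2150810470': {'name': 'JobException', 'message': "Strategy break ..."}}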
ddeutil/workflow/job.py CHANGED
@@ -18,8 +18,10 @@ method.
  from __future__ import annotations

  import copy
+ import time
  from concurrent.futures import (
  FIRST_EXCEPTION,
+ CancelledError,
  Future,
  ThreadPoolExecutor,
  as_completed,
@@ -40,13 +42,12 @@ from .__types import DictData, DictStr, Matrix
  from .exceptions import (
  JobException,
  StageException,
- UtilException,
  to_dict,
  )
  from .result import CANCEL, FAILED, SKIP, SUCCESS, WAIT, Result, Status
  from .reusables import has_template, param2template
  from .stages import Stage
- from .utils import NEWLINE, cross_product, filter_func, gen_id
+ from .utils import cross_product, filter_func, gen_id

  MatrixFilter = list[dict[str, Union[str, int]]]

@@ -380,7 +381,7 @@ class Job(BaseModel):

  :rtype: str
  """
- return dedent(value)
+ return dedent(value.lstrip("\n"))

  @field_validator("stages", mode="after")
  def __validate_stage_id__(cls, value: list[Stage]) -> list[Stage]:
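The added `lstrip("\n")` removes the leading blank line that a triple-quoted description usually starts with; `textwrap.dedent` on its own keeps it. A quick stdlib illustration (the description text is invented):

    from textwrap import dedent

    raw = """
        Extract-and-load pipeline.
        Runs daily at 01:30.
    """
    print(repr(dedent(raw)))               # "\nExtract-and-load pipeline.\n..."
    print(repr(dedent(raw.lstrip("\n"))))  # "Extract-and-load pipeline.\n..."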
@@ -429,11 +430,14 @@ class Job(BaseModel):
  return stage
  raise ValueError(f"Stage {stage_id!r} does not exists in this job.")

- def check_needs(self, jobs: dict[str, Any]) -> Status: # pragma: no cov
+ def check_needs(
+ self, jobs: dict[str, DictData]
+ ) -> Status: # pragma: no cov
  """Return trigger status from checking job's need trigger rule logic was
  valid. The return status should be SUCCESS, FAILED, WAIT, or SKIP.

- :param jobs: A mapping of job ID and its context data.
+ :param jobs: (dict[str, DictData]) A mapping of job ID and its context
+ data that return from execution process.

  :raise NotImplementedError: If the job trigger rule out of scope.

@@ -450,28 +454,34 @@ class Job(BaseModel):
  }
  if len(need_exist) != len(self.needs):
  return WAIT
- elif all("skipped" in need_exist[job] for job in need_exist):
+ elif all(need_exist[job].get("skipped", False) for job in need_exist):
  return SKIP
  elif self.trigger_rule == Rule.ALL_DONE:
  return SUCCESS
  elif self.trigger_rule == Rule.ALL_SUCCESS:
  rs = all(
- k not in need_exist[job]
- for k in ("errors", "skipped")
+ (
+ "errors" not in need_exist[job]
+ and not need_exist[job].get("skipped", False)
+ )
  for job in need_exist
  )
  elif self.trigger_rule == Rule.ALL_FAILED:
  rs = all("errors" in need_exist[job] for job in need_exist)
  elif self.trigger_rule == Rule.ONE_SUCCESS:
  rs = sum(
- k not in need_exist[job]
- for k in ("errors", "skipped")
+ (
+ "errors" not in need_exist[job]
+ and not need_exist[job].get("skipped", False)
+ )
  for job in need_exist
  ) + 1 == len(self.needs)
  elif self.trigger_rule == Rule.ONE_FAILED:
  rs = sum("errors" in need_exist[job] for job in need_exist) == 1
  elif self.trigger_rule == Rule.NONE_SKIPPED:
- rs = all("skipped" not in need_exist[job] for job in need_exist)
+ rs = all(
+ not need_exist[job].get("skipped", False) for job in need_exist
+ )
  elif self.trigger_rule == Rule.NONE_FAILED:
  rs = all("errors" not in need_exist[job] for job in need_exist)
  else: # pragma: no cov
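The rule checks now treat `{"skipped": False}` as a need that actually ran; previously the mere presence of the `skipped` key counted as skipped. An illustrative context (job names and values invented for the example):

    need_exist = {
        "extract": {"stages": {}, "skipped": False},  # ran and succeeded
        "load": {"skipped": True},                    # genuinely skipped
        "transform": {"errors": {"name": "StageException", "message": "..."}},
    }

    # ALL_SUCCESS under the new semantics: "extract" no longer counts as skipped,
    # but the whole rule still fails because "transform" carries errors.
    all_success = all(
        "errors" not in ctx and not ctx.get("skipped", False)
        for ctx in need_exist.values()
    )
    print(all_success)  # False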
@@ -613,20 +623,19 @@ class Job(BaseModel):
  :param event: (Event) An Event manager instance that use to cancel this
  execution if it forces stopped by parent execution.

- :raise NotImplementedError: If the `runs-on` value does not implement on
- this execution.
-
  :rtype: Result
  """
  result: Result = Result.construct_with_rs_or_id(
  run_id=run_id,
  parent_run_id=parent_run_id,
- id_logic=(self.id or "not-set"),
+ id_logic=(self.id or "EMPTY"),
  extras=self.extras,
  )

  result.trace.info(
- f"[JOB]: Execute: {self.id!r} on {self.runs_on.type.value!r}"
+ f"[JOB]: Execute "
+ f"{''.join(self.runs_on.type.value.split('_')).title()}: "
+ f"{self.id!r}"
  )
  if self.runs_on.type == RunsOn.LOCAL:
  return local_execute(
@@ -647,12 +656,18 @@ class Job(BaseModel):
  event=event,
  )

- # pragma: no cov
  result.trace.error(
- f"[JOB]: Execute not support runs-on: {self.runs_on.type!r} yet."
+ f"[JOB]: Execute not support runs-on: {self.runs_on.type.value!r} "
+ f"yet."
  )
- raise NotImplementedError(
- f"Execute runs-on type: {self.runs_on.type} does not support yet."
+ return result.catch(
+ status=FAILED,
+ context={
+ "errors": JobException(
+ f"Execute runs-on type: {self.runs_on.type.value!r} does "
+ f"not support yet."
+ ).to_dict(),
+ },
  )

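An unsupported `runs-on` now comes back as a `FAILED` result whose context carries the error as data instead of raising `NotImplementedError`. The stored shape can be reproduced from the exception helper shown earlier in this diff (the runs-on value string below is illustrative):

    from ddeutil.workflow.exceptions import JobException

    context = {
        "errors": JobException(
            "Execute runs-on type: 'azure_batch' does not support yet."
        ).to_dict(),
    }
    print(context)
    # {'errors': {'name': 'JobException', 'message': "Execute runs-on type: 'azure_batch' ..."}}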
@@ -664,10 +679,10 @@ def local_execute_strategy(
  result: Result | None = None,
  event: Event | None = None,
  ) -> Result:
- """Local job strategy execution with passing dynamic parameters from the
- workflow execution to strategy matrix.
+ """Local strategy execution with passing dynamic parameters from the
+ job execution and strategy matrix.

- This execution is the minimum level of execution of this job model.
+ This execution is the minimum level of job execution.
  It different with `self.execute` because this method run only one
  strategy and return with context of this strategy data.

@@ -684,22 +699,22 @@ def local_execute_strategy(
  :param event: (Event) An Event manager instance that use to cancel this
  execution if it forces stopped by parent execution.

- :raise JobException: If stage execution raise any error as `StageException`
- or `UtilException`.
+ :raise JobException: If event was set.
+ :raise JobException: If stage execution raise any error as `StageException`.
+ :raise JobException: If the result from execution has `FAILED` status.

  :rtype: Result
  """
  result: Result = result or Result(
- run_id=gen_id(job.id or "not-set", unique=True),
+ run_id=gen_id(job.id or "EMPTY", unique=True),
  extras=job.extras,
  )
  if strategy:
  strategy_id: str = gen_id(strategy)
- result.trace.info(f"[JOB]: Start Strategy: {strategy_id!r}")
+ result.trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
  result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
  else:
  strategy_id: str = "EMPTY"
- result.trace.info("[JOB]: Start Strategy: 'EMPTY'")

  context: DictData = copy.deepcopy(params)
  context.update({"matrix": strategy, "stages": {}})
@@ -714,11 +729,8 @@ def local_execute_strategy(
  continue

  if event and event.is_set():
- error_msg: str = (
- "Job strategy was canceled from event that had set before "
- "job strategy execution."
- )
- return result.catch(
+ error_msg: str = "Job strategy was canceled because event was set."
+ result.catch(
  status=CANCEL,
  context={
  strategy_id: {
@@ -728,6 +740,7 @@ def local_execute_strategy(
  },
  },
  )
+ raise JobException(error_msg, refs=strategy_id)

  try:
  result.trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
@@ -738,8 +751,7 @@ def local_execute_strategy(
  event=event,
  )
  stage.set_outputs(rs.context, to=context)
- except (StageException, UtilException) as e:
- result.trace.error(f"[JOB]: {e.__class__.__name__}: {e}")
+ except StageException as e:
  result.catch(
  status=FAILED,
  context={
@@ -751,15 +763,15 @@ def local_execute_strategy(
  },
  )
  raise JobException(
- f"Stage raise: {e.__class__.__name__}: {e}"
+ message=f"Handler Error: {e.__class__.__name__}: {e}",
+ refs=strategy_id,
  ) from e

  if rs.status == FAILED:
  error_msg: str = (
- f"Strategy break because stage, {stage.iden!r}, return FAILED "
- f"status."
+ f"Strategy break because stage, {stage.iden!r}, return "
+ f"`FAILED` status."
  )
- result.trace.warning(f"[JOB]: {error_msg}")
  result.catch(
  status=FAILED,
  context={
@@ -770,7 +782,7 @@ def local_execute_strategy(
  },
  },
  )
- raise JobException(error_msg)
+ raise JobException(error_msg, refs=strategy_id)

  return result.catch(
  status=SUCCESS,
@@ -792,11 +804,19 @@ def local_execute(
  event: Event | None = None,
  ) -> Result:
  """Local job execution with passing dynamic parameters from the workflow
- execution or itself execution. It will generate matrix values at the first
+ execution or directly. It will generate matrix values at the first
  step and run multithread on this metrics to the `stages` field of this job.

- This method does not raise any `JobException` if it runs with
- multi-threading strategy.
+ Important:
+ This method does not raise any `JobException` because it allows run
+ parallel mode. If it raises error from strategy execution, it will catch
+ that error and store it in the `errors` key with list of error.
+
+ {
+ "errors": [
+ {"name": "...", "message": "..."}, ...
+ ]
+ }

  :param job: (Job) A job model.
  :param params: (DictData) A parameter data.
@@ -810,20 +830,20 @@ def local_execute(
  result: Result = Result.construct_with_rs_or_id(
  run_id=run_id,
  parent_run_id=parent_run_id,
- id_logic=(job.id or "not-set"),
+ id_logic=(job.id or "EMPTY"),
  extras=job.extras,
  )

- event: Event = Event() if event is None else event
+ event: Event = event or Event()
  fail_fast_flag: bool = job.strategy.fail_fast
  ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
  workers: int = job.strategy.max_parallel
  result.trace.info(
- f"[JOB]: {ls}-Execute: {job.id} with {workers} "
+ f"[JOB]: Execute {ls}: {job.id!r} with {workers} "
  f"worker{'s' if workers > 1 else ''}."
  )

- if event and event.is_set(): # pragma: no cov
+ if event and event.is_set():
  return result.catch(
  status=CANCEL,
  context={
@@ -859,14 +879,23 @@ def local_execute(
  done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
  if len(done) != len(futures):
  result.trace.warning(
- "[JOB]: Set event for stop pending stage future."
+ "[JOB]: Handler Fail-Fast: Got exception and set event."
  )
  event.set()
  for future in not_done:
  future.cancel()
+ time.sleep(0.075)

- nd: str = f", strategies not run: {not_done}" if not_done else ""
- result.trace.debug(f"... Strategy set Fail-Fast{nd}")
+ nd: str = (
+ (
+ f", {len(not_done)} strateg"
+ f"{'ies' if len(not_done) > 1 else 'y'} not run!!!"
+ )
+ if not_done
+ else ""
+ )
+ result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
+ done: list[Future] = as_completed(futures)

  for future in done:
  try:
@@ -874,12 +903,14 @@ def local_execute(
  context.update(future.result())
  except JobException as e:
  status = FAILED
  result.trace.error(
- f"[JOB]: {ls}: {e.__class__.__name__}:{NEWLINE}{e}"
+ f"[JOB]: {ls} Error Handler:||{e.__class__.__name__}:||{e}"
  )
  if "errors" in context:
- context["errors"].append(e.to_dict())
+ context["errors"][e.refs] = e.to_dict()
  else:
- context["errors"] = [e.to_dict()]
+ context["errors"] = e.to_dict(with_refs=True)
+ except CancelledError:
+ pass
  return result.catch(status=status, context=context)

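With refs-keyed errors, a multi-strategy run now aggregates per-strategy failures into a mapping rather than a list. A sketch of the aggregation above, using invented strategy ids:

    from ddeutil.workflow.exceptions import JobException

    context: dict = {}
    failures = [
        JobException("Strategy break because stage, 'hello', return `FAILED` status.",
                     refs="2150810470"),
        JobException("Handler Error: StageException: ...", refs="4855178605"),
    ]
    for exc in failures:
        if "errors" in context:
            context["errors"][exc.refs] = exc.to_dict()
        else:
            context["errors"] = exc.to_dict(with_refs=True)

    print(list(context["errors"]))  # ['2150810470', '4855178605']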
@@ -907,7 +938,7 @@ def self_hosted_execute(
  result: Result = Result.construct_with_rs_or_id(
  run_id=run_id,
  parent_run_id=parent_run_id,
- id_logic=(job.id or "not-set"),
+ id_logic=(job.id or "EMPTY"),
  extras=job.extras,
  )

@@ -953,7 +984,7 @@ def azure_batch_execute(
  run_id: str | None = None,
  parent_run_id: str | None = None,
  event: Event | None = None,
- ) -> Result: # pragma no cov
+ ) -> Result: # pragma: no cov
  """Azure Batch job execution that will run all job's stages on the Azure
  Batch Node and extract the result file to be returning context result.

@@ -983,7 +1014,7 @@ def azure_batch_execute(
  result: Result = Result.construct_with_rs_or_id(
  run_id=run_id,
  parent_run_id=parent_run_id,
- id_logic=(job.id or "not-set"),
+ id_logic=(job.id or "EMPTY"),
  extras=job.extras,
  )
  if event and event.is_set():
@@ -1007,7 +1038,7 @@ def docker_execution(
  run_id: str | None = None,
  parent_run_id: str | None = None,
  event: Event | None = None,
- ):
+ ): # pragma: no cov
  """Docker job execution.

  Steps:
@@ -1018,7 +1049,7 @@ def docker_execution(
  result: Result = Result.construct_with_rs_or_id(
  run_id=run_id,
  parent_run_id=parent_run_id,
- id_logic=(job.id or "not-set"),
+ id_logic=(job.id or "EMPTY"),
  extras=job.extras,
  )
  if event and event.is_set():