ddeutil-workflow 0.0.56__py3-none-any.whl → 0.0.57__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +26 -12
- ddeutil/workflow/__types.py +1 -0
- ddeutil/workflow/conf.py +20 -8
- ddeutil/workflow/event.py +1 -0
- ddeutil/workflow/exceptions.py +33 -12
- ddeutil/workflow/job.py +81 -57
- ddeutil/workflow/logs.py +13 -5
- ddeutil/workflow/result.py +9 -4
- ddeutil/workflow/scheduler.py +6 -2
- ddeutil/workflow/stages.py +370 -147
- ddeutil/workflow/utils.py +37 -6
- ddeutil/workflow/workflow.py +205 -230
- {ddeutil_workflow-0.0.56.dist-info → ddeutil_workflow-0.0.57.dist-info}/METADATA +41 -35
- ddeutil_workflow-0.0.57.dist-info/RECORD +31 -0
- {ddeutil_workflow-0.0.56.dist-info → ddeutil_workflow-0.0.57.dist-info}/WHEEL +1 -1
- ddeutil_workflow-0.0.56.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.56.dist-info → ddeutil_workflow-0.0.57.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.56.dist-info → ddeutil_workflow-0.0.57.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.56.dist-info → ddeutil_workflow-0.0.57.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.56"
+__version__: str = "0.0.57"

ddeutil/workflow/__cron.py
CHANGED
@@ -502,10 +502,10 @@ class CronPart:
         except IndexError:
             next_value: int = -1
         if value != (next_value - 1):
-            # NOTE:
+            # NOTE: `next_value` is not the subsequent number
             if start_number is None:
                 # NOTE:
-                # The last number of the list
+                # The last number of the list `self.values` is not in a
                 # range.
                 multi_dim_values.append(value)
             else:
@@ -703,11 +703,14 @@ class CronJob:
         *,
         tz: str | None = None,
     ) -> CronRunner:
-        """Returns
-
+        """Returns CronRunner instance that be datetime runner with this
+        cronjob. It can use `next`, `prev`, or `reset` methods to generate
+        running date.

-        :param date: An initial date that want to mark as the start
-
+        :param date: (datetime) An initial date that want to mark as the start
+            point. (Default is use the current datetime)
+        :param tz: (str) A string timezone that want to change on runner.
+            (Default is None)

         :rtype: CronRunner
         """
@@ -743,6 +746,10 @@ class CronJobYear(CronJob):
 class CronRunner:
     """Create an instance of Date Runner object for datetime generate with
     cron schedule object value.
+
+    :param cron: (CronJob | CronJobYear)
+    :param date: (datetime)
+    :param tz: (str)
     """

     shift_limit: ClassVar[int] = 25
@@ -761,11 +768,17 @@ class CronRunner:
         cron: CronJob | CronJobYear,
         date: datetime | None = None,
         *,
-        tz: str | None = None,
+        tz: str | ZoneInfo | None = None,
     ) -> None:
-
-        self.tz: ZoneInfo = ZoneInfo("UTC")
+        self.tz: ZoneInfo | None = None
         if tz:
+            if isinstance(tz, ZoneInfo):
+                self.tz = tz
+            elif not isinstance(tz, str):
+                raise TypeError(
+                    "Invalid type of `tz` parameter, it should be str or "
+                    "ZoneInfo instance."
+                )
             try:
                 self.tz = ZoneInfo(tz)
             except ZoneInfoNotFoundError as err:
@@ -777,9 +790,10 @@ class CronRunner:
             raise ValueError(
                 "Input schedule start time is not a valid datetime object."
             )
-            if tz is None:
-                self.
-
+            if tz is not None:
+                self.date: datetime = date.astimezone(self.tz)
+            else:
+                self.date: datetime = date
         else:
             self.date: datetime = datetime.now(tz=self.tz)

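The `CronRunner` hunks above widen the `tz` parameter annotation to `str | ZoneInfo | None` and add an up-front type check that rejects anything else. A minimal usage sketch of that validation, assuming `CronJob` and `CronRunner` are importable from `ddeutil.workflow.__cron` and that `CronJob` accepts a five-field cron string; neither assumption is confirmed by this diff:

from zoneinfo import ZoneInfo

from ddeutil.workflow.__cron import CronJob, CronRunner  # assumed import path

cron = CronJob("*/5 * * * *")  # assumed constructor argument

# A timezone name is still resolved through ZoneInfo(...), and the start date
# is only converted with astimezone() when a timezone was actually given.
runner = CronRunner(cron, tz="Asia/Bangkok")

# Anything that is neither a str nor a ZoneInfo is rejected immediately:
# TypeError("Invalid type of `tz` parameter, it should be str or ZoneInfo instance.")
CronRunner(cron, tz=123)
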
ddeutil/workflow/__types.py
CHANGED
ddeutil/workflow/conf.py
CHANGED
@@ -200,7 +200,10 @@ class APIConfig:
         return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))


-class BaseLoad(ABC):
+class BaseLoad(ABC):  # pragma: no cov
+    """Base Load object is the abstraction object for any Load object that
+    should to inherit from this base class.
+    """

     @classmethod
     @abstractmethod
@@ -335,8 +338,13 @@ class FileLoad(BaseLoad):
         """
         excluded: list[str] = excluded or []
         path: Path = dynamic("conf_path", f=path, extras=extras)
+        paths: Optional[list[Path]] = paths or (extras or {}).get("conf_paths")
         if not paths:
             paths: list[Path] = [path]
+        elif not isinstance(paths, list):
+            raise TypeError(
+                f"Multi-config paths does not support for type: {type(paths)}"
+            )
         else:
             paths.append(path)

@@ -431,17 +439,21 @@ def dynamic(
     """Dynamic get config if extra value was passed at run-time.

     :param key: (str) A config key that get from Config object.
-    :param f: An inner config function scope.
+    :param f: (T) An inner config function scope.
     :param extras: An extra values that pass at run-time.
+
+    :rtype: T
     """
-
-
-    if
+    extra: Optional[T] = (extras or {}).get(key, None)
+    conf: Optional[T] = getattr(config, key, None) if f is None else f
+    if extra is None:
+        return conf
+    if not isinstance(extra, type(conf)):
         raise TypeError(
-            f"Type of config {key!r} from extras: {
-            f"as config {type(
+            f"Type of config {key!r} from extras: {extra!r} does not valid "
+            f"as config {type(conf)}."
         )
-    return
+    return extra


 class Loader(Protocol):  # pragma: no cov

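The rewritten `dynamic()` helper above resolves a config key in a fixed order: a run-time `extras` override wins, otherwise the inner scope value `f`, otherwise the attribute on the global `config` object, and an override whose type does not match the resolved config value raises `TypeError`. A small sketch of that resolution order, assuming `conf_path` is a `Path`-typed attribute of the package config, as the `FileLoad` hunk above implies:

from pathlib import Path

from ddeutil.workflow.conf import dynamic

# No extras and no `f`: falls back to getattr(config, "conf_path", None).
dynamic("conf_path")

# A same-typed override passed at run-time wins over both `f` and the config attribute.
dynamic("conf_path", f=Path("./conf"), extras={"conf_path": Path("/tmp/override-conf")})
# -> Path("/tmp/override-conf")

# A mismatched override type is rejected: str vs Path raises TypeError.
dynamic("conf_path", extras={"conf_path": "/tmp/override-conf"})
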
ddeutil/workflow/event.py
CHANGED
ddeutil/workflow/exceptions.py
CHANGED
@@ -9,16 +9,16 @@ annotate for handle error only.
 """
 from __future__ import annotations

-from typing import TypedDict
+from typing import Literal, TypedDict, overload

-
-
-
-
-
-
-
-
+
+class ErrorData(TypedDict):
+    """Error data type dict for typing necessary keys of return of to_dict func
+    and method.
+    """
+
+    name: str
+    message: str


 def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
@@ -29,20 +29,41 @@ def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
     :rtype: ErrorData
     """
     return {
-        "class": exception,
         "name": exception.__class__.__name__,
         "message": str(exception),
     }


 class BaseWorkflowException(Exception):
+    """Base Workflow exception class will implement the `refs` argument for
+    making an error context to the result context.
+    """
+
+    def __init__(self, message: str, *, refs: str | None = None):
+        super().__init__(message)
+        self.refs: str | None = refs
+
+    @overload
+    def to_dict(
+        self, with_refs: Literal[True] = ...
+    ) -> dict[str, ErrorData]: ...  # pragma: no cov
+
+    @overload
+    def to_dict(
+        self, with_refs: Literal[False] = ...
+    ) -> ErrorData: ...  # pragma: no cov

-    def to_dict(
+    def to_dict(
+        self, with_refs: bool = False
+    ) -> ErrorData | dict[str, ErrorData]:
         """Return ErrorData data from the current exception object.

         :rtype: ErrorData
         """
-
+        data: ErrorData = to_dict(self)
+        if with_refs and (self.refs is not None and self.refs != "EMPTY"):
+            return {self.refs: data}
+        return data


 class UtilException(BaseWorkflowException): ...

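The `refs` argument added above lets an exception carry the context key it should be filed under, and `to_dict(with_refs=True)` nests the error payload under that key, while a `refs` of None or "EMPTY" keeps the flat shape. A minimal sketch using `JobException`, a `BaseWorkflowException` subclass; the refs value here is invented for illustration:

from ddeutil.workflow.exceptions import JobException

err = JobException("Strategy break because stage returned `FAILED` status.", refs="9873503202")

err.to_dict()
# {"name": "JobException", "message": "Strategy break because stage returned `FAILED` status."}

err.to_dict(with_refs=True)
# {"9873503202": {"name": "JobException", "message": "..."}}

JobException("boom").to_dict(with_refs=True)
# refs is None, so the flat ErrorData shape comes back: {"name": "JobException", "message": "boom"}
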
ddeutil/workflow/job.py
CHANGED
@@ -18,8 +18,10 @@ method.
 from __future__ import annotations

 import copy
+import time
 from concurrent.futures import (
     FIRST_EXCEPTION,
+    CancelledError,
     Future,
     ThreadPoolExecutor,
     as_completed,
@@ -40,13 +42,12 @@ from .__types import DictData, DictStr, Matrix
 from .exceptions import (
     JobException,
     StageException,
-    UtilException,
     to_dict,
 )
 from .result import CANCEL, FAILED, SKIP, SUCCESS, WAIT, Result, Status
 from .reusables import has_template, param2template
 from .stages import Stage
-from .utils import
+from .utils import cross_product, filter_func, gen_id

 MatrixFilter = list[dict[str, Union[str, int]]]

@@ -380,7 +381,7 @@ class Job(BaseModel):

         :rtype: str
         """
-        return dedent(value)
+        return dedent(value.lstrip("\n"))

     @field_validator("stages", mode="after")
     def __validate_stage_id__(cls, value: list[Stage]) -> list[Stage]:
@@ -429,11 +430,14 @@ class Job(BaseModel):
             return stage
         raise ValueError(f"Stage {stage_id!r} does not exists in this job.")

-    def check_needs(
+    def check_needs(
+        self, jobs: dict[str, DictData]
+    ) -> Status:  # pragma: no cov
         """Return trigger status from checking job's need trigger rule logic was
         valid. The return status should be SUCCESS, FAILED, WAIT, or SKIP.

-        :param jobs: A mapping of job ID and its context
+        :param jobs: (dict[str, DictData]) A mapping of job ID and its context
+            data that return from execution process.

         :raise NotImplementedError: If the job trigger rule out of scope.

@@ -450,28 +454,34 @@ class Job(BaseModel):
         }
         if len(need_exist) != len(self.needs):
             return WAIT
-        elif all("skipped"
+        elif all(need_exist[job].get("skipped", False) for job in need_exist):
             return SKIP
         elif self.trigger_rule == Rule.ALL_DONE:
             return SUCCESS
         elif self.trigger_rule == Rule.ALL_SUCCESS:
             rs = all(
-
-
+                (
+                    "errors" not in need_exist[job]
+                    and not need_exist[job].get("skipped", False)
+                )
                 for job in need_exist
             )
         elif self.trigger_rule == Rule.ALL_FAILED:
             rs = all("errors" in need_exist[job] for job in need_exist)
         elif self.trigger_rule == Rule.ONE_SUCCESS:
             rs = sum(
-
-
+                (
+                    "errors" not in need_exist[job]
+                    and not need_exist[job].get("skipped", False)
+                )
                 for job in need_exist
             ) + 1 == len(self.needs)
         elif self.trigger_rule == Rule.ONE_FAILED:
             rs = sum("errors" in need_exist[job] for job in need_exist) == 1
         elif self.trigger_rule == Rule.NONE_SKIPPED:
-            rs = all(
+            rs = all(
+                not need_exist[job].get("skipped", False) for job in need_exist
+            )
         elif self.trigger_rule == Rule.NONE_FAILED:
             rs = all("errors" not in need_exist[job] for job in need_exist)
         else:  # pragma: no cov
@@ -613,20 +623,19 @@ class Job(BaseModel):
         :param event: (Event) An Event manager instance that use to cancel this
             execution if it forces stopped by parent execution.

-        :raise NotImplementedError: If the `runs-on` value does not implement on
-            this execution.
-
         :rtype: Result
         """
         result: Result = Result.construct_with_rs_or_id(
             run_id=run_id,
             parent_run_id=parent_run_id,
-            id_logic=(self.id or "
+            id_logic=(self.id or "EMPTY"),
             extras=self.extras,
         )

         result.trace.info(
-            f"[JOB]: Execute
+            f"[JOB]: Execute "
+            f"{''.join(self.runs_on.type.value.split('_')).title()}: "
+            f"{self.id!r}"
         )
         if self.runs_on.type == RunsOn.LOCAL:
             return local_execute(
@@ -647,12 +656,18 @@ class Job(BaseModel):
                 event=event,
             )

-        # pragma: no cov
         result.trace.error(
-            f"[JOB]: Execute not support runs-on: {self.runs_on.type!r}
+            f"[JOB]: Execute not support runs-on: {self.runs_on.type.value!r} "
+            f"yet."
         )
-
-
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": JobException(
+                    f"Execute runs-on type: {self.runs_on.type.value!r} does "
+                    f"not support yet."
+                ).to_dict(),
+            },
         )


@@ -664,10 +679,10 @@ def local_execute_strategy(
     result: Result | None = None,
     event: Event | None = None,
 ) -> Result:
-    """Local
-
+    """Local strategy execution with passing dynamic parameters from the
+    job execution and strategy matrix.

-    This execution is the minimum level of
+    This execution is the minimum level of job execution.
     It different with `self.execute` because this method run only one
     strategy and return with context of this strategy data.

@@ -684,22 +699,22 @@ def local_execute_strategy(
     :param event: (Event) An Event manager instance that use to cancel this
         execution if it forces stopped by parent execution.

-    :raise JobException: If
-
+    :raise JobException: If event was set.
+    :raise JobException: If stage execution raise any error as `StageException`.
+    :raise JobException: If the result from execution has `FAILED` status.

     :rtype: Result
     """
     result: Result = result or Result(
-        run_id=gen_id(job.id or "
+        run_id=gen_id(job.id or "EMPTY", unique=True),
         extras=job.extras,
     )
     if strategy:
         strategy_id: str = gen_id(strategy)
-        result.trace.info(f"[JOB]:
+        result.trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
         result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
     else:
         strategy_id: str = "EMPTY"
-        result.trace.info("[JOB]: Start Strategy: 'EMPTY'")

     context: DictData = copy.deepcopy(params)
     context.update({"matrix": strategy, "stages": {}})
@@ -714,11 +729,8 @@ def local_execute_strategy(
             continue

         if event and event.is_set():
-            error_msg: str =
-
-                "job strategy execution."
-            )
-            return result.catch(
+            error_msg: str = "Job strategy was canceled because event was set."
+            result.catch(
                 status=CANCEL,
                 context={
                     strategy_id: {
@@ -728,6 +740,7 @@ def local_execute_strategy(
                     },
                 },
             )
+            raise JobException(error_msg, refs=strategy_id)

         try:
             result.trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
@@ -738,8 +751,7 @@ def local_execute_strategy(
                 event=event,
             )
             stage.set_outputs(rs.context, to=context)
-        except
-            result.trace.error(f"[JOB]: {e.__class__.__name__}: {e}")
+        except StageException as e:
             result.catch(
                 status=FAILED,
                 context={
@@ -751,15 +763,15 @@ def local_execute_strategy(
                 },
             )
             raise JobException(
-                f"
+                message=f"Handler Error: {e.__class__.__name__}: {e}",
+                refs=strategy_id,
             ) from e

         if rs.status == FAILED:
             error_msg: str = (
-                f"Strategy break because stage, {stage.iden!r}, return
-                f"status."
+                f"Strategy break because stage, {stage.iden!r}, return "
+                f"`FAILED` status."
             )
-            result.trace.warning(f"[JOB]: {error_msg}")
             result.catch(
                 status=FAILED,
                 context={
@@ -770,7 +782,7 @@ def local_execute_strategy(
                     },
                 },
             )
-            raise JobException(error_msg)
+            raise JobException(error_msg, refs=strategy_id)

     return result.catch(
         status=SUCCESS,
@@ -792,11 +804,19 @@ def local_execute(
     event: Event | None = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
-    execution or
+    execution or directly. It will generate matrix values at the first
     step and run multithread on this metrics to the `stages` field of this job.

-
-
+    Important:
+        This method does not raise any `JobException` because it allows run
+        parallel mode. If it raises error from strategy execution, it will catch
+        that error and store it in the `errors` key with list of error.
+
+        {
+            "errors": [
+                {"name": "...", "message": "..."}, ...
+            ]
+        }

     :param job: (Job) A job model.
     :param params: (DictData) A parameter data.
@@ -810,20 +830,20 @@ def local_execute(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )

-    event: Event = Event()
+    event: Event = event or Event()
     fail_fast_flag: bool = job.strategy.fail_fast
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
     workers: int = job.strategy.max_parallel
     result.trace.info(
-        f"[JOB]: {ls}
+        f"[JOB]: Execute {ls}: {job.id} with {workers} "
         f"worker{'s' if workers > 1 else ''}."
     )

-    if event and event.is_set():
+    if event and event.is_set():
         return result.catch(
             status=CANCEL,
             context={
@@ -859,14 +879,16 @@ def local_execute(
         done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
         if len(done) != len(futures):
             result.trace.warning(
-                "[JOB]:
+                "[JOB]: Handler Fail-Fast: Got exception and set event."
             )
             event.set()
             for future in not_done:
                 future.cancel()
+            time.sleep(0.075)

         nd: str = f", strategies not run: {not_done}" if not_done else ""
-        result.trace.debug(f"...
+        result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
+        done: list[Future] = as_completed(futures)

     for future in done:
         try:
@@ -874,12 +896,14 @@ def local_execute(
         except JobException as e:
             status = FAILED
             result.trace.error(
-                f"[JOB]: {ls}
+                f"[JOB]: {ls} Error Handler:||{e.__class__.__name__}:||{e}"
             )
             if "errors" in context:
-                context["errors"].
+                context["errors"][e.refs] = e.to_dict()
             else:
-                context["errors"] =
+                context["errors"] = e.to_dict(with_refs=True)
+        except CancelledError:
+            pass
     return result.catch(status=status, context=context)


@@ -907,7 +931,7 @@ def self_hosted_execute(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )

@@ -953,7 +977,7 @@ def azure_batch_execute(
     run_id: str | None = None,
     parent_run_id: str | None = None,
     event: Event | None = None,
-) -> Result:  # pragma no cov
+) -> Result:  # pragma: no cov
     """Azure Batch job execution that will run all job's stages on the Azure
     Batch Node and extract the result file to be returning context result.

@@ -983,7 +1007,7 @@ def azure_batch_execute(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )
     if event and event.is_set():
@@ -1007,7 +1031,7 @@ def docker_execution(
     run_id: str | None = None,
     parent_run_id: str | None = None,
     event: Event | None = None,
-):
+):  # pragma: no cov
     """Docker job execution.

     Steps:
@@ -1018,7 +1042,7 @@ def docker_execution(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )
     if event and event.is_set():

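The reworked trigger-rule checks in `check_needs` above count a need as successful only when its returned context has no `errors` key and no `skipped` flag. A standalone sketch of the `ALL_SUCCESS` condition over an invented needs context; the job names and context shapes here are illustrative only:

# Invented contexts for two needed jobs: one finished cleanly, one was skipped.
need_exist: dict[str, dict] = {
    "extract": {"stages": {}},
    "transform": {"skipped": True},
}

# Rule.ALL_SUCCESS: every need finished without errors and without being skipped.
all_success: bool = all(
    ("errors" not in need_exist[job] and not need_exist[job].get("skipped", False))
    for job in need_exist
)
print(all_success)  # False, because "transform" carries the skipped flag
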
ddeutil/workflow/logs.py
CHANGED
@@ -29,7 +29,7 @@ from typing_extensions import Self

 from .__types import DictData, DictStr
 from .conf import config, dynamic
-from .utils import cut_id, get_dt_now
+from .utils import cut_id, get_dt_now, prepare_newline


 @lru_cache
@@ -71,7 +71,9 @@ def get_dt_tznow() -> datetime:  # pragma: no cov


 class TraceMeta(BaseModel):  # pragma: no cov
-    """Trace
+    """Trace Metadata model for making the current metadata of this CPU, Memory
+    process, and thread data.
+    """

     mode: Literal["stdout", "stderr"]
     datetime: str
@@ -91,6 +93,11 @@ class TraceMeta(BaseModel):  # pragma: no cov
     ) -> Self:
         """Make the current TraceMeta instance that catching local state.

+        :param mode: A metadata mode.
+        :param message: A message.
+        :param extras: (DictData) An extra parameter that want to override core
+            config values.
+
         :rtype: Self
         """
         frame_info: Traceback = getframeinfo(
@@ -232,7 +239,7 @@ class BaseTrace(ABC):  # pragma: no cov

         :param message: (str) A message that want to log.
         """
-        msg: str = self.make_message(message)
+        msg: str = prepare_newline(self.make_message(message))

         if mode != "debug" or (
             mode == "debug" and dynamic("debug", extras=self.extras)
@@ -445,6 +452,7 @@ class FileTrace(BaseTrace):  # pragma: no cov
     async def awriter(
         self, message: str, is_err: bool = False
     ) -> None:  # pragma: no cov
+        """Write with async mode."""
         if not dynamic("enable_write_log", extras=self.extras):
             return

@@ -744,7 +752,7 @@ class FileAudit(BaseAudit):

         # NOTE: Check environ variable was set for real writing.
         if not dynamic("enable_write_audit", extras=self.extras):
-            trace.debug("[
+            trace.debug("[AUDIT]: Skip writing log cause config was set")
             return self

         log_file: Path = (
@@ -813,7 +821,7 @@ class SQLiteAudit(BaseAudit):  # pragma: no cov

         # NOTE: Check environ variable was set for real writing.
         if not dynamic("enable_write_audit", extras=self.extras):
-            trace.debug("[
+            trace.debug("[AUDIT]: Skip writing log cause config was set")
            return self

         raise NotImplementedError("SQLiteAudit does not implement yet.")