ddeutil-workflow 0.0.55__py3-none-any.whl → 0.0.57__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +26 -12
- ddeutil/workflow/__init__.py +4 -2
- ddeutil/workflow/__main__.py +30 -0
- ddeutil/workflow/__types.py +1 -0
- ddeutil/workflow/conf.py +163 -101
- ddeutil/workflow/{cron.py → event.py} +37 -20
- ddeutil/workflow/exceptions.py +44 -14
- ddeutil/workflow/job.py +87 -58
- ddeutil/workflow/logs.py +13 -5
- ddeutil/workflow/result.py +9 -4
- ddeutil/workflow/scheduler.py +38 -73
- ddeutil/workflow/stages.py +370 -147
- ddeutil/workflow/utils.py +37 -6
- ddeutil/workflow/workflow.py +243 -302
- {ddeutil_workflow-0.0.55.dist-info → ddeutil_workflow-0.0.57.dist-info}/METADATA +41 -35
- ddeutil_workflow-0.0.57.dist-info/RECORD +31 -0
- {ddeutil_workflow-0.0.55.dist-info → ddeutil_workflow-0.0.57.dist-info}/WHEEL +1 -1
- ddeutil_workflow-0.0.57.dist-info/entry_points.txt +2 -0
- ddeutil_workflow-0.0.55.dist-info/RECORD +0 -30
- {ddeutil_workflow-0.0.55.dist-info → ddeutil_workflow-0.0.57.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.55.dist-info → ddeutil_workflow-0.0.57.dist-info}/top_level.txt +0 -0
ddeutil/workflow/exceptions.py
CHANGED
@@ -9,31 +9,61 @@ annotate for handle error only.
 """
 from __future__ import annotations
 
-from typing import TypedDict
+from typing import Literal, TypedDict, overload
 
-
-
-
-
-
-
-
-
+
+class ErrorData(TypedDict):
+    """Error data type dict for typing necessary keys of return of to_dict func
+    and method.
+    """
+
+    name: str
+    message: str
 
 
 def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
-    """Create dict data from exception instance.
+    """Create dict data from exception instance.
+
+    :param exception: An exception object.
+
+    :rtype: ErrorData
+    """
     return {
-        "class": exception,
         "name": exception.__class__.__name__,
         "message": str(exception),
     }
 
 
 class BaseWorkflowException(Exception):
-
-
-
+    """Base Workflow exception class will implement the `refs` argument for
+    making an error context to the result context.
+    """
+
+    def __init__(self, message: str, *, refs: str | None = None):
+        super().__init__(message)
+        self.refs: str | None = refs
+
+    @overload
+    def to_dict(
+        self, with_refs: Literal[True] = ...
+    ) -> dict[str, ErrorData]: ...  # pragma: no cov
+
+    @overload
+    def to_dict(
+        self, with_refs: Literal[False] = ...
+    ) -> ErrorData: ...  # pragma: no cov
+
+    def to_dict(
+        self, with_refs: bool = False
+    ) -> ErrorData | dict[str, ErrorData]:
+        """Return ErrorData data from the current exception object.
+
+        :rtype: ErrorData
+        """
+        data: ErrorData = to_dict(self)
+        if with_refs and (self.refs is not None and self.refs != "EMPTY"):
+            return {self.refs: data}
+        return data
 
 
 class UtilException(BaseWorkflowException): ...
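Note: the new `refs` argument and the overloaded `to_dict` let callers key an error payload by the strategy or stage that produced it. A minimal sketch of exercising this API with `JobException` (which job.py imports from this module); the refs value here is an arbitrary example ID, not one produced by the library:

    from ddeutil.workflow.exceptions import JobException

    try:
        raise JobException("Strategy break because a stage failed.", refs="2150810470")
    except JobException as e:
        # Plain ErrorData payload: {"name": "JobException", "message": "..."}
        print(e.to_dict())
        # Keyed by the refs value when with_refs=True:
        # {"2150810470": {"name": "JobException", "message": "..."}}
        print(e.to_dict(with_refs=True))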
ddeutil/workflow/job.py
CHANGED
@@ -18,8 +18,10 @@ method.
 from __future__ import annotations
 
 import copy
+import time
 from concurrent.futures import (
     FIRST_EXCEPTION,
+    CancelledError,
     Future,
     ThreadPoolExecutor,
     as_completed,
@@ -40,13 +42,12 @@ from .__types import DictData, DictStr, Matrix
 from .exceptions import (
     JobException,
     StageException,
-    UtilException,
     to_dict,
 )
 from .result import CANCEL, FAILED, SKIP, SUCCESS, WAIT, Result, Status
 from .reusables import has_template, param2template
 from .stages import Stage
-from .utils import
+from .utils import cross_product, filter_func, gen_id
 
 MatrixFilter = list[dict[str, Union[str, int]]]
 
@@ -140,14 +141,19 @@ class Strategy(BaseModel):
 
     fail_fast: bool = Field(
         default=False,
+        description=(
+            "A fail-fast flag that use to cancel strategy execution when it "
+            "has some execution was failed."
+        ),
         alias="fail-fast",
     )
     max_parallel: int = Field(
         default=1,
         gt=0,
+        lt=10,
         description=(
             "The maximum number of executor thread pool that want to run "
-            "parallel"
+            "parallel. This value should gather than 0 and less than 10."
         ),
         alias="max-parallel",
     )
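Note: for reference, a sketch of how the tightened `Strategy` model behaves under these bounds. It assumes `Strategy` is importable from `ddeutil.workflow.job` and that the package runs on Pydantic v2 with the alias keys shown above; the values are illustrative:

    from pydantic import ValidationError

    from ddeutil.workflow.job import Strategy

    # Accepted: run at most 4 matrix combinations at once and stop on first failure.
    strategy = Strategy.model_validate({"fail-fast": True, "max-parallel": 4})
    print(strategy.fail_fast, strategy.max_parallel)  # True 4

    # Rejected after this change: max-parallel must stay below 10.
    try:
        Strategy.model_validate({"fail-fast": False, "max-parallel": 20})
    except ValidationError as exc:
        print(exc.error_count(), "validation error")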
@@ -375,7 +381,7 @@ class Job(BaseModel):
 
         :rtype: str
         """
-        return dedent(value)
+        return dedent(value.lstrip("\n"))
 
     @field_validator("stages", mode="after")
     def __validate_stage_id__(cls, value: list[Stage]) -> list[Stage]:
@@ -424,11 +430,14 @@ class Job(BaseModel):
                 return stage
         raise ValueError(f"Stage {stage_id!r} does not exists in this job.")
 
-    def check_needs(
+    def check_needs(
+        self, jobs: dict[str, DictData]
+    ) -> Status:  # pragma: no cov
         """Return trigger status from checking job's need trigger rule logic was
         valid. The return status should be SUCCESS, FAILED, WAIT, or SKIP.
 
-        :param jobs: A mapping of job ID and its context
+        :param jobs: (dict[str, DictData]) A mapping of job ID and its context
+            data that return from execution process.
 
         :raise NotImplementedError: If the job trigger rule out of scope.
 
@@ -445,28 +454,34 @@ class Job(BaseModel):
         }
         if len(need_exist) != len(self.needs):
             return WAIT
-        elif all("skipped"
+        elif all(need_exist[job].get("skipped", False) for job in need_exist):
             return SKIP
         elif self.trigger_rule == Rule.ALL_DONE:
             return SUCCESS
         elif self.trigger_rule == Rule.ALL_SUCCESS:
             rs = all(
-
-
+                (
+                    "errors" not in need_exist[job]
+                    and not need_exist[job].get("skipped", False)
+                )
                 for job in need_exist
             )
         elif self.trigger_rule == Rule.ALL_FAILED:
             rs = all("errors" in need_exist[job] for job in need_exist)
         elif self.trigger_rule == Rule.ONE_SUCCESS:
             rs = sum(
-
-
+                (
+                    "errors" not in need_exist[job]
+                    and not need_exist[job].get("skipped", False)
+                )
                 for job in need_exist
             ) + 1 == len(self.needs)
         elif self.trigger_rule == Rule.ONE_FAILED:
             rs = sum("errors" in need_exist[job] for job in need_exist) == 1
         elif self.trigger_rule == Rule.NONE_SKIPPED:
-            rs = all(
+            rs = all(
+                not need_exist[job].get("skipped", False) for job in need_exist
+            )
         elif self.trigger_rule == Rule.NONE_FAILED:
             rs = all("errors" not in need_exist[job] for job in need_exist)
         else:  # pragma: no cov
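Note: the rule evaluation above keys off two optional markers in each upstream job's context: an `errors` mapping (set when the job failed) and a `skipped` flag. A standalone sketch of the `ALL_SUCCESS` case, mirroring the logic in the diff rather than calling the model directly; the job IDs and context shapes are illustrative only:

    from typing import Any

    def all_success(need_exist: dict[str, dict[str, Any]]) -> bool:
        # A need counts as successful only if it neither errored nor was skipped.
        return all(
            "errors" not in ctx and not ctx.get("skipped", False)
            for ctx in need_exist.values()
        )

    print(all_success({"extract": {"stages": {}}, "load": {"skipped": True}}))  # False
    print(all_success({"extract": {"stages": {}}, "load": {"stages": {}}}))     # True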
@@ -608,20 +623,19 @@ class Job(BaseModel):
         :param event: (Event) An Event manager instance that use to cancel this
             execution if it forces stopped by parent execution.
 
-        :raise NotImplementedError: If the `runs-on` value does not implement on
-            this execution.
-
         :rtype: Result
         """
         result: Result = Result.construct_with_rs_or_id(
             run_id=run_id,
             parent_run_id=parent_run_id,
-            id_logic=(self.id or "
+            id_logic=(self.id or "EMPTY"),
             extras=self.extras,
         )
 
         result.trace.info(
-            f"[JOB]: Execute
+            f"[JOB]: Execute "
+            f"{''.join(self.runs_on.type.value.split('_')).title()}: "
+            f"{self.id!r}"
         )
         if self.runs_on.type == RunsOn.LOCAL:
             return local_execute(
@@ -642,12 +656,18 @@ class Job(BaseModel):
                 event=event,
             )
 
-        # pragma: no cov
         result.trace.error(
-            f"[JOB]: Execute not support runs-on: {self.runs_on.type!r}
+            f"[JOB]: Execute not support runs-on: {self.runs_on.type.value!r} "
+            f"yet."
         )
-
-
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": JobException(
+                    f"Execute runs-on type: {self.runs_on.type.value!r} does "
+                    f"not support yet."
+                ).to_dict(),
+            },
         )
 
 
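Note: downstream code can now distinguish an unsupported `runs-on` target by result status rather than by a log line. A hedged sketch of consuming that result; the exact `execute` call signature is assumed from the parameters shown in this diff, and `my_job` stands in for an already-loaded `Job`:

    from ddeutil.workflow.result import FAILED

    rs = my_job.execute(params={}, run_id="demo-run-id")  # my_job: a loaded Job instance
    if rs.status == FAILED and "errors" in rs.context:
        # The error payload is the ErrorData dict produced by JobException.to_dict().
        print(rs.context["errors"]["name"], "-", rs.context["errors"]["message"])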
@@ -659,10 +679,10 @@ def local_execute_strategy(
     result: Result | None = None,
     event: Event | None = None,
 ) -> Result:
-    """Local
-
+    """Local strategy execution with passing dynamic parameters from the
+    job execution and strategy matrix.
 
-    This execution is the minimum level of
+    This execution is the minimum level of job execution.
     It different with `self.execute` because this method run only one
     strategy and return with context of this strategy data.
 
@@ -679,22 +699,22 @@ def local_execute_strategy(
     :param event: (Event) An Event manager instance that use to cancel this
         execution if it forces stopped by parent execution.
 
-    :raise JobException: If
-
+    :raise JobException: If event was set.
+    :raise JobException: If stage execution raise any error as `StageException`.
+    :raise JobException: If the result from execution has `FAILED` status.
 
     :rtype: Result
     """
     result: Result = result or Result(
-        run_id=gen_id(job.id or "
+        run_id=gen_id(job.id or "EMPTY", unique=True),
         extras=job.extras,
     )
     if strategy:
         strategy_id: str = gen_id(strategy)
-        result.trace.info(f"[JOB]:
+        result.trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
         result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
     else:
         strategy_id: str = "EMPTY"
-        result.trace.info("[JOB]: Start Strategy: 'EMPTY'")
 
     context: DictData = copy.deepcopy(params)
     context.update({"matrix": strategy, "stages": {}})
@@ -709,11 +729,8 @@ def local_execute_strategy(
             continue
 
         if event and event.is_set():
-            error_msg: str =
-
-                "job strategy execution."
-            )
-            return result.catch(
+            error_msg: str = "Job strategy was canceled because event was set."
+            result.catch(
                 status=CANCEL,
                 context={
                     strategy_id: {
@@ -723,6 +740,7 @@ def local_execute_strategy(
                     },
                 },
             )
+            raise JobException(error_msg, refs=strategy_id)
 
         try:
             result.trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
@@ -733,8 +751,7 @@ def local_execute_strategy(
                 event=event,
             )
             stage.set_outputs(rs.context, to=context)
-        except
-            result.trace.error(f"[JOB]: {e.__class__.__name__}: {e}")
+        except StageException as e:
             result.catch(
                 status=FAILED,
                 context={
@@ -746,15 +763,15 @@ def local_execute_strategy(
                 },
             )
             raise JobException(
-                f"
+                message=f"Handler Error: {e.__class__.__name__}: {e}",
+                refs=strategy_id,
             ) from e
 
         if rs.status == FAILED:
             error_msg: str = (
-                f"Strategy break because stage, {stage.iden!r}, return
-                f"status."
+                f"Strategy break because stage, {stage.iden!r}, return "
+                f"`FAILED` status."
             )
-            result.trace.warning(f"[JOB]: {error_msg}")
             result.catch(
                 status=FAILED,
                 context={
@@ -765,7 +782,7 @@ def local_execute_strategy(
                     },
                 },
             )
-            raise JobException(error_msg)
+            raise JobException(error_msg, refs=strategy_id)
 
     return result.catch(
         status=SUCCESS,
@@ -787,11 +804,19 @@ def local_execute(
     event: Event | None = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
-    execution or
+    execution or directly. It will generate matrix values at the first
     step and run multithread on this metrics to the `stages` field of this job.
 
-
-
+    Important:
+        This method does not raise any `JobException` because it allows run
+        parallel mode. If it raises error from strategy execution, it will catch
+        that error and store it in the `errors` key with list of error.
+
+        {
+            "errors": [
+                {"name": "...", "message": "..."}, ...
+            ]
+        }
 
     :param job: (Job) A job model.
     :param params: (DictData) A parameter data.
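Note: with the `refs`-aware handler added further down in this file, the aggregated errors actually end up keyed by strategy ID rather than stored as the flat list the docstring sketches. An illustrative shape of the final context when two strategies fail; the keys are stand-ins for `gen_id()` hashes of each strategy matrix:

    context = {
        "errors": {
            "6806712842": {"name": "JobException", "message": "Handler Error: ..."},
            "9410985524": {"name": "JobException", "message": "Strategy break because ..."},
        },
    }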
@@ -805,20 +830,20 @@ def local_execute(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )
 
-    event: Event = Event()
+    event: Event = event or Event()
     fail_fast_flag: bool = job.strategy.fail_fast
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
     workers: int = job.strategy.max_parallel
     result.trace.info(
-        f"[JOB]: {ls}
+        f"[JOB]: Execute {ls}: {job.id} with {workers} "
         f"worker{'s' if workers > 1 else ''}."
     )
 
-    if event and event.is_set():
+    if event and event.is_set():
         return result.catch(
             status=CANCEL,
             context={
@@ -854,14 +879,16 @@ def local_execute(
             done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
             if len(done) != len(futures):
                 result.trace.warning(
-                    "[JOB]:
+                    "[JOB]: Handler Fail-Fast: Got exception and set event."
                 )
                 event.set()
                 for future in not_done:
                     future.cancel()
+                time.sleep(0.075)
 
             nd: str = f", strategies not run: {not_done}" if not_done else ""
-            result.trace.debug(f"...
+            result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
+            done: list[Future] = as_completed(futures)
 
         for future in done:
             try:
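Note: the fail-fast branch above is the standard `concurrent.futures` pattern: wait for the first exception, signal a shared event so running tasks can stop cooperatively, and cancel whatever has not started yet. A self-contained sketch of that pattern outside the package, using only the standard library:

    from concurrent.futures import (
        FIRST_EXCEPTION,
        CancelledError,
        ThreadPoolExecutor,
        as_completed,
        wait,
    )
    from threading import Event

    def work(n: int, stop: Event) -> int:
        if stop.is_set():                    # cooperative cancellation point
            raise RuntimeError(f"task {n} stopped early")
        if n == 2:
            raise ValueError("boom")         # the first failure triggers fail-fast
        return n

    stop = Event()
    with ThreadPoolExecutor(max_workers=2) as executor:
        futures = [executor.submit(work, i, stop) for i in range(6)]
        done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
        if len(done) != len(futures):
            stop.set()                       # running tasks observe the event
            for future in not_done:
                future.cancel()              # queued tasks never start
        for future in as_completed(futures):
            try:
                print("ok:", future.result())
            except CancelledError:
                print("cancelled before start")
            except Exception as exc:
                print("failed:", exc)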
@@ -869,12 +896,14 @@ def local_execute(
             except JobException as e:
                 status = FAILED
                 result.trace.error(
-                    f"[JOB]: {ls}
+                    f"[JOB]: {ls} Error Handler:||{e.__class__.__name__}:||{e}"
                 )
                 if "errors" in context:
-                    context["errors"].
+                    context["errors"][e.refs] = e.to_dict()
                 else:
-                    context["errors"] =
+                    context["errors"] = e.to_dict(with_refs=True)
+            except CancelledError:
+                pass
     return result.catch(status=status, context=context)
 
 
@@ -902,7 +931,7 @@ def self_hosted_execute(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )
 
@@ -948,7 +977,7 @@ def azure_batch_execute(
     run_id: str | None = None,
     parent_run_id: str | None = None,
     event: Event | None = None,
-) -> Result:  # pragma no cov
+) -> Result:  # pragma: no cov
     """Azure Batch job execution that will run all job's stages on the Azure
     Batch Node and extract the result file to be returning context result.
 
@@ -978,7 +1007,7 @@ def azure_batch_execute(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )
     if event and event.is_set():
@@ -1002,7 +1031,7 @@ def docker_execution(
     run_id: str | None = None,
     parent_run_id: str | None = None,
     event: Event | None = None,
-):
+):  # pragma: no cov
     """Docker job execution.
 
     Steps:
@@ -1013,7 +1042,7 @@ def docker_execution(
     result: Result = Result.construct_with_rs_or_id(
         run_id=run_id,
         parent_run_id=parent_run_id,
-        id_logic=(job.id or "
+        id_logic=(job.id or "EMPTY"),
         extras=job.extras,
     )
     if event and event.is_set():
ddeutil/workflow/logs.py
CHANGED
@@ -29,7 +29,7 @@ from typing_extensions import Self
 
 from .__types import DictData, DictStr
 from .conf import config, dynamic
-from .utils import cut_id, get_dt_now
+from .utils import cut_id, get_dt_now, prepare_newline
 
 
 @lru_cache
@@ -71,7 +71,9 @@ def get_dt_tznow() -> datetime:  # pragma: no cov
 
 
 class TraceMeta(BaseModel):  # pragma: no cov
-    """Trace
+    """Trace Metadata model for making the current metadata of this CPU, Memory
+    process, and thread data.
+    """
 
     mode: Literal["stdout", "stderr"]
     datetime: str
@@ -91,6 +93,11 @@ class TraceMeta(BaseModel):  # pragma: no cov
     ) -> Self:
         """Make the current TraceMeta instance that catching local state.
 
+        :param mode: A metadata mode.
+        :param message: A message.
+        :param extras: (DictData) An extra parameter that want to override core
+            config values.
+
         :rtype: Self
         """
         frame_info: Traceback = getframeinfo(
@@ -232,7 +239,7 @@ class BaseTrace(ABC):  # pragma: no cov
 
         :param message: (str) A message that want to log.
         """
-        msg: str = self.make_message(message)
+        msg: str = prepare_newline(self.make_message(message))
 
         if mode != "debug" or (
             mode == "debug" and dynamic("debug", extras=self.extras)
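Note: `prepare_newline` comes from the package's `utils` module (also changed in this release), and its implementation is not shown in this diff. As a rough illustration of the kind of hook this is, the sketch below applies a hypothetical normalizer of the same shape to a `||`-delimited message like the one logged by `local_execute` above; the delimiter and formatting are assumptions, not the library's documented behavior:

    def normalize_message(message: str) -> str:
        # Hypothetical stand-in for prepare_newline(): split a "||"-delimited
        # message onto separate, aligned lines. The real helper may differ.
        parts = [part.strip() for part in message.split("||")]
        return "\n    ... ".join(parts)

    msg = "[JOB]: Fail-Fast Error Handler:||JobException:||Strategy break because stage failed."
    print(normalize_message(msg))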
@@ -445,6 +452,7 @@ class FileTrace(BaseTrace):  # pragma: no cov
     async def awriter(
         self, message: str, is_err: bool = False
     ) -> None:  # pragma: no cov
+        """Write with async mode."""
         if not dynamic("enable_write_log", extras=self.extras):
             return
 
@@ -744,7 +752,7 @@ class FileAudit(BaseAudit):
 
         # NOTE: Check environ variable was set for real writing.
        if not dynamic("enable_write_audit", extras=self.extras):
-            trace.debug("[
+            trace.debug("[AUDIT]: Skip writing log cause config was set")
            return self
 
        log_file: Path = (
@@ -813,7 +821,7 @@ class SQLiteAudit(BaseAudit):  # pragma: no cov
 
        # NOTE: Check environ variable was set for real writing.
        if not dynamic("enable_write_audit", extras=self.extras):
-            trace.debug("[
+            trace.debug("[AUDIT]: Skip writing log cause config was set")
            return self
 
        raise NotImplementedError("SQLiteAudit does not implement yet.")
ddeutil/workflow/result.py
CHANGED
@@ -37,6 +37,14 @@ class Status(IntEnum):
     SKIP: int = 3
     CANCEL: int = 4
 
+    @property
+    def emoji(self) -> str:
+        """Return the emoji value of this status.
+
+        :rtype: str
+        """
+        return {0: "✅", 1: "❌", 2: "🟡", 3: "⏩", 4: "🚫"}[self.value]
+
 
 SUCCESS = Status.SUCCESS
 FAILED = Status.FAILED
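Note: a small usage sketch for the new `emoji` property; the import path follows the `result` module shown here:

    from ddeutil.workflow.result import Status

    for status in Status:
        # Prints one line per member, e.g. "✅ SUCCESS", "❌ FAILED", "🚫 CANCEL".
        print(f"{status.emoji} {status.name}")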
@@ -46,10 +54,7 @@ CANCEL = Status.CANCEL
 
 
 @dataclass(
-    config=ConfigDict(
-        arbitrary_types_allowed=True,
-        use_enum_values=True,
-    ),
+    config=ConfigDict(arbitrary_types_allowed=True, use_enum_values=True),
 )
 class Result:
     """Result Pydantic Model for passing and receiving data context from any