ddeutil-workflow 0.0.82__py3-none-any.whl → 0.0.84__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +1 -1
- ddeutil/workflow/__init__.py +3 -2
- ddeutil/workflow/__types.py +10 -1
- ddeutil/workflow/audits.py +64 -41
- ddeutil/workflow/errors.py +3 -0
- ddeutil/workflow/event.py +34 -11
- ddeutil/workflow/job.py +5 -15
- ddeutil/workflow/result.py +41 -12
- ddeutil/workflow/stages.py +825 -333
- ddeutil/workflow/traces.py +9 -5
- ddeutil/workflow/utils.py +45 -20
- ddeutil/workflow/workflow.py +71 -80
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.84.dist-info}/METADATA +1 -1
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.84.dist-info}/RECORD +19 -19
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.84.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.84.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.84.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.84.dist-info}/top_level.txt +0 -0
ddeutil/workflow/stages.py
CHANGED
@@ -71,7 +71,9 @@ from typing import (
     Annotated,
     Any,
     Callable,
+    ClassVar,
     Optional,
+    TypedDict,
     TypeVar,
     Union,
     get_type_hints,
@@ -80,10 +82,10 @@ from typing import (
 from ddeutil.core import str2list
 from pydantic import BaseModel, Field, ValidationError
 from pydantic.functional_validators import field_validator, model_validator
-from typing_extensions import Self
+from typing_extensions import NotRequired, Self
 
 from .__about__ import __python_version__
-from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr
+from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr, cast_dict
 from .conf import dynamic, pass_env
 from .errors import (
     StageCancelError,
@@ -117,6 +119,7 @@ from .traces import Trace, get_trace
 from .utils import (
     delay,
     dump_all,
+    extract_id,
     filter_func,
     gen_id,
     make_exec,
@@ -162,6 +165,7 @@ class BaseStage(BaseModel, ABC):
     ```
     """
 
+    action_stage: ClassVar[bool] = False
     extras: DictData = Field(
         default_factory=dict,
         description="An extra parameter that override core config values.",
@@ -202,13 +206,13 @@ class BaseStage(BaseModel, ABC):
         return self.id or self.name
 
     @field_validator("desc", mode="after")
-    def ___prepare_desc__(cls, value: str) -> str:
+    def ___prepare_desc__(cls, value: Optional[str]) -> Optional[str]:
         """Prepare description string that was created on a template.
 
         Returns:
            str: A dedent and left strip newline of description string.
        """
-        return dedent(value.lstrip("\n"))
+        return value if value is None else dedent(value.lstrip("\n"))
 
     @model_validator(mode="after")
     def __prepare_running_id(self) -> Self:
@@ -230,6 +234,19 @@ class BaseStage(BaseModel, ABC):
         )
         return self
 
+    def pass_template(self, value: Any, params: DictData) -> Any:
+        """Pass template and environment variable to any value that can
+        templating.
+
+        Args:
+            value (Any): An any value.
+            params (DictData):
+
+        Returns:
+            Any: A templated value.
+        """
+        return pass_env(param2template(value, params, extras=self.extras))
+
     @abstractmethod
     def process(
         self,
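The new `BaseStage.pass_template` helper folds the two resolution steps that stages previously chained by hand (`param2template` for `${{ ... }}` values, then `pass_env` for environment markers) into one call; later hunks use it for `condition`, `args`, `foreach`, and `item`. A minimal sketch of the equivalent call, with an illustrative stage and parameter payload (the field value and template key here are assumptions, not taken from the package docs):

```python
from ddeutil.workflow.stages import EmptyStage

# Hypothetical stage with a templated message; `echo` is the EmptyStage field
# used elsewhere in this diff.
stage = EmptyStage(name="demo", echo="Hello ${{ params.name }}")
params = {"params": {"name": "world"}}

# Equivalent to: pass_env(param2template(value, params, extras=stage.extras))
resolved = stage.pass_template(stage.echo, params)
print(resolved)  # expected: "Hello world" under the default template config
```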
@@ -244,10 +261,10 @@ class BaseStage(BaseModel, ABC):
         This is important method that make this class is able to be the stage.
 
         Args:
-            params: A parameter data that want to use in this
+            params (DictData): A parameter data that want to use in this
                 execution.
-            run_id: A running stage ID.
-            context: A context data.
+            run_id (str): A running stage ID.
+            context (DictData): A context data.
             parent_run_id: A parent running ID. (Default is None)
             event: An event manager that use to track parent process
                 was not force stopped.
@@ -300,8 +317,9 @@ class BaseStage(BaseModel, ABC):
             Result: The execution result with updated status and context.
         """
         ts: float = time.monotonic()
-        parent_run_id
-
+        parent_run_id, run_id = extract_id(
+            self.iden, run_id=run_id, extras=self.extras
+        )
         context: DictData = {"status": WAIT}
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
@@ -353,15 +371,12 @@ class BaseStage(BaseModel, ABC):
             StageError,
         ) as e:  # pragma: no cov
             if isinstance(e, StageNestedError):
-                trace.info(f"[STAGE]:
+                trace.info(f"[STAGE]: Nested: {e}")
+            elif isinstance(e, (StageSkipError, StageNestedSkipError)):
+                trace.info(f"[STAGE]: ⏭️ Skip: {e}")
             else:
-                emoji: str = (
-                    "⏭️"
-                    if isinstance(e, (StageSkipError, StageNestedSkipError))
-                    else "🚨"
-                )
                 trace.info(
-                    f"[STAGE]:
+                    f"[STAGE]: Stage Failed:||🚨 {traceback.format_exc()}||"
                 )
             st: Status = get_status_from_error(e)
             return Result(
@@ -381,7 +396,9 @@ class BaseStage(BaseModel, ABC):
                 extras=self.extras,
             )
         except Exception as e:
-            trace.error(
+            trace.error(
+                f"[STAGE]: Error Failed:||🚨 {traceback.format_exc()}||"
+            )
             return Result(
                 run_id=run_id,
                 parent_run_id=parent_run_id,
@@ -492,10 +509,12 @@ class BaseStage(BaseModel, ABC):
         """Get the outputs from stages data. It will get this stage ID from
         the stage outputs mapping.
 
-        :
-            stage
+        Args:
+            output (DictData): A stage output context that want to get this
+                stage ID `outputs` key.
 
-        :
+        Returns:
+            DictData: An output value that have get with its identity.
         """
         if self.id is None and not dynamic(
             "stage_default_id", extras=self.extras
@@ -533,7 +552,7 @@ class BaseStage(BaseModel, ABC):
            # should use the `re` module to validate eval-string before
            # running.
            rs: bool = eval(
-
+                self.pass_template(self.condition, params),
                globals() | params,
                {},
            )
@@ -564,16 +583,136 @@ class BaseStage(BaseModel, ABC):
     def is_nested(self) -> bool:
         """Return true if this stage is nested stage.
 
-        :
+        Returns:
+            bool: True if this stage is nested stage.
         """
         return False
 
-    def
+    def detail(self) -> DictData:  # pragma: no cov
+        """Return the detail of this stage for generate markdown.
+
+        Returns:
+            DictData: A dict that was dumped from this model with alias mode.
+        """
+        return self.model_dump(
+            by_alias=True,
+            exclude_defaults=True,
+            exclude={"extras", "id", "name", "desc"},
+        )
+
+    def md(self, level: int = 1) -> str:  # pragma: no cov
         """Return generated document that will be the interface of this stage.
 
-        :
+        Args:
+            level (int, default 0): A header level that want to generate
+                markdown content.
+
+        Returns:
+            str
+        """
+        assert level >= 1, "Header level should gather than 0"
+
+        def align_newline(value: Optional[str]) -> str:
+            space: str = " " * 16
+            if value is None:
+                return ""
+            return value.rstrip("\n").replace("\n", f"\n{space}")
+
+        header: str = "#" * level
+        return dedent(
+            f"""
+            {header} Stage: {self.iden}\n
+            {align_newline(self.desc)}\n
+            #{header} Parameters\n
+            | name | type | default | description |
+            | --- | --- | --- | : --- : |\n\n
+            #{header} Details\n
+            ```json
+            {self.detail()}
+            ```
+            """.lstrip(
+                "\n"
+            )
+        )
+
+    def dryrun(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Optional[Result]:
+        """Pre-process method that will use to run with dry-run mode, and it
+        should be used replace of process method when workflow release set with
+        DRYRUN mode.
+
+        By default, this method will set logic to convert this stage model
+        to am EmptyStage if it is action stage before use process method
+        instead process itself.
+
+        Args:
+            params (DictData): A parameter data that want to use in this
+                execution.
+            run_id (str): A running stage ID.
+            context (DictData): A context data.
+            parent_run_id (str, default None): A parent running ID.
+            event (Event, default None): An event manager that use to track
+                parent process was not force stopped.
+
+        Returns:
+            Result: The execution result with status and context data.
+        """
+        trace: Trace = get_trace(
+            run_id, parent_run_id=parent_run_id, extras=self.extras
+        )
+        trace.debug("[STAGE]: Start Dryrun ...")
+        if self.action_stage:
+            return self.to_empty().process(
+                params,
+                run_id,
+                context,
+                parent_run_id=parent_run_id,
+                event=event,
+            )
+        return self.process(
+            params, run_id, context, parent_run_id=parent_run_id, event=event
+        )
+
+    def to_empty(
+        self,
+        sleep: int = 0.35,
+        *,
+        message: Optional[str] = None,
+    ) -> EmptyStage:
+        """Convert the current Stage model to the EmptyStage model for dry-run
+        mode if the `action_stage` class attribute has set.
+
+        Args:
+            sleep (int, default 0.35): An adjustment sleep time.
+            message (str, default None): A message that want to override default
+                message on EmptyStage model.
+
+        Returns:
+            EmptyStage: An EmptyStage model that passing itself model data to
+                message.
         """
-
+        if isinstance(self, EmptyStage):
+            return self.model_copy(update={"sleep": sleep})
+        return EmptyStage.model_validate(
+            {
+                "name": self.name,
+                "id": self.id,
+                "desc": self.desc,
+                "if": self.condition,
+                "echo": (
+                    message
+                    or f"Convert from {self.__class__.__name__} to EmptyStage"
+                ),
+                "sleep": sleep,
+            }
+        )
 
 
 class BaseAsyncStage(BaseStage, ABC):
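Together, `action_stage`, `dryrun`, and `to_empty` give every stage a dry-run path: stages flagged as action stages (Bash, Py, and Call in this diff) are swapped for an `EmptyStage` that only logs a message, while non-action stages keep running their normal `process`. A hedged sketch of the conversion on a BashStage (the constructor arguments are illustrative):

```python
from ddeutil.workflow.stages import BashStage, EmptyStage

stage = BashStage(name="build", bash="echo 'building ...'")

# BashStage sets `action_stage = True` in this release, so a dry-run release
# would route through to_empty() instead of spawning a subprocess.
empty = stage.to_empty(message="dry-run placeholder")
assert isinstance(empty, EmptyStage)
print(empty.echo)   # "dry-run placeholder"
print(empty.sleep)  # 0.35 by default
```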
@@ -686,15 +825,12 @@ class BaseAsyncStage(BaseStage, ABC):
             StageError,
         ) as e:  # pragma: no cov
             if isinstance(e, StageNestedError):
-                await trace.ainfo(f"[STAGE]:
+                await trace.ainfo(f"[STAGE]: Nested: {e}")
+            elif isinstance(e, (StageSkipError, StageNestedSkipError)):
+                await trace.ainfo(f"[STAGE]: ⏭️ Skip: {e}")
             else:
-                emoji: str = (
-                    "⏭️"
-                    if isinstance(e, (StageSkipError, StageNestedSkipError))
-                    else "🚨"
-                )
                 await trace.ainfo(
-                    f"[STAGE]:
+                    f"[STAGE]: Stage Failed:||🚨 {traceback.format_exc()}||"
                 )
             st: Status = get_status_from_error(e)
             return Result(
@@ -706,8 +842,8 @@ class BaseAsyncStage(BaseStage, ABC):
                 status=st,
                 updated=(
                     None
-                    if isinstance(e, StageSkipError)
-                    else {"
+                    if isinstance(e, (StageSkipError, StageNestedSkipError))
+                    else {"errors": e.to_dict()}
                 ),
             ),
             info={"execution_time": time.monotonic() - ts},
@@ -715,7 +851,7 @@ class BaseAsyncStage(BaseStage, ABC):
             )
         except Exception as e:
             await trace.aerror(
-                f"[STAGE]: Error
+                f"[STAGE]: Error Failed:||🚨 {traceback.format_exc()}||"
             )
             return Result(
                 run_id=run_id,
@@ -738,12 +874,14 @@ class BaseAsyncStage(BaseStage, ABC):
     ) -> Result:
         """Wrapped the axecute method before returning to handler axecute.
 
-        :
-
-
-
+        Args:
+            params: (DictData) A parameter data that want to use in this
+                execution.
+            event: (Event) An event manager that use to track parent execute
+                was not force stopped.
 
-        :
+        Returns:
+            Result: A Result object.
         """
         catch(context, status=WAIT)
         return await self.async_process(
@@ -764,7 +902,10 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
         default=0,
         ge=0,
         lt=20,
-        description=
+        description=(
+            "A retry number if stage process got the error exclude skip and "
+            "cancel exception class."
+        ),
     )
 
     def _execute(
@@ -778,12 +919,14 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
        """Wrapped the execute method with retry strategy before returning to
        handler execute.
 
-        :
-
-
-
+        Args:
+            params: (DictData) A parameter data that want to use in this
+                execution.
+            event: (Event) An event manager that use to track parent execute
+                was not force stopped.
 
-        :
+        Returns:
+            Result: A Result object.
         """
         current_retry: int = 0
         exception: Exception
@@ -791,9 +934,19 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
-
         # NOTE: First execution for not pass to retry step if it passes.
         try:
+            if (
+                self.extras.get("__sys_release_dryrun_mode", False)
+                and self.action_stage
+            ):
+                return self.dryrun(
+                    params | {"retry": current_retry},
+                    run_id=run_id,
+                    context=context,
+                    parent_run_id=parent_run_id,
+                    event=event,
+                )
             return self.process(
                 params | {"retry": current_retry},
                 run_id=run_id,
@@ -820,6 +973,17 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
                     status=WAIT,
                     updated={"retry": current_retry},
                 )
+                if (
+                    self.extras.get("__sys_release_dryrun_mode", False)
+                    and self.action_stage
+                ):
+                    return self.dryrun(
+                        params | {"retry": current_retry},
+                        run_id=run_id,
+                        context=context,
+                        parent_run_id=parent_run_id,
+                        event=event,
+                    )
                 return self.process(
                     params | {"retry": current_retry},
                     run_id=run_id,
@@ -827,6 +991,14 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
                     parent_run_id=parent_run_id,
                     event=event,
                 )
+            except (
+                StageNestedSkipError,
+                StageNestedCancelError,
+                StageSkipError,
+                StageCancelError,
+            ):
+                trace.debug("[STAGE]: process raise skip or cancel error.")
+                raise
             except Exception as e:
                 current_retry += 1
                 trace.warning(
@@ -852,12 +1024,14 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
        """Wrapped the axecute method with retry strategy before returning to
        handler axecute.
 
-        :
-
-
-
+        Args:
+            params: (DictData) A parameter data that want to use in this
+                execution.
+            event: (Event) An event manager that use to track parent execute
+                was not force stopped.
 
-        :
+        Returns:
+            Result: A Result object.
         """
         current_retry: int = 0
         exception: Exception
@@ -868,6 +1042,17 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
 
         # NOTE: First execution for not pass to retry step if it passes.
         try:
+            if (
+                self.extras.get("__sys_release_dryrun_mode", False)
+                and self.action_stage
+            ):
+                return self.dryrun(
+                    params | {"retry": current_retry},
+                    run_id=run_id,
+                    context=context,
+                    parent_run_id=parent_run_id,
+                    event=event,
+                )
             return await self.async_process(
                 params | {"retry": current_retry},
                 run_id=run_id,
@@ -894,6 +1079,17 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
                     status=WAIT,
                     updated={"retry": current_retry},
                 )
+                if (
+                    self.extras.get("__sys_release_dryrun_mode", False)
+                    and self.action_stage
+                ):
+                    return self.dryrun(
+                        params | {"retry": current_retry},
+                        run_id=run_id,
+                        context=context,
+                        parent_run_id=parent_run_id,
+                        event=event,
+                    )
                 return await self.async_process(
                     params | {"retry": current_retry},
                     run_id=run_id,
@@ -901,6 +1097,16 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
                     parent_run_id=parent_run_id,
                     event=event,
                 )
+            except (
+                StageNestedSkipError,
+                StageNestedCancelError,
+                StageSkipError,
+                StageCancelError,
+            ):
+                await trace.adebug(
+                    "[STAGE]: process raise skip or cancel error."
+                )
+                raise
             except Exception as e:
                 current_retry += 1
                 await trace.awarning(
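Both retry wrappers (`_execute` and its async counterpart) now check the private `__sys_release_dryrun_mode` key in `extras` before the first attempt and before every retry, so a DRYRUN release never calls `process` on an action stage. A hedged sketch of setting that flag from the caller side (the call itself and its keyword set are illustrative, not a documented entry point):

```python
from ddeutil.workflow.stages import BashStage

stage = BashStage(
    name="deploy",
    bash="echo 'deploying'",
    # The retry wrapper reads this system flag from `extras`; combined with
    # action_stage=True it routes execution into dryrun() -> to_empty().
    extras={"__sys_release_dryrun_mode": True},
)
result = stage.execute(params={}, run_id="manual-dryrun-01")
print(result.status)
```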
@@ -1004,9 +1210,7 @@ class EmptyStage(BaseAsyncStage):
         )
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start empty process.")
 
         trace.info(f"[STAGE]: Message: ( {message} )")
         if self.sleep > 0:
@@ -1057,9 +1261,7 @@ class EmptyStage(BaseAsyncStage):
         )
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start empty process.")
 
         trace.info(f"[STAGE]: Message: ( {message} )")
         if self.sleep > 0:
@@ -1095,6 +1297,7 @@ class BashStage(BaseRetryStage):
     ... }
     """
 
+    action_stage: ClassVar[bool] = True
     bash: str = Field(
         description=(
             "A bash statement that want to execute via Python subprocess."
@@ -1339,6 +1542,7 @@ class PyStage(BaseRetryStage):
     ... }
     """
 
+    action_stage: ClassVar[bool] = True
     run: str = Field(
         description="A Python string statement that want to run with `exec`.",
     )
@@ -1577,6 +1781,7 @@ class CallStage(BaseRetryStage):
     ... }
     """
 
+    action_stage: ClassVar[bool] = True
     uses: str = Field(
         description=(
             "A caller function with registry importer syntax that use to load "
@@ -1658,7 +1863,7 @@ class CallStage(BaseRetryStage):
                 extras=self.extras,
             ),
             "extras": self.extras,
-        } |
+        } | self.pass_template(self.args, params)
         sig = inspect.signature(call_func)
         necessary_params: list[str] = []
         has_keyword: bool = False
@@ -1673,6 +1878,7 @@ class CallStage(BaseRetryStage):
             elif v.kind == Parameter.VAR_KEYWORD:
                 has_keyword = True
 
+        # NOTE: Validate private parameter should exist in the args field.
         if any(
             (k.removeprefix("_") not in args and k not in args)
             for k in necessary_params
@@ -1690,16 +1896,15 @@ class CallStage(BaseRetryStage):
             f"does not set to args. It already set {list(args.keys())}."
         )
 
-        if
-
+        if not has_keyword:
+            if "result" not in sig.parameters:
+                args.pop("result")
 
-
-
+            if "extras" not in sig.parameters:  # pragma: no cov
+                args.pop("extras")
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start call process.")
 
         args: DictData = self.validate_model_args(
             call_func, args, run_id, parent_run_id, extras=self.extras
@@ -1779,7 +1984,7 @@ class CallStage(BaseRetryStage):
                 extras=self.extras,
             ),
             "extras": self.extras,
-        } |
+        } | self.pass_template(self.args, params)
         sig = inspect.signature(call_func)
         necessary_params: list[str] = []
         has_keyword: bool = False
@@ -1810,16 +2015,16 @@ class CallStage(BaseRetryStage):
             f"Necessary params, ({', '.join(necessary_params)}, ), "
             f"does not set to args. It already set {list(args.keys())}."
         )
-        if "result" not in sig.parameters and not has_keyword:
-            args.pop("result")
 
-        if
-
+        if not has_keyword:
+            if "result" not in sig.parameters:
+                args.pop("result")
+
+            if "extras" not in sig.parameters:  # pragma: no cov
+                args.pop("extras")
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start call process.")
 
         args: DictData = self.validate_model_args(
             call_func, args, run_id, parent_run_id, extras=self.extras
@@ -1866,11 +2071,14 @@ class CallStage(BaseRetryStage):
         """Validate an input arguments before passing to the caller function.
 
         Args:
-            func
-            args
-            run_id: A running
+            func (TagFunc): A tag function object that want to get typing.
+            args (DictData): An arguments before passing to this tag func.
+            run_id (str): A running ID.
+            parent_run_id (str, default None): A parent running ID.
+            extras (DictData, default None): An extra parameters.
 
-        :
+        Returns:
+            DictData: A prepared args paramter that validate with model args.
         """
         try:
             override: DictData = dict(
@@ -1903,8 +2111,87 @@ class CallStage(BaseRetryStage):
             )
         return args
 
+    def dryrun(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Optional[Result]:  # pragma: no cov
+        """Override the dryrun method for this CallStage.
+
+        Steps:
+            - Pre-hook caller function that exist.
+            - Show function parameters
+        """
+        trace: Trace = get_trace(
+            run_id, parent_run_id=parent_run_id, extras=self.extras
+        )
+        call_func: TagFunc = self.get_caller(params=params)()
+        trace.info(f"[STAGE]: Caller Func: '{call_func.name}@{call_func.tag}'")
+
+        args: DictData = {
+            "result": Result(
+                run_id=run_id,
+                parent_run_id=parent_run_id,
+                status=WAIT,
+                context=context,
+                extras=self.extras,
+            ),
+            "extras": self.extras,
+        } | self.pass_template(self.args, params)
+        sig = inspect.signature(call_func)
+        trace.debug(f"[STAGE]: {sig.parameters}")
+        necessary_params: list[str] = []
+        has_keyword: bool = False
+        for k in sig.parameters:
+            if (
+                v := sig.parameters[k]
+            ).default == Parameter.empty and v.kind not in (
+                Parameter.VAR_KEYWORD,
+                Parameter.VAR_POSITIONAL,
+            ):
+                necessary_params.append(k)
+            elif v.kind == Parameter.VAR_KEYWORD:
+                has_keyword = True
+
+        func_typed: dict[str, Any] = get_type_hints(call_func)
+        map_type: str = "||".join(
+            f"\t{p}: {func_typed[p]}"
+            for p in necessary_params
+            if p in func_typed
+        )
+        map_type_args: str = "||".join(f"\t{a}: {type(a)}" for a in args)
+        if not has_keyword:
+            if "result" not in sig.parameters:
+                args.pop("result")
+
+            if "extras" not in sig.parameters:
+                args.pop("extras")
+
+        trace.debug(
+            f"[STAGE]: Details"
+            f"||Necessary Params:"
+            f"||{map_type}"
+            f"||Return Type: {func_typed['return']}"
+            f"||Argument Params:"
+            f"||{map_type_args}"
+            f"||"
+        )
+        if has_keyword:
+            trace.debug("[STAGE]: This caller function support keyword param.")
+        return Result(
+            run_id=run_id,
+            parent_run_id=parent_run_id,
+            status=SUCCESS,
+            context=catch(context=context, status=SUCCESS),
+            extras=self.extras,
+        )
 
-class BaseNestedStage(
+class BaseNestedStage(BaseAsyncStage, ABC):
     """Base Nested Stage model. This model is use for checking the child stage
     is the nested stage or not.
     """
@@ -1932,8 +2219,9 @@ class BaseNestedStage(BaseRetryStage, ABC):
         """Make the errors context result with the refs value depends on the nested
         execute func.
 
-        :
-
+        Args:
+            context: (DictData) A context data.
+            error: (StageError) A stage exception object.
         """
         if "errors" in context:
             context["errors"][error.refs] = error.to_dict()
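`CallStage.dryrun` no longer invokes the registered caller; it inspects its signature and type hints and logs which parameters would be required. The same introspection pattern on a plain function, independent of the package (the function here is made up for illustration):

```python
import inspect
from inspect import Parameter
from typing import get_type_hints


def my_task(source: str, limit: int = 10, **kwargs) -> dict:
    """Stand-in for a registered caller function."""
    return {"source": source, "limit": limit}


sig = inspect.signature(my_task)
# Required parameters: no default and not *args/**kwargs, as in the diff.
necessary = [
    name
    for name, p in sig.parameters.items()
    if p.default is Parameter.empty
    and p.kind not in (Parameter.VAR_KEYWORD, Parameter.VAR_POSITIONAL)
]
hints = get_type_hints(my_task)
print(necessary)            # ['source']
print(hints.get("return"))  # <class 'dict'>
```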
@@ -1968,7 +2256,7 @@ class BaseNestedStage(BaseRetryStage, ABC):
         )
 
 
-class TriggerStage(
+class TriggerStage(BaseRetryStage):
     """Trigger workflow executor stage that run an input trigger Workflow
     execute method. This is the stage that allow you to create the reusable
     Workflow template with dynamic parameters.
@@ -2026,18 +2314,9 @@ class TriggerStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         _trigger: str = param2template(self.trigger, params, extras=self.extras)
-
-
-        #     "[NESTED]: Circle execution via trigger itself workflow name."
-        # )
+        if _trigger == self.extras.get("__sys_exec_break_circle", "NOTSET"):
+            raise StageError("Circle execute via trigger itself workflow name.")
         trace.info(f"[NESTED]: Load Workflow Config: {_trigger!r}")
-
-        # # NOTE: add noted key for cancel circle execution.
-        # if "stop_circle_workflow_name" in self.extras:
-        #     self.extras["stop_circle_workflow_name"].append(_trigger)
-        # else:
-        #     self.extras.update({"stop_circle_workflow_name": [_trigger]})
-
         result: Result = Workflow.from_conf(
             name=pass_env(_trigger),
             extras=self.extras,
@@ -2053,13 +2332,69 @@ class TriggerStage(BaseNestedStage):
             if (msg := result.context.get("errors", {}).get("message"))
             else "."
         )
-
+            return result.catch(
+                status=FAILED,
+                context={
+                    "status": FAILED,
+                    "errors": StageError(
+                        f"Trigger workflow was failed{err_msg}"
+                    ).to_dict(),
+                },
+            )
         elif result.status == CANCEL:
-
+            return result.catch(
+                status=CANCEL,
+                context={
+                    "status": CANCEL,
+                    "errors": StageCancelError(
+                        "Trigger workflow was cancel."
+                    ).to_dict(),
+                },
+            )
         elif result.status == SKIP:
-
+            return result.catch(
+                status=SKIP,
+                context={
+                    "status": SKIP,
+                    "errors": StageSkipError(
+                        "Trigger workflow was skipped."
+                    ).to_dict(),
+                },
+            )
         return result
 
+    async def async_process(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Result:  # pragma: no cov
+        """Async process for nested-stage do not implement yet.
+
+        Args:
+            params: A parameter data that want to use in this
+                execution.
+            run_id: A running stage ID.
+            context: A context data.
+            parent_run_id: A parent running ID. (Default is None)
+            event: An event manager that use to track parent process
+                was not force stopped.
+
+        Returns:
+            Result: The execution result with status and context data.
+        """
+        raise NotImplementedError(
+            "The Trigger stage does not implement the `axecute` method yet."
+        )
+
+
+class ParallelContext(TypedDict):
+    branch: str
+    stages: NotRequired[dict[str, Any]]
+
 
 class ParallelStage(BaseNestedStage):
     """Parallel stage executor that execute branch stages with multithreading.
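`ParallelContext` is a `TypedDict`, so the per-branch context now has a declared shape while remaining a plain dict at runtime; that is why the branch code later in this diff can hand it to `set_outputs` through the new `cast_dict` helper. A tiny illustration of the same pattern outside the package (the names here are illustrative, not the package's):

```python
from typing import Any, TypedDict

from typing_extensions import NotRequired


class BranchContext(TypedDict):
    branch: str
    stages: NotRequired[dict[str, Any]]


ctx: BranchContext = {"branch": "extract", "stages": {}}
# TypedDicts are ordinary dicts at runtime, so a cast/convert helper is all
# that is needed to pass them to APIs expecting a loose mapping type.
plain: dict[str, Any] = dict(ctx)
print(plain["branch"])
```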
@@ -2098,10 +2433,8 @@ class ParallelStage(BaseNestedStage):
     parallel: dict[str, list[Stage]] = Field(
         description="A mapping of branch name and its stages.",
     )
-    max_workers: int = Field(
+    max_workers: Union[int, str] = Field(
         default=2,
-        ge=1,
-        lt=20,
         description=(
             "The maximum multi-thread pool worker size for execution parallel. "
             "This value should be gather or equal than 1, and less than 20."
@@ -2109,14 +2442,20 @@ class ParallelStage(BaseNestedStage):
         alias="max-workers",
     )
 
-
+    @field_validator("max_workers")
+    def __validate_max_workers(cls, value: Union[int, str]) -> Union[int, str]:
+        """Validate `max_workers` field that should has value between 1 and 19."""
+        if isinstance(value, int) and (value < 1 or value >= 20):
+            raise ValueError("A max-workers value should between 1 and 19.")
+        return value
+
+    def _process_nested(
         self,
         branch: str,
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData]:
         """Execute branch that will execute all nested-stage that was set in
@@ -2125,15 +2464,14 @@ class ParallelStage(BaseNestedStage):
         Args:
             branch (str): A branch ID.
             params (DictData): A parameter data.
-
+            trace (Trace): A Trace model.
             context (DictData):
-            parent_run_id (str | None, default None): A parent running ID.
             event: (Event) An Event manager instance that use to cancel this
                 execution if it forces stopped by parent execution.
                 (Default is None)
 
         Raises:
-            StageCancelError: If event was set.
+            StageCancelError: If event was set before start stage execution.
             StageCancelError: If result from a nested-stage return canceled
                 status.
             StageError: If result from a nested-stage return failed status.
@@ -2141,15 +2479,10 @@ class ParallelStage(BaseNestedStage):
         Returns:
             tuple[Status, DictData]: A pair of status and result context data.
         """
-        trace:
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
-        trace.debug(f"[NESTED]: Execute Branch: {branch!r}")
-
-        # NOTE: Create nested-context
+        trace.info(f"[NESTED]: Execute Branch: {branch!r}")
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"branch": branch})
-        nestet_context:
+        nestet_context: ParallelContext = {"branch": branch, "stages": {}}
 
         total_stage: int = len(self.parallel[branch])
         skips: list[bool] = [False] * total_stage
@@ -2160,8 +2493,7 @@ class ParallelStage(BaseNestedStage):
 
             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "start branch execution."
+                    f"Cancel branch: {branch!r} before start nested process."
                 )
                 catch(
                     context=context,
@@ -2181,12 +2513,12 @@ class ParallelStage(BaseNestedStage):
 
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
-            stage.set_outputs(rs.context, to=nestet_context)
+            stage.set_outputs(rs.context, to=cast_dict(nestet_context))
             stage.set_outputs(
-                stage.get_outputs(nestet_context), to=current_context
+                stage.get_outputs(cast_dict(nestet_context)), to=current_context
             )
 
             if rs.status == SKIP:
@@ -2195,7 +2527,7 @@ class ParallelStage(BaseNestedStage):
 
             elif rs.status == FAILED:  # pragma: no cov
                 error_msg: str = (
-                    f"
+                    f"Break branch: {branch!r} because nested stage: "
                     f"{stage.iden!r}, failed."
                 )
                 catch(
@@ -2216,8 +2548,7 @@ class ParallelStage(BaseNestedStage):
 
             elif rs.status == CANCEL:
                 error_msg: str = (
-                    "
-                    "end branch execution."
+                    f"Cancel branch: {branch!r} after end nested process."
                 )
                 catch(
                     context=context,
@@ -2257,7 +2588,9 @@ class ParallelStage(BaseNestedStage):
         parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> Result:
-        """Execute parallel each branch via multi-threading pool.
+        """Execute parallel each branch via multi-threading pool. The parallel
+        process will use all-completed strategy to handle result from each
+        branch.
 
         Args:
             params: A parameter data that want to use in this
@@ -2268,6 +2601,9 @@ class ParallelStage(BaseNestedStage):
             event: An event manager that use to track parent process
                 was not force stopped.
 
+        Raises:
+            StageCancelError: If event was set before start parallel process.
+
         Returns:
             Result: The execution result with status and context data.
         """
@@ -2275,27 +2611,36 @@ class ParallelStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         event: Event = event or Event()
-
+
+        # NOTE: Start prepare max_workers field if it is string type.
+        if isinstance(self.max_workers, str):
+            max_workers: int = self.__validate_max_workers(
+                pass_env(
+                    param2template(
+                        self.max_workers, params=params, extras=self.extras
+                    )
+                )
+            )
+        else:
+            max_workers: int = self.max_workers
+        trace.info(f"[NESTED]: Parallel with {max_workers} workers.")
         catch(
             context=context,
             status=WAIT,
-            updated={"workers":
+            updated={"workers": max_workers, "parallel": {}},
         )
         len_parallel: int = len(self.parallel)
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start parallel process.")
 
-        with ThreadPoolExecutor(
+        with ThreadPoolExecutor(max_workers, "stp") as executor:
             futures: list[Future] = [
                 executor.submit(
-                    self.
+                    self._process_nested,
                     branch=branch,
                     params=params,
-
+                    trace=trace,
                     context=context,
-                    parent_run_id=parent_run_id,
                     event=event,
                 )
                 for branch in self.parallel
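Because `max_workers` may now be a string, the pool size can come from a workflow parameter and is resolved, validated, and only then handed to the thread pool. A hedged sketch in the same dict form the class docstrings use (the surrounding stage layout and parameter name are illustrative):

```python
# Illustrative ParallelStage mapping; keys follow the aliases in this diff.
stage = {
    "name": "Run branches in parallel",
    "max-workers": "${{ params.pool_size }}",
    "parallel": {
        "extract": [{"name": "Pull data", "echo": "pull"}],
        "load": [{"name": "Push data", "echo": "push"}],
    },
}
# At run time the string is resolved against params (param2template + pass_env),
# re-checked by __validate_max_workers, and used as the ThreadPoolExecutor size.
```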
@@ -2310,15 +2655,21 @@ class ParallelStage(BaseNestedStage):
                     self.mark_errors(errors, e)
 
         st: Status = validate_statuses(statuses)
-        return Result(
-            run_id=run_id,
-            parent_run_id=parent_run_id,
+        return Result.from_trace(trace).catch(
             status=st,
             context=catch(context, status=st, updated=errors),
-            extras=self.extras,
         )
 
 
+EachType = Union[
+    list[str],
+    list[int],
+    str,
+    dict[str, Any],
+    dict[int, Any],
+]
+
+
 class ForEachStage(BaseNestedStage):
     """For-Each stage executor that execute all stages with each item in the
     foreach list.
@@ -2339,13 +2690,7 @@ class ForEachStage(BaseNestedStage):
     ... }
     """
 
-    foreach:
-        list[str],
-        list[int],
-        str,
-        dict[str, Any],
-        dict[int, Any],
-    ] = Field(
+    foreach: EachType = Field(
         description=(
             "A items for passing to stages via ${{ item }} template parameter."
         ),
@@ -2374,15 +2719,14 @@ class ForEachStage(BaseNestedStage):
         ),
     )
 
-    def
+    def _process_nested(
         self,
         index: int,
         item: StrOrInt,
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData]:
         """Execute item that will execute all nested-stage that was set in this
@@ -2391,32 +2735,29 @@ class ForEachStage(BaseNestedStage):
         This method will create the nested-context from an input context
         data and use it instead the context data.
 
-        :
-
-
-
-
-
-
-
+        Args:
+            index: (int) An index value of foreach loop.
+            item: (str | int) An item that want to execution.
+            params: (DictData) A parameter data.
+            trace (Trace): A Trace model.
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
+                (Default is None)
 
         This method should raise error when it wants to stop the foreach
         loop such as cancel event or getting the failed status.
 
-        :
-
-
+        Raises:
+            StageCancelError: If event was set.
+            StageError: If the stage execution raise any Exception error.
+            StageError: If the result from execution has `FAILED` status.
 
-        :
+        Returns:
+            tuple[Status, DictData]
         """
-        trace:
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
-        trace.debug(f"[NESTED]: Execute Item: {item!r}")
+        trace.info(f"[NESTED]: Execute Item: {item!r}")
         key: StrOrInt = index if self.use_index_as_key else item
-
-        # NOTE: Create nested-context data from the passing context.
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"item": item, "loop": index})
         nestet_context: DictData = {"item": item, "stages": {}}
@@ -2430,8 +2771,7 @@ class ForEachStage(BaseNestedStage):
 
             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "item execution."
+                    f"Cancel item: {key!r} before start nested process."
                 )
                 catch(
                     context=context,
@@ -2449,10 +2789,9 @@ class ForEachStage(BaseNestedStage):
                 )
                 raise StageCancelError(error_msg, refs=key)
 
-            # NOTE: Nested-stage execute will pass only params and context only.
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=nestet_context)
@@ -2466,7 +2805,7 @@ class ForEachStage(BaseNestedStage):
 
             elif rs.status == FAILED:  # pragma: no cov
                 error_msg: str = (
-                    f"
+                    f"Break item: {key!r} because nested stage: "
                     f"{stage.iden!r}, failed."
                 )
                 trace.warning(f"[NESTED]: {error_msg}")
@@ -2488,8 +2827,7 @@ class ForEachStage(BaseNestedStage):
 
             elif rs.status == CANCEL:
                 error_msg: str = (
-                    "
-                    "end item execution."
+                    f"Cancel item: {key!r} after end nested process."
                 )
                 catch(
                     context=context,
@@ -2520,6 +2858,42 @@ class ForEachStage(BaseNestedStage):
             },
         )
 
+    def validate_foreach(self, value: Any) -> list[Any]:
+        """Validate foreach value that already passed to this model.
+
+        Args:
+            value:
+
+        Raises:
+            TypeError: If value can not try-convert to list type.
+            ValueError:
+
+        Returns:
+            list[Any]: list of item.
+        """
+        if isinstance(value, str):
+            try:
+                value: list[Any] = str2list(value)
+            except ValueError as e:
+                raise TypeError(
+                    f"Does not support string foreach: {value!r} that can "
+                    f"not convert to list."
+                ) from e
+        # [VALIDATE]: Type of the foreach should be `list` type.
+        elif isinstance(value, dict):
+            raise TypeError(
+                f"Does not support dict foreach: {value!r} ({type(value)}) "
+                f"yet."
+            )
+        # [Validate]: Value in the foreach item should not be duplicate when the
+        # `use_index_as_key` field did not set.
+        elif len(set(value)) != len(value) and not self.use_index_as_key:
+            raise ValueError(
+                "Foreach item should not duplicate. If this stage must to pass "
+                "duplicate item, it should set `use_index_as_key: true`."
+            )
+        return value
+
     def process(
         self,
         params: DictData,
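With `validate_foreach` split out, `process` first resolves the `foreach` value through `pass_template` and then applies the same three rules the old inline block enforced: strings must convert to a list, dicts are rejected, and duplicate items require `use_index_as_key`. A small standalone sketch of those rules, using `ast.literal_eval` as a stand-in for `ddeutil.core.str2list` (illustration only, not the package's implementation):

```python
import ast
from typing import Any


def check_foreach(value: Any, use_index_as_key: bool = False) -> list[Any]:
    """Mirror of the validation rules in this diff, for illustration only."""
    if isinstance(value, str):
        parsed = ast.literal_eval(value)  # stand-in for str2list
        if not isinstance(parsed, list):
            raise TypeError(f"Cannot convert {value!r} to a list.")
        value = parsed
    elif isinstance(value, dict):
        raise TypeError("dict foreach is not supported yet.")
    elif len(set(value)) != len(value) and not use_index_as_key:
        raise ValueError(
            "foreach items must be unique unless use_index_as_key is set."
        )
    return value


print(check_foreach("[1, 2, 3]"))   # [1, 2, 3]
print(check_foreach([1, 1], True))  # [1, 1]
```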
@@ -2551,34 +2925,8 @@ class ForEachStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         event: Event = event or Event()
-        foreach:
-
-        )
-
-        # [NOTE]: Force convert str to list.
-        if isinstance(foreach, str):
-            try:
-                foreach: list[Any] = str2list(foreach)
-            except ValueError as e:
-                raise TypeError(
-                    f"Does not support string foreach: {foreach!r} that can "
-                    f"not convert to list."
-                ) from e
-
-        # [VALIDATE]: Type of the foreach should be `list` type.
-        elif isinstance(foreach, dict):
-            raise TypeError(
-                f"Does not support dict foreach: {foreach!r} ({type(foreach)}) "
-                f"yet."
-            )
-        # [Validate]: Value in the foreach item should not be duplicate when the
-        # `use_index_as_key` field did not set.
-        elif len(set(foreach)) != len(foreach) and not self.use_index_as_key:
-            raise ValueError(
-                "Foreach item should not duplicate. If this stage must to pass "
-                "duplicate item, it should set `use_index_as_key: true`."
-            )
-
+        foreach: EachType = self.pass_template(self.foreach, params=params)
+        foreach: list[Any] = self.validate_foreach(foreach)
         trace.info(f"[NESTED]: Foreach: {foreach!r}.")
         catch(
             context=context,
@@ -2587,28 +2935,24 @@ class ForEachStage(BaseNestedStage):
         )
         len_foreach: int = len(foreach)
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start foreach."
-            )
+            raise StageCancelError("Cancel before start foreach process.")
 
         with ThreadPoolExecutor(self.concurrent, "stf") as executor:
             futures: list[Future] = [
                 executor.submit(
-                    self.
-                    index=
+                    self._process_nested,
+                    index=index,
                     item=item,
                     params=params,
-
+                    trace=trace,
                     context=context,
-                    parent_run_id=parent_run_id,
                     event=event,
                 )
-                for
+                for index, item in enumerate(foreach, start=0)
             ]
 
             errors: DictData = {}
             statuses: list[Status] = [WAIT] * len_foreach
-            fail_fast: bool = False
 
             done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
             if len(list(done)) != len(futures):
@@ -2619,7 +2963,7 @@ class ForEachStage(BaseNestedStage):
                 for future in not_done:
                     future.cancel()
 
-                time.sleep(0.
+                time.sleep(0.025)
                 nd: str = (
                     (
                         f", {len(not_done)} item"
@@ -2630,7 +2974,6 @@ class ForEachStage(BaseNestedStage):
                 )
                 trace.debug(f"[NESTED]: ... Foreach-Stage set failed event{nd}")
                 done: Iterator[Future] = as_completed(futures)
-                fail_fast = True
 
             for i, future in enumerate(done, start=0):
                 try:
@@ -2640,21 +2983,13 @@ class ForEachStage(BaseNestedStage):
                     statuses[i] = get_status_from_error(e)
                     self.mark_errors(errors, e)
                 except CancelledError:
+                    statuses[i] = CANCEL
                     pass
 
         status: Status = validate_statuses(statuses)
-
-        # NOTE: Prepare status because it does not cancel from parent event but
-        # cancel from failed item execution.
-        if fail_fast and status == CANCEL:
-            status = FAILED
-
-        return Result(
-            run_id=run_id,
-            parent_run_id=parent_run_id,
+        return Result.from_trace(trace).catch(
             status=status,
             context=catch(context, status=status, updated=errors),
-            extras=self.extras,
         )
@@ -2689,7 +3024,7 @@ class UntilStage(BaseNestedStage):
         ),
     )
     until: str = Field(description="A until condition for stop the while loop.")
-    stages: list[
+    stages: list[SubStage] = Field(
         default_factory=list,
         description=(
             "A list of stage that will run with each item in until loop."
@@ -2706,38 +3041,33 @@ class UntilStage(BaseNestedStage):
         alias="max-loop",
     )
 
-    def
+    def _process_nested(
         self,
         item: T,
         loop: int,
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData, T]:
         """Execute loop that will execute all nested-stage that was set in this
         stage with specific loop and item.
 
-        :
-
-
-
-
-
-
+        Args:
+            item: (T) An item that want to execution.
+            loop: (int) A number of loop.
+            params: (DictData) A parameter data.
+            trace: (Trace)
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
 
-        :
-
+        Returns:
+            tuple[Status, DictData, T]: Return a pair of Result and changed
+                item.
         """
-        trace: Trace = get_trace(
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
         trace.debug(f"[NESTED]: Execute Loop: {loop} (Item {item!r})")
-
-        # NOTE: Create nested-context
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"item": item, "loop": loop})
         nestet_context: DictData = {"loop": loop, "item": item, "stages": {}}
@@ -2752,8 +3082,7 @@ class UntilStage(BaseNestedStage):
 
             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "loop execution."
+                    f"Cancel loop: {i!r} before start nested process."
                 )
                 catch(
                     context=context,
@@ -2774,7 +3103,7 @@ class UntilStage(BaseNestedStage):
 
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=nestet_context)
@@ -2790,8 +3119,8 @@ class UntilStage(BaseNestedStage):
 
             elif rs.status == FAILED:
                 error_msg: str = (
-                    f"
-                    f"
+                    f"Break loop: {i!r} because nested stage: {stage.iden!r}, "
+                    f"failed."
                 )
                 catch(
                     context=context,
@@ -2811,10 +3140,7 @@ class UntilStage(BaseNestedStage):
                 raise StageNestedError(error_msg, refs=loop)
 
             elif rs.status == CANCEL:
-                error_msg: str =
-                    "Loop execution was canceled from the event after "
-                    "end loop execution."
-                )
+                error_msg: str = f"Cancel loop: {i!r} after end nested process."
                 catch(
                     context=context,
                     status=CANCEL,
@@ -2881,35 +3207,33 @@ class UntilStage(BaseNestedStage):
         )
         event: Event = event or Event()
         trace.info(f"[NESTED]: Until: {self.until!r}")
-        item: Union[str, int, bool] =
-            param2template(self.item, params, extras=self.extras)
-        )
+        item: Union[str, int, bool] = self.pass_template(self.item, params)
         loop: int = 1
         until_rs: bool = True
         exceed_loop: bool = False
         catch(context=context, status=WAIT, updated={"until": {}})
         statuses: list[Status] = []
+
         while until_rs and not (exceed_loop := (loop > self.max_loop)):
 
             if event and event.is_set():
                 raise StageCancelError(
-                    "
+                    f"Cancel before start loop process, (loop: {loop})."
                 )
 
-            status, context, item = self.
+            status, context, item = self._process_nested(
                 item=item,
                 loop=loop,
                 params=params,
-
+                trace=trace,
                 context=context,
-                parent_run_id=parent_run_id,
                 event=event,
             )
 
             loop += 1
             if item is None:
                 item: int = loop
-                trace.
+                trace.debug(
                     f"[NESTED]: Return loop not set the item. It uses loop: "
                     f"{loop} by default."
                 )
@@ -2960,6 +3284,13 @@ class Match(BaseModel):
     )
 
 
+class Else(BaseModel):
+    other: list[Stage] = Field(
+        description="A list of stage that does not match any case.",
+        alias="else",
+    )
+
+
 class CaseStage(BaseNestedStage):
     """Case stage executor that execute all stages if the condition was matched.
 
@@ -2989,10 +3320,34 @@ class CaseStage(BaseNestedStage):
     ...     ],
     ... }
 
+    >>> stage = {
+    ...     "name": "If stage execution.",
+    ...     "case": "${{ param.test }}",
+    ...     "match": [
+    ...         {
+    ...             "case": "1",
+    ...             "stages": [
+    ...                 {
+    ...                     "name": "Stage case 1",
+    ...                     "eche": "Hello case 1",
+    ...                 },
+    ...             ],
+    ...         },
+    ...         {
+    ...             "else": [
+    ...                 {
+    ...                     "name": "Stage else",
+    ...                     "eche": "Hello case else",
+    ...                 },
+    ...             ],
+    ...         },
+    ...     ],
+    ... }
+
     """
 
     case: str = Field(description="A case condition for routing.")
-    match: list[Match] = Field(
+    match: list[Union[Match, Else]] = Field(
         description="A list of Match model that should not be an empty list.",
     )
     skip_not_match: bool = Field(
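The `match` field now accepts either a `Match` entry or one `Else` entry (aliased `else`), and the validator in the next hunk rejects a second else-branch. A hedged sketch of parsing such a mapping (the parameter name and nested stage payloads are illustrative):

```python
from ddeutil.workflow.stages import CaseStage, Else

stage = CaseStage.model_validate(
    {
        "name": "Route by parameter",
        "case": "${{ params.env }}",
        "match": [
            {"case": "prod", "stages": [{"name": "Deploy", "echo": "prod"}]},
            {"else": [{"name": "Fallback", "echo": "non-prod"}]},
        ],
    }
)
# The second entry should parse into the new Else model; a second `else`
# entry (or a second "_" case) is rejected by __validate_match.
print(type(stage.match[-1]).__name__)  # expected: Else
```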
@@ -3004,46 +3359,117 @@ class CaseStage(BaseNestedStage):
         alias="skip-not-match",
     )
 
-
+    @field_validator("match", mode="after")
+    def __validate_match(
+        cls, match: list[Union[Match, Else]]
+    ) -> list[Union[Match, Else]]:
+        """Validate the match field should contain only one Else model."""
+        c_else_case: int = 0
+        c_else_model: int = 0
+        for m in match:
+            if isinstance(m, Else):
+                if c_else_model:
+                    raise ValueError(
+                        "Match field should contain only one `Else` model."
+                    )
+                c_else_model += 1
+                continue
+            if isinstance(m, Match) and m.case == "_":
+                if c_else_case:
+                    raise ValueError(
+                        "Match field should contain only one else, '_', case."
+                    )
+                c_else_case += 1
+                continue
+        return match
+
+    def extract_stages_from_case(
+        self, case: StrOrNone, params: DictData
+    ) -> tuple[StrOrNone, list[Stage]]:
+        """Extract stage from case.
+
+        Args:
+            case (StrOrNone):
+            params (DictData):
+
+        Returns:
+            tuple[StrOrNone, list[Stage]]: A pair of case and stages.
+        """
+        _else_stages: Optional[list[Stage]] = None
+        stages: Optional[list[Stage]] = None
+
+        # NOTE: Start check the condition of each stage match with this case.
+        for match in self.match:
+
+            if isinstance(match, Else):
+                _else_stages: list[Stage] = match.other
+                continue
+
+            # NOTE: Store the else case.
+            if (c := match.case) == "_":
+                _else_stages: list[Stage] = match.stages
+                continue
+
+            _condition: str = param2template(c, params, extras=self.extras)
+            if pass_env(case) == pass_env(_condition):
+                stages: list[Stage] = match.stages
+                break
+
+        if stages is not None:
+            return case, stages
+
+        if _else_stages is None:
+            if not self.skip_not_match:
+                raise StageError(
+                    "This stage does not set else for support not match "
+                    "any case."
+                )
+            raise StageSkipError(
+                "Execution was skipped because it does not match any "
+                "case and the else condition does not set too."
+            )
+
+        # NOTE: Force to use the else when it does not match any case.
+        return "_", _else_stages
+
+    def _process_nested(
         self,
         case: str,
         stages: list[Stage],
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData]:
         """Execute case.
 
-        :
-
-
-
-
-
-
-
+        Args:
+            case: (str) A case that want to execution.
+            stages: (list[Stage]) A list of stage.
+            params: (DictData) A parameter data.
+            trace: (Trace)
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
 
-        :
+        Returns:
+            DictData
         """
3031
|
-
trace:
|
3032
|
-
run_id, parent_run_id=parent_run_id, extras=self.extras
|
3033
|
-
)
|
3034
|
-
trace.debug(f"[NESTED]: Execute Case: {case!r}")
|
3459
|
+
trace.info(f"[NESTED]: Case: {case!r}")
|
3035
3460
|
current_context: DictData = copy.deepcopy(params)
|
3036
3461
|
current_context.update({"case": case})
|
3037
3462
|
output: DictData = {"case": case, "stages": {}}
|
3038
|
-
|
3463
|
+
total_stage: int = len(stages)
|
3464
|
+
skips: list[bool] = [False] * total_stage
|
3465
|
+
for i, stage in enumerate(stages, start=0):
|
3039
3466
|
|
3040
3467
|
if self.extras:
|
3041
3468
|
stage.extras = self.extras
|
3042
3469
|
|
3043
3470
|
if event and event.is_set():
|
3044
3471
|
error_msg: str = (
|
3045
|
-
"
|
3046
|
-
"stage case execution."
|
3472
|
+
f"Cancel case: {case!r} before start nested process."
|
3047
3473
|
)
|
3048
3474
|
return CANCEL, catch(
|
3049
3475
|
context=context,
|
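
A condensed, standalone sketch of the resolution order that `extract_stages_from_case` implements above, with plain dicts standing in for the `Match` and `Else` models:

    def resolve_case(case, match):
        # A literal case wins immediately; an "_" case or an "else" block is
        # only remembered as the fallback and used when nothing else matched.
        fallback = None
        for m in match:
            if "else" in m:
                fallback = m["else"]
            elif m["case"] == "_":
                fallback = m["stages"]
            elif m["case"] == case:
                return case, m["stages"]
        if fallback is None:
            raise LookupError("no case matched and no else branch was set")
        return "_", fallback

    print(resolve_case("1", [{"case": "1", "stages": ["a"]}, {"else": ["b"]}]))  # ('1', ['a'])
    print(resolve_case("9", [{"case": "1", "stages": ["a"]}, {"else": ["b"]}]))  # ('_', ['b'])
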
@@ -3057,16 +3483,20 @@ class CaseStage(BaseNestedStage):
 
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=output)
             stage.set_outputs(stage.get_outputs(output), to=current_context)
 
-            if rs.status == FAILED:
+            if rs.status == SKIP:
+                skips[i] = True
+                continue
+
+            elif rs.status == FAILED:
                 error_msg: str = (
-                    f"
-                    f"
+                    f"Break case: {case!r} because nested stage: {stage.iden}, "
+                    f"failed."
                 )
                 return FAILED, catch(
                     context=context,
@@ -3077,9 +3507,25 @@ class CaseStage(BaseNestedStage):
                         "errors": StageError(error_msg).to_dict(),
                     },
                 )
-
+
+            elif rs.status == CANCEL:
+                error_msg: str = (
+                    f"Cancel case {case!r} after end nested process."
+                )
+                return CANCEL, catch(
+                    context=context,
+                    status=CANCEL,
+                    updated={
+                        "case": case,
+                        "stages": filter_func(output.pop("stages", {})),
+                        "errors": StageCancelError(error_msg).to_dict(),
+                    },
+                )
+
+        status: Status = SKIP if sum(skips) == total_stage else SUCCESS
+        return status, catch(
             context=context,
-            status=
+            status=status,
             updated={
                 "case": case,
                 "stages": filter_func(output.pop("stages", {})),
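
The final status above only reports SKIP when every nested stage skipped; a toy version of that aggregation:

    def aggregate(skips: list[bool]) -> str:
        # SKIP only when all nested stages skipped, otherwise SUCCESS.
        return "SKIP" if sum(skips) == len(skips) else "SUCCESS"

    print(aggregate([True, True, True]))   # SKIP
    print(aggregate([True, False, True]))  # SUCCESS
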
@@ -3113,52 +3559,17 @@ class CaseStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
 
-
-        trace.info(f"[NESTED]: Get Case: {
-
-        _else: Optional[Match] = None
-        stages: Optional[list[Stage]] = None
-
-        # NOTE: Start check the condition of each stage match with this case.
-        for match in self.match:
-            # NOTE: Store the else case.
-            if (c := match.case) == "_":
-                _else: Match = match
-                continue
-
-            _condition: str = param2template(c, params, extras=self.extras)
-            if pass_env(_case) == pass_env(_condition):
-                stages: list[Stage] = match.stages
-                break
-
-        if stages is None:
-            if _else is None:
-                if not self.skip_not_match:
-                    raise StageError(
-                        "This stage does not set else for support not match "
-                        "any case."
-                    )
-                raise StageSkipError(
-                    "Execution was skipped because it does not match any "
-                    "case and the else condition does not set too."
-                )
-
-            # NOTE: Force to use the else when it does not match any case.
-            _case: str = "_"
-            stages: list[Stage] = _else.stages
-
+        case: StrOrNone = param2template(self.case, params, extras=self.extras)
+        trace.info(f"[NESTED]: Get Case: {case!r}.")
+        case, stages = self.extract_stages_from_case(case, params=params)
         if event and event.is_set():
-            raise StageCancelError(
-
-
-            )
-        status, context = self._process_case(
-            case=_case,
+            raise StageCancelError("Cancel before start case process.")
+        status, context = self._process_nested(
+            case=case,
             stages=stages,
             params=params,
-
+            trace=trace,
             context=context,
-            parent_run_id=parent_run_id,
             event=event,
         )
         return Result(
@@ -3251,7 +3662,7 @@ class RaiseStage(BaseAsyncStage):
         raise StageError(message)
 
 
-class DockerStage(BaseStage):  # pragma: no cov
+class DockerStage(BaseRetryStage):  # pragma: no cov
     """Docker container stage execution that will pull the specific Docker image
     with custom authentication and run this image by passing environment
     variables and mounting local volume to this Docker container.
@@ -3274,6 +3685,7 @@ class DockerStage(BaseStage):  # pragma: no cov
     ... }
     """
 
+    action_stage: ClassVar[bool] = True
     image: str = Field(
         description="A Docker image url with tag that want to run.",
     )
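
`action_stage` is declared as a `ClassVar`, so it flags the class itself and never becomes a pydantic model field. A small illustration with stand-in models, not the package's own classes:

    from typing import ClassVar
    from pydantic import BaseModel

    class StageSketch(BaseModel):
        # ClassVar attributes are class-level flags, not model fields, so they
        # never appear in validated or serialized data.
        action_stage: ClassVar[bool] = False
        name: str

    class DockerSketch(StageSketch):
        action_stage: ClassVar[bool] = True

    print(DockerSketch.action_stage)        # True
    print(list(DockerSketch.model_fields))  # ['name']
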
@@ -3430,6 +3842,33 @@ class DockerStage(BaseStage):  # pragma: no cov
         trace.info(f"[STAGE]: Docker: {self.image}:{self.tag}")
         raise NotImplementedError("Docker Stage does not implement yet.")
 
+    async def async_process(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Result:  # pragma: no cov
+        """Async process for nested-stage do not implement yet.
+
+        Args:
+            params: A parameter data that want to use in this
+                execution.
+            run_id: A running stage ID.
+            context: A context data.
+            parent_run_id: A parent running ID. (Default is None)
+            event: An event manager that use to track parent process
+                was not force stopped.
+
+        Returns:
+            Result: The execution result with status and context data.
+        """
+        raise NotImplementedError(
+            "The Docker stage does not implement the `axecute` method yet."
+        )
+
 
 class VirtualPyStage(PyStage):  # pragma: no cov
     """Virtual Python stage executor that run Python statement on the dependent
@@ -3466,7 +3905,7 @@ class VirtualPyStage(PyStage):  # pragma: no cov
 
         Args:
             py: A Python string statement.
-            values: A variable that want to set before running
+            values: A variable that want to set before running these
             deps: An additional Python dependencies that want install before
                 run this python stage.
             run_id: (StrOrNone) A running ID of this stage execution.
@@ -3584,6 +4023,63 @@ class VirtualPyStage(PyStage):  # pragma: no cov
             extras=self.extras,
         )
 
+    async def async_process(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Result:
+        raise NotImplementedError(
+            "Async process of Virtual Python stage does not implement yet."
+        )
+
+
+SubStage = Annotated[
+    Union[
+        BashStage,
+        CallStage,
+        PyStage,
+        VirtualPyStage,
+        RaiseStage,
+        DockerStage,
+        TriggerStage,
+        EmptyStage,
+        CaseStage,
+        ForEachStage,
+        UntilStage,
+    ],
+    Field(
+        union_mode="smart",
+        description=(
+            "A nested-stage allow list that able to use on the NestedStage "
+            "model."
+        ),
+    ),
+]  # pragma: no cov
+
+
+ActionStage = Annotated[
+    Union[
+        BashStage,
+        CallStage,
+        VirtualPyStage,
+        PyStage,
+        RaiseStage,
+        DockerStage,
+        TriggerStage,
+        EmptyStage,
+    ],
+    Field(
+        union_mode="smart",
+        description=(
+            "An action stage model that allow to use with nested-stage model."
+        ),
+    ),
+]  # pragma: no cov
+
 
 # NOTE:
 # An order of parsing stage model on the Job model with `stages` field.
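
Both aliases lean on pydantic's smart union mode to pick the matching stage model from plain dict data. A toy `TypeAdapter` sketch of that discrimination, using two stand-in models rather than the real stage classes:

    from typing import Annotated, Union
    from pydantic import BaseModel, Field, TypeAdapter

    class EmptySketch(BaseModel):
        name: str
        echo: str

    class BashSketch(BaseModel):
        name: str
        bash: str

    # Smart union picks the member whose required fields the payload satisfies.
    StageSketch = Annotated[Union[BashSketch, EmptySketch], Field(union_mode="smart")]
    adapter = TypeAdapter(StageSketch)

    print(type(adapter.validate_python({"name": "Hello", "echo": "hi"})).__name__)  # EmptySketch
    print(type(adapter.validate_python({"name": "Run", "bash": "ls"})).__name__)    # BashSketch
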
@@ -3592,18 +4088,14 @@ class VirtualPyStage(PyStage):  # pragma: no cov
 #
 Stage = Annotated[
     Union[
-
-        BashStage,
-        CallStage,
-        TriggerStage,
+        # NOTE: Nested Stage.
         ForEachStage,
         UntilStage,
         ParallelStage,
         CaseStage,
-
-
-
-        EmptyStage,
+        TriggerStage,
+        # NOTE: Union with the action stage.
+        ActionStage,
     ],
     Field(
         union_mode="smart",