ddeutil-workflow 0.0.82__py3-none-any.whl → 0.0.83__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +1 -1
- ddeutil/workflow/__init__.py +3 -2
- ddeutil/workflow/__types.py +10 -1
- ddeutil/workflow/audits.py +64 -41
- ddeutil/workflow/errors.py +3 -0
- ddeutil/workflow/event.py +34 -11
- ddeutil/workflow/job.py +5 -15
- ddeutil/workflow/result.py +41 -12
- ddeutil/workflow/stages.py +504 -292
- ddeutil/workflow/traces.py +9 -5
- ddeutil/workflow/utils.py +34 -20
- ddeutil/workflow/workflow.py +32 -50
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.83.dist-info}/METADATA +1 -1
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.83.dist-info}/RECORD +19 -19
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.83.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.83.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.83.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.82.dist-info → ddeutil_workflow-0.0.83.dist-info}/top_level.txt +0 -0
ddeutil/workflow/stages.py
CHANGED
@@ -71,7 +71,9 @@ from typing import (
     Annotated,
     Any,
     Callable,
+    ClassVar,
     Optional,
+    TypedDict,
     TypeVar,
     Union,
     get_type_hints,
@@ -80,10 +82,10 @@ from typing import (
 from ddeutil.core import str2list
 from pydantic import BaseModel, Field, ValidationError
 from pydantic.functional_validators import field_validator, model_validator
-from typing_extensions import Self
+from typing_extensions import NotRequired, Self
 
 from .__about__ import __python_version__
-from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr
+from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr, cast_dict
 from .conf import dynamic, pass_env
 from .errors import (
     StageCancelError,
@@ -117,6 +119,7 @@ from .traces import Trace, get_trace
 from .utils import (
     delay,
     dump_all,
+    extract_id,
     filter_func,
     gen_id,
     make_exec,
@@ -162,6 +165,7 @@ class BaseStage(BaseModel, ABC):
     ```
     """
 
+    action_stage: ClassVar[bool] = False
     extras: DictData = Field(
         default_factory=dict,
         description="An extra parameter that override core config values.",
@@ -230,6 +234,19 @@ class BaseStage(BaseModel, ABC):
         )
         return self
 
+    def pass_template(self, value: Any, params: DictData) -> Any:
+        """Pass template and environment variable to any value that can
+        templating.
+
+        Args:
+            value (Any): An any value.
+            params (DictData):
+
+        Returns:
+            Any: A templated value.
+        """
+        return pass_env(param2template(value, params, extras=self.extras))
+
     @abstractmethod
     def process(
         self,
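The new `pass_template` helper layers two steps: template substitution (`param2template`) followed by environment-variable expansion (`pass_env`). A minimal standalone sketch of that layering, using illustrative helper names rather than the package API:

    import os
    import re
    from typing import Any

    def render_params(value: str, params: dict[str, Any]) -> str:
        # Substitute ${{ params.<key> }} markers (stand-in for param2template).
        return re.sub(
            r"\$\{\{\s*params\.(\w+)\s*\}\}",
            lambda m: str(params.get(m.group(1), m.group(0))),
            value,
        )

    def expand_env(value: str) -> str:
        # Expand $VAR / ${VAR} references (stand-in for pass_env).
        return os.path.expandvars(value)

    # Params first, then environment, mirroring pass_template's ordering.
    print(expand_env(render_params("run ${{ params.name }} in $HOME", {"name": "demo"})))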
@@ -244,10 +261,10 @@ class BaseStage(BaseModel, ABC):
         This is important method that make this class is able to be the stage.
 
         Args:
-            params: A parameter data that want to use in this
+            params (DictData): A parameter data that want to use in this
                 execution.
-            run_id: A running stage ID.
-            context: A context data.
+            run_id (str): A running stage ID.
+            context (DictData): A context data.
             parent_run_id: A parent running ID. (Default is None)
             event: An event manager that use to track parent process
                 was not force stopped.
@@ -300,8 +317,9 @@ class BaseStage(BaseModel, ABC):
             Result: The execution result with updated status and context.
         """
         ts: float = time.monotonic()
-        parent_run_id
-
+        parent_run_id, run_id = extract_id(
+            self.iden, run_id=run_id, extras=self.extras
+        )
         context: DictData = {"status": WAIT}
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
@@ -353,15 +371,12 @@ class BaseStage(BaseModel, ABC):
             StageError,
         ) as e:  # pragma: no cov
             if isinstance(e, StageNestedError):
-                trace.info(f"[STAGE]:
+                trace.info(f"[STAGE]: Nested: {e}")
+            elif isinstance(e, (StageSkipError, StageNestedSkipError)):
+                trace.info(f"[STAGE]: ⏭️ Skip: {e}")
             else:
-                emoji: str = (
-                    "⏭️"
-                    if isinstance(e, (StageSkipError, StageNestedSkipError))
-                    else "🚨"
-                )
                 trace.info(
-                    f"[STAGE]:
+                    f"[STAGE]: Stage Failed:||🚨 {traceback.format_exc()}||"
                 )
             st: Status = get_status_from_error(e)
             return Result(
@@ -381,7 +396,9 @@ class BaseStage(BaseModel, ABC):
                 extras=self.extras,
             )
         except Exception as e:
-            trace.error(
+            trace.error(
+                f"[STAGE]: Error Failed:||🚨 {traceback.format_exc()}||"
+            )
             return Result(
                 run_id=run_id,
                 parent_run_id=parent_run_id,
@@ -492,10 +509,12 @@ class BaseStage(BaseModel, ABC):
         """Get the outputs from stages data. It will get this stage ID from
         the stage outputs mapping.
 
-        :
-            stage
+        Args:
+            output (DictData): A stage output context that want to get this
+                stage ID `outputs` key.
 
-        :
+        Returns:
+            DictData: An output value that have get with its identity.
         """
         if self.id is None and not dynamic(
             "stage_default_id", extras=self.extras
@@ -568,13 +587,53 @@ class BaseStage(BaseModel, ABC):
         """
         return False
 
-    def
+    def detail(self) -> DictData:  # pragma: no cov
+        """Return the detail of this stage for generate markdown.
+
+        Returns:
+            DictData: A dict that was dumped from this model with alias mode.
+        """
+        return self.model_dump(by_alias=True)
+
+    def md(self) -> str:  # pragma: no cov
         """Return generated document that will be the interface of this stage.
 
         :rtype: str
         """
         return self.desc
 
+    def dryrun(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Optional[Result]:  # pragma: no cov
+        """Pre-process method that will use to run with dry-run mode, and it
+        should be used before process method.
+        """
+
+    def to_empty(self, sleep: int = 0.35) -> EmptyStage:  # pragma: no cov
+        """Convert the current Stage model to the EmptyStage model for dry-run
+        mode if the `action_stage` class attribute has set.
+
+        Returns:
+            EmptyStage: An EmptyStage model that passing itself model data to
+                message.
+        """
+        return EmptyStage.model_validate(
+            {
+                "name": self.name,
+                "id": self.id,
+                "desc": self.desc,
+                "if": self.condition,
+                "echo": f"Convert from {self.__class__.__name__}",
+                "sleep": sleep,
+            }
+        )
+
 
 class BaseAsyncStage(BaseStage, ABC):
     """Base Async Stage model to make any stage model allow async execution for
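Hypothetical usage of the new dry-run hooks, assuming the models shown in this diff (the `echo` attribute name is taken from the construction dict above):

    from ddeutil.workflow.stages import EmptyStage

    stage = EmptyStage.model_validate(
        {"name": "Start", "echo": "hello world", "sleep": 1}
    )
    # Any action stage can be swapped for a no-op stand-in before a dry run.
    converted = stage.to_empty(sleep=0)
    print(converted.echo)  # "Convert from EmptyStage"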
@@ -686,15 +745,12 @@ class BaseAsyncStage(BaseStage, ABC):
             StageError,
         ) as e:  # pragma: no cov
             if isinstance(e, StageNestedError):
-                await trace.ainfo(f"[STAGE]:
+                await trace.ainfo(f"[STAGE]: Nested: {e}")
+            elif isinstance(e, (StageSkipError, StageNestedSkipError)):
+                await trace.ainfo(f"[STAGE]: ⏭️ Skip: {e}")
             else:
-                emoji: str = (
-                    "⏭️"
-                    if isinstance(e, (StageSkipError, StageNestedSkipError))
-                    else "🚨"
-                )
                 await trace.ainfo(
-                    f"[STAGE]:
+                    f"[STAGE]: Stage Failed:||🚨 {traceback.format_exc()}||"
                 )
             st: Status = get_status_from_error(e)
             return Result(
@@ -706,8 +762,8 @@ class BaseAsyncStage(BaseStage, ABC):
                 status=st,
                 updated=(
                     None
-                    if isinstance(e, StageSkipError)
-                    else {"
+                    if isinstance(e, (StageSkipError, StageNestedSkipError))
+                    else {"errors": e.to_dict()}
                 ),
             ),
             info={"execution_time": time.monotonic() - ts},
@@ -715,7 +771,7 @@ class BaseAsyncStage(BaseStage, ABC):
             )
         except Exception as e:
             await trace.aerror(
-                f"[STAGE]: Error
+                f"[STAGE]: Error Failed:||🚨 {traceback.format_exc()}||"
             )
             return Result(
                 run_id=run_id,
@@ -827,6 +883,14 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
                 parent_run_id=parent_run_id,
                 event=event,
             )
+        except (
+            StageSkipError,
+            StageNestedSkipError,
+            StageCancelError,
+            StageNestedCancelError,
+        ):
+            trace.debug("[STAGE]: process raise skip or cancel error.")
+            raise
         except Exception as e:
             current_retry += 1
             trace.warning(
@@ -901,6 +965,16 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
                 parent_run_id=parent_run_id,
                 event=event,
             )
+        except (
+            StageSkipError,
+            StageNestedSkipError,
+            StageCancelError,
+            StageNestedCancelError,
+        ):
+            await trace.adebug(
+                "[STAGE]: process raise skip or cancel error."
+            )
+            raise
         except Exception as e:
             current_retry += 1
             await trace.awarning(
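The intent of the new `except` clauses is that skip/cancel signals escape the retry loop untouched instead of being counted as retryable failures. A standalone sketch of that pattern (not the package's retry implementation):

    class SkipSignal(Exception):
        """Control-flow signal that must never be retried."""

    def run_with_retry(func, retries: int = 3):
        attempt = 0
        while True:
            try:
                return func()
            except SkipSignal:
                raise  # propagate control-flow exceptions before the broad handler
            except Exception:
                attempt += 1
                if attempt > retries:
                    raise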
@@ -1004,9 +1078,7 @@ class EmptyStage(BaseAsyncStage):
         )
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start empty process.")
 
         trace.info(f"[STAGE]: Message: ( {message} )")
         if self.sleep > 0:
@@ -1057,9 +1129,7 @@ class EmptyStage(BaseAsyncStage):
         )
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start empty process.")
 
         trace.info(f"[STAGE]: Message: ( {message} )")
         if self.sleep > 0:
@@ -1697,9 +1767,7 @@ class CallStage(BaseRetryStage):
         args.pop("extras")
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start call process.")
 
         args: DictData = self.validate_model_args(
             call_func, args, run_id, parent_run_id, extras=self.extras
@@ -1817,9 +1885,7 @@ class CallStage(BaseRetryStage):
         args.pop("extras")
 
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start call process.")
 
         args: DictData = self.validate_model_args(
             call_func, args, run_id, parent_run_id, extras=self.extras
@@ -1932,8 +1998,9 @@ class BaseNestedStage(BaseRetryStage, ABC):
         """Make the errors context result with the refs value depends on the nested
         execute func.
 
-        :
-
+        Args:
+            context: (DictData) A context data.
+            error: (StageError) A stage exception object.
         """
         if "errors" in context:
             context["errors"][error.refs] = error.to_dict()
@@ -2026,18 +2093,9 @@ class TriggerStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         _trigger: str = param2template(self.trigger, params, extras=self.extras)
-
-
-        #     "[NESTED]: Circle execution via trigger itself workflow name."
-        # )
+        if _trigger == self.extras.get("__sys_break_circle_exec", "NOTSET"):
+            raise StageError("Circle execute via trigger itself workflow name.")
         trace.info(f"[NESTED]: Load Workflow Config: {_trigger!r}")
-
-        # # NOTE: add noted key for cancel circle execution.
-        # if "stop_circle_workflow_name" in self.extras:
-        #     self.extras["stop_circle_workflow_name"].append(_trigger)
-        # else:
-        #     self.extras.update({"stop_circle_workflow_name": [_trigger]})
-
         result: Result = Workflow.from_conf(
             name=pass_env(_trigger),
             extras=self.extras,
@@ -2053,14 +2111,43 @@ class TriggerStage(BaseNestedStage):
                 if (msg := result.context.get("errors", {}).get("message"))
                 else "."
             )
-
+            return result.catch(
+                status=FAILED,
+                context={
+                    "status": FAILED,
+                    "errors": StageError(
+                        f"Trigger workflow was failed{err_msg}"
+                    ).to_dict(),
+                },
+            )
         elif result.status == CANCEL:
-
+            return result.catch(
+                status=CANCEL,
+                context={
+                    "status": CANCEL,
+                    "errors": StageCancelError(
+                        "Trigger workflow was cancel."
+                    ).to_dict(),
+                },
+            )
         elif result.status == SKIP:
-
+            return result.catch(
+                status=SKIP,
+                context={
+                    "status": SKIP,
+                    "errors": StageSkipError(
+                        "Trigger workflow was skipped."
+                    ).to_dict(),
+                },
+            )
         return result
 
 
+class ParallelContext(TypedDict):
+    branch: str
+    stages: NotRequired[dict[str, Any]]
+
+
 class ParallelStage(BaseNestedStage):
     """Parallel stage executor that execute branch stages with multithreading.
     This stage let you set the fix branches for running child stage inside it on
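The new `ParallelContext` TypedDict uses `NotRequired` so the `stages` key may be omitted while `branch` stays mandatory. A standalone sketch of the same shape:

    from typing import Any
    from typing_extensions import NotRequired, TypedDict

    class BranchContext(TypedDict):
        branch: str
        stages: NotRequired[dict[str, Any]]

    ok_without_stages: BranchContext = {"branch": "b1"}
    ok_with_stages: BranchContext = {"branch": "b2", "stages": {}}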
@@ -2098,10 +2185,8 @@ class ParallelStage(BaseNestedStage):
     parallel: dict[str, list[Stage]] = Field(
         description="A mapping of branch name and its stages.",
     )
-    max_workers: int = Field(
+    max_workers: Union[int, str] = Field(
         default=2,
-        ge=1,
-        lt=20,
         description=(
             "The maximum multi-thread pool worker size for execution parallel. "
             "This value should be gather or equal than 1, and less than 20."
@@ -2109,14 +2194,20 @@ class ParallelStage(BaseNestedStage):
         alias="max-workers",
     )
 
-
+    @field_validator("max_workers")
+    def __validate_max_workers(cls, value: Union[int, str]) -> Union[int, str]:
+        """Validate `max_workers` field that should has value between 1 and 19."""
+        if isinstance(value, int) and (value < 1 or value >= 20):
+            raise ValueError("A max-workers value should between 1 and 19.")
+        return value
+
+    def _process_nested(
         self,
         branch: str,
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData]:
         """Execute branch that will execute all nested-stage that was set in
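With `max_workers` widened to `Union[int, str]`, the range check can only run for integers at validation time; string values are treated as templates that are resolved and re-checked at run time. A small pydantic sketch of that pattern (the `Pool` model is illustrative, not part of the package):

    from typing import Union
    from pydantic import BaseModel, Field, field_validator

    class Pool(BaseModel):
        max_workers: Union[int, str] = Field(default=2, alias="max-workers")

        @field_validator("max_workers")
        def _check(cls, value: Union[int, str]) -> Union[int, str]:
            # Integers are range-checked now; strings such as
            # "${{ params.workers }}" are resolved and re-checked later.
            if isinstance(value, int) and not (1 <= value < 20):
                raise ValueError("max-workers should be between 1 and 19.")
            return value

    print(Pool.model_validate({"max-workers": "${{ params.workers }}"}).max_workers)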
@@ -2125,15 +2216,14 @@ class ParallelStage(BaseNestedStage):
         Args:
             branch (str): A branch ID.
             params (DictData): A parameter data.
-
+            trace (Trace): A Trace model.
             context (DictData):
-            parent_run_id (str | None, default None): A parent running ID.
             event: (Event) An Event manager instance that use to cancel this
                 execution if it forces stopped by parent execution.
                 (Default is None)
 
         Raises:
-            StageCancelError: If event was set.
+            StageCancelError: If event was set before start stage execution.
             StageCancelError: If result from a nested-stage return canceled
                 status.
             StageError: If result from a nested-stage return failed status.
@@ -2141,15 +2231,10 @@ class ParallelStage(BaseNestedStage):
         Returns:
             tuple[Status, DictData]: A pair of status and result context data.
         """
-        trace:
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
-        trace.debug(f"[NESTED]: Execute Branch: {branch!r}")
-
-        # NOTE: Create nested-context
+        trace.info(f"[NESTED]: Execute Branch: {branch!r}")
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"branch": branch})
-        nestet_context:
+        nestet_context: ParallelContext = {"branch": branch, "stages": {}}
 
         total_stage: int = len(self.parallel[branch])
         skips: list[bool] = [False] * total_stage
@@ -2160,8 +2245,7 @@ class ParallelStage(BaseNestedStage):
 
             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "start branch execution."
+                    f"Cancel branch: {branch!r} before start nested process."
                 )
                 catch(
                     context=context,
@@ -2181,12 +2265,12 @@ class ParallelStage(BaseNestedStage):
 
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
-            stage.set_outputs(rs.context, to=nestet_context)
+            stage.set_outputs(rs.context, to=cast_dict(nestet_context))
             stage.set_outputs(
-                stage.get_outputs(nestet_context), to=current_context
+                stage.get_outputs(cast_dict(nestet_context)), to=current_context
             )
 
             if rs.status == SKIP:
@@ -2195,7 +2279,7 @@ class ParallelStage(BaseNestedStage):
 
             elif rs.status == FAILED:  # pragma: no cov
                 error_msg: str = (
-                    f"
+                    f"Break branch: {branch!r} because nested stage: "
                     f"{stage.iden!r}, failed."
                 )
                 catch(
@@ -2216,8 +2300,7 @@ class ParallelStage(BaseNestedStage):
 
             elif rs.status == CANCEL:
                 error_msg: str = (
-                    "
-                    "end branch execution."
+                    f"Cancel branch: {branch!r} after end nested process."
                 )
                 catch(
                     context=context,
|
|
2257
2340
|
parent_run_id: Optional[str] = None,
|
2258
2341
|
event: Optional[Event] = None,
|
2259
2342
|
) -> Result:
|
2260
|
-
"""Execute parallel each branch via multi-threading pool.
|
2343
|
+
"""Execute parallel each branch via multi-threading pool. The parallel
|
2344
|
+
process will use all-completed strategy to handle result from each
|
2345
|
+
branch.
|
2261
2346
|
|
2262
2347
|
Args:
|
2263
2348
|
params: A parameter data that want to use in this
|
@@ -2268,6 +2353,9 @@ class ParallelStage(BaseNestedStage):
|
|
2268
2353
|
event: An event manager that use to track parent process
|
2269
2354
|
was not force stopped.
|
2270
2355
|
|
2356
|
+
Raises:
|
2357
|
+
StageCancelError: If event was set before start parallel process.
|
2358
|
+
|
2271
2359
|
Returns:
|
2272
2360
|
Result: The execution result with status and context data.
|
2273
2361
|
"""
|
@@ -2275,27 +2363,36 @@ class ParallelStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         event: Event = event or Event()
-
+
+        # NOTE: Start prepare max_workers field if it is string type.
+        if isinstance(self.max_workers, str):
+            max_workers: int = self.__validate_max_workers(
+                pass_env(
+                    param2template(
+                        self.max_workers, params=params, extras=self.extras
+                    )
+                )
+            )
+        else:
+            max_workers: int = self.max_workers
+        trace.info(f"[NESTED]: Parallel with {max_workers} workers.")
         catch(
             context=context,
             status=WAIT,
-            updated={"workers":
+            updated={"workers": max_workers, "parallel": {}},
         )
         len_parallel: int = len(self.parallel)
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start parallel."
-            )
+            raise StageCancelError("Cancel before start parallel process.")
 
-        with ThreadPoolExecutor(
+        with ThreadPoolExecutor(max_workers, "stp") as executor:
             futures: list[Future] = [
                 executor.submit(
-                    self.
+                    self._process_nested,
                     branch=branch,
                     params=params,
-
+                    trace=trace,
                     context=context,
-                    parent_run_id=parent_run_id,
                     event=event,
                 )
                 for branch in self.parallel
@@ -2310,15 +2407,21 @@ class ParallelStage(BaseNestedStage):
                     self.mark_errors(errors, e)
 
         st: Status = validate_statuses(statuses)
-        return Result(
-            run_id=run_id,
-            parent_run_id=parent_run_id,
+        return Result.from_trace(trace).catch(
             status=st,
             context=catch(context, status=st, updated=errors),
-            extras=self.extras,
         )
 
 
+EachType = Union[
+    list[str],
+    list[int],
+    str,
+    dict[str, Any],
+    dict[int, Any],
+]
+
+
 class ForEachStage(BaseNestedStage):
     """For-Each stage executor that execute all stages with each item in the
     foreach list.
|
@@ -2339,13 +2442,7 @@ class ForEachStage(BaseNestedStage):
|
|
2339
2442
|
... }
|
2340
2443
|
"""
|
2341
2444
|
|
2342
|
-
foreach:
|
2343
|
-
list[str],
|
2344
|
-
list[int],
|
2345
|
-
str,
|
2346
|
-
dict[str, Any],
|
2347
|
-
dict[int, Any],
|
2348
|
-
] = Field(
|
2445
|
+
foreach: EachType = Field(
|
2349
2446
|
description=(
|
2350
2447
|
"A items for passing to stages via ${{ item }} template parameter."
|
2351
2448
|
),
|
@@ -2374,15 +2471,14 @@ class ForEachStage(BaseNestedStage):
|
|
2374
2471
|
),
|
2375
2472
|
)
|
2376
2473
|
|
2377
|
-
def
|
2474
|
+
def _process_nested(
|
2378
2475
|
self,
|
2379
2476
|
index: int,
|
2380
2477
|
item: StrOrInt,
|
2381
2478
|
params: DictData,
|
2382
|
-
|
2479
|
+
trace: Trace,
|
2383
2480
|
context: DictData,
|
2384
2481
|
*,
|
2385
|
-
parent_run_id: Optional[str] = None,
|
2386
2482
|
event: Optional[Event] = None,
|
2387
2483
|
) -> tuple[Status, DictData]:
|
2388
2484
|
"""Execute item that will execute all nested-stage that was set in this
|
@@ -2391,32 +2487,29 @@ class ForEachStage(BaseNestedStage):
         This method will create the nested-context from an input context
         data and use it instead the context data.
 
-        :
-
-
-
-
-
-
-
-
+        Args:
+            index: (int) An index value of foreach loop.
+            item: (str | int) An item that want to execution.
+            params: (DictData) A parameter data.
+            trace (Trace): A Trace model.
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
+                (Default is None)
 
         This method should raise error when it wants to stop the foreach
         loop such as cancel event or getting the failed status.
 
-        :
-
-
+        Raises:
+            StageCancelError: If event was set.
+            StageError: If the stage execution raise any Exception error.
+            StageError: If the result from execution has `FAILED` status.
 
-        :
+        Returns:
+            tuple[Status, DictData]
         """
-        trace:
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
-        trace.debug(f"[NESTED]: Execute Item: {item!r}")
+        trace.info(f"[NESTED]: Execute Item: {item!r}")
         key: StrOrInt = index if self.use_index_as_key else item
-
-        # NOTE: Create nested-context data from the passing context.
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"item": item, "loop": index})
         nestet_context: DictData = {"item": item, "stages": {}}
|
|
2430
2523
|
|
2431
2524
|
if event and event.is_set():
|
2432
2525
|
error_msg: str = (
|
2433
|
-
"
|
2434
|
-
"item execution."
|
2526
|
+
f"Cancel item: {key!r} before start nested process."
|
2435
2527
|
)
|
2436
2528
|
catch(
|
2437
2529
|
context=context,
|
@@ -2449,10 +2541,9 @@ class ForEachStage(BaseNestedStage):
|
|
2449
2541
|
)
|
2450
2542
|
raise StageCancelError(error_msg, refs=key)
|
2451
2543
|
|
2452
|
-
# NOTE: Nested-stage execute will pass only params and context only.
|
2453
2544
|
rs: Result = stage.execute(
|
2454
2545
|
params=current_context,
|
2455
|
-
run_id=parent_run_id,
|
2546
|
+
run_id=trace.parent_run_id,
|
2456
2547
|
event=event,
|
2457
2548
|
)
|
2458
2549
|
stage.set_outputs(rs.context, to=nestet_context)
|
@@ -2466,7 +2557,7 @@ class ForEachStage(BaseNestedStage):
|
|
2466
2557
|
|
2467
2558
|
elif rs.status == FAILED: # pragma: no cov
|
2468
2559
|
error_msg: str = (
|
2469
|
-
f"
|
2560
|
+
f"Break item: {key!r} because nested stage: "
|
2470
2561
|
f"{stage.iden!r}, failed."
|
2471
2562
|
)
|
2472
2563
|
trace.warning(f"[NESTED]: {error_msg}")
|
@@ -2488,8 +2579,7 @@ class ForEachStage(BaseNestedStage):
|
|
2488
2579
|
|
2489
2580
|
elif rs.status == CANCEL:
|
2490
2581
|
error_msg: str = (
|
2491
|
-
"
|
2492
|
-
"end item execution."
|
2582
|
+
f"Cancel item: {key!r} after end nested process."
|
2493
2583
|
)
|
2494
2584
|
catch(
|
2495
2585
|
context=context,
|
@@ -2520,6 +2610,42 @@ class ForEachStage(BaseNestedStage):
             },
         )
 
+    def validate_foreach(self, value: Any) -> list[Any]:
+        """Validate foreach value that already passed to this model.
+
+        Args:
+            value:
+
+        Raises:
+            TypeError: If value can not try-convert to list type.
+            ValueError:
+
+        Returns:
+            list[Any]: list of item.
+        """
+        if isinstance(value, str):
+            try:
+                value: list[Any] = str2list(value)
+            except ValueError as e:
+                raise TypeError(
+                    f"Does not support string foreach: {value!r} that can "
+                    f"not convert to list."
+                ) from e
+        # [VALIDATE]: Type of the foreach should be `list` type.
+        elif isinstance(value, dict):
+            raise TypeError(
+                f"Does not support dict foreach: {value!r} ({type(value)}) "
+                f"yet."
+            )
+        # [Validate]: Value in the foreach item should not be duplicate when the
+        # `use_index_as_key` field did not set.
+        elif len(set(value)) != len(value) and not self.use_index_as_key:
+            raise ValueError(
+                "Foreach item should not duplicate. If this stage must to pass "
+                "duplicate item, it should set `use_index_as_key: true`."
+            )
+        return value
+
     def process(
         self,
         params: DictData,
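A standalone sketch of the same validation flow, with `ast.literal_eval` standing in for `str2list` (whose exact parsing rules are not shown in this diff):

    import ast
    from typing import Any

    def validate_foreach(value: Any, use_index_as_key: bool = False) -> list[Any]:
        if isinstance(value, str):
            try:
                value = list(ast.literal_eval(value))
            except (ValueError, SyntaxError) as e:
                raise TypeError(f"Cannot convert foreach string {value!r} to a list.") from e
        elif isinstance(value, dict):
            raise TypeError("dict foreach is not supported yet.")
        if len(set(value)) != len(value) and not use_index_as_key:
            raise ValueError("Items must be unique unless use_index_as_key is set.")
        return value

    print(validate_foreach("[1, 2, 3]"))                     # [1, 2, 3]
    print(validate_foreach([1, 1], use_index_as_key=True))   # duplicates allowed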
@@ -2551,34 +2677,8 @@ class ForEachStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         event: Event = event or Event()
-        foreach:
-
-        )
-
-        # [NOTE]: Force convert str to list.
-        if isinstance(foreach, str):
-            try:
-                foreach: list[Any] = str2list(foreach)
-            except ValueError as e:
-                raise TypeError(
-                    f"Does not support string foreach: {foreach!r} that can "
-                    f"not convert to list."
-                ) from e
-
-        # [VALIDATE]: Type of the foreach should be `list` type.
-        elif isinstance(foreach, dict):
-            raise TypeError(
-                f"Does not support dict foreach: {foreach!r} ({type(foreach)}) "
-                f"yet."
-            )
-        # [Validate]: Value in the foreach item should not be duplicate when the
-        # `use_index_as_key` field did not set.
-        elif len(set(foreach)) != len(foreach) and not self.use_index_as_key:
-            raise ValueError(
-                "Foreach item should not duplicate. If this stage must to pass "
-                "duplicate item, it should set `use_index_as_key: true`."
-            )
-
+        foreach: EachType = self.pass_template(self.foreach, params=params)
+        foreach: list[Any] = self.validate_foreach(foreach)
         trace.info(f"[NESTED]: Foreach: {foreach!r}.")
         catch(
             context=context,
@@ -2587,28 +2687,24 @@ class ForEachStage(BaseNestedStage):
         )
         len_foreach: int = len(foreach)
         if event and event.is_set():
-            raise StageCancelError(
-                "Execution was canceled from the event before start foreach."
-            )
+            raise StageCancelError("Cancel before start foreach process.")
 
         with ThreadPoolExecutor(self.concurrent, "stf") as executor:
             futures: list[Future] = [
                 executor.submit(
-                    self.
-                    index=
+                    self._process_nested,
+                    index=index,
                     item=item,
                     params=params,
-
+                    trace=trace,
                     context=context,
-                    parent_run_id=parent_run_id,
                     event=event,
                 )
-                for
+                for index, item in enumerate(foreach, start=0)
             ]
 
             errors: DictData = {}
             statuses: list[Status] = [WAIT] * len_foreach
-            fail_fast: bool = False
 
             done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
             if len(list(done)) != len(futures):
@@ -2619,7 +2715,7 @@ class ForEachStage(BaseNestedStage):
                 for future in not_done:
                     future.cancel()
 
-                time.sleep(0.
+                time.sleep(0.025)
                 nd: str = (
                     (
                         f", {len(not_done)} item"
@@ -2630,7 +2726,6 @@ class ForEachStage(BaseNestedStage):
                 )
                 trace.debug(f"[NESTED]: ... Foreach-Stage set failed event{nd}")
                 done: Iterator[Future] = as_completed(futures)
-                fail_fast = True
 
         for i, future in enumerate(done, start=0):
             try:
@@ -2640,21 +2735,13 @@ class ForEachStage(BaseNestedStage):
                 statuses[i] = get_status_from_error(e)
                 self.mark_errors(errors, e)
             except CancelledError:
+                statuses[i] = CANCEL
                 pass
 
         status: Status = validate_statuses(statuses)
-
-        # NOTE: Prepare status because it does not cancel from parent event but
-        # cancel from failed item execution.
-        if fail_fast and status == CANCEL:
-            status = FAILED
-
-        return Result(
-            run_id=run_id,
-            parent_run_id=parent_run_id,
+        return Result.from_trace(trace).catch(
             status=status,
             context=catch(context, status=status, updated=errors),
-            extras=self.extras,
         )
@@ -2689,7 +2776,7 @@ class UntilStage(BaseNestedStage):
         ),
     )
     until: str = Field(description="A until condition for stop the while loop.")
-    stages: list[
+    stages: list[NestedStage] = Field(
         default_factory=list,
         description=(
             "A list of stage that will run with each item in until loop."
@@ -2706,38 +2793,33 @@ class UntilStage(BaseNestedStage):
         alias="max-loop",
     )
 
-    def
+    def _process_nested(
         self,
         item: T,
         loop: int,
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData, T]:
         """Execute loop that will execute all nested-stage that was set in this
         stage with specific loop and item.
 
-        :
-
-
-
-
-
-
-
+        Args:
+            item: (T) An item that want to execution.
+            loop: (int) A number of loop.
+            params: (DictData) A parameter data.
+            trace: (Trace)
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
 
-        :
-
+        Returns:
+            tuple[Status, DictData, T]: Return a pair of Result and changed
+                item.
         """
-        trace: Trace = get_trace(
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
         trace.debug(f"[NESTED]: Execute Loop: {loop} (Item {item!r})")
-
-        # NOTE: Create nested-context
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"item": item, "loop": loop})
         nestet_context: DictData = {"loop": loop, "item": item, "stages": {}}
@@ -2752,8 +2834,7 @@ class UntilStage(BaseNestedStage):
 
             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "loop execution."
+                    f"Cancel loop: {i!r} before start nested process."
                 )
                 catch(
                     context=context,
@@ -2774,7 +2855,7 @@ class UntilStage(BaseNestedStage):
 
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=nestet_context)
@@ -2790,8 +2871,8 @@ class UntilStage(BaseNestedStage):
 
             elif rs.status == FAILED:
                 error_msg: str = (
-                    f"
-                    f"
+                    f"Break loop: {i!r} because nested stage: {stage.iden!r}, "
+                    f"failed."
                 )
                 catch(
                     context=context,
@@ -2811,10 +2892,7 @@ class UntilStage(BaseNestedStage):
                 raise StageNestedError(error_msg, refs=loop)
 
             elif rs.status == CANCEL:
-                error_msg: str =
-                    "Loop execution was canceled from the event after "
-                    "end loop execution."
-                )
+                error_msg: str = f"Cancel loop: {i!r} after end nested process."
                 catch(
                     context=context,
                     status=CANCEL,
@@ -2881,35 +2959,33 @@ class UntilStage(BaseNestedStage):
         )
         event: Event = event or Event()
         trace.info(f"[NESTED]: Until: {self.until!r}")
-        item: Union[str, int, bool] =
-            param2template(self.item, params, extras=self.extras)
-        )
+        item: Union[str, int, bool] = self.pass_template(self.item, params)
         loop: int = 1
         until_rs: bool = True
         exceed_loop: bool = False
         catch(context=context, status=WAIT, updated={"until": {}})
         statuses: list[Status] = []
+
         while until_rs and not (exceed_loop := (loop > self.max_loop)):
 
             if event and event.is_set():
                 raise StageCancelError(
-                    "
+                    f"Cancel before start loop process, (loop: {loop})."
                 )
 
-            status, context, item = self.
+            status, context, item = self._process_nested(
                 item=item,
                 loop=loop,
                 params=params,
-
+                trace=trace,
                 context=context,
-                parent_run_id=parent_run_id,
                 event=event,
             )
 
             loop += 1
             if item is None:
                 item: int = loop
-                trace.
+                trace.debug(
                     f"[NESTED]: Return loop not set the item. It uses loop: "
                     f"{loop} by default."
                 )
@@ -2960,6 +3036,13 @@ class Match(BaseModel):
     )
 
 
+class Else(BaseModel):
+    other: list[Stage] = Field(
+        description="A list of stage that does not match any case.",
+        alias="else",
+    )
+
+
 class CaseStage(BaseNestedStage):
     """Case stage executor that execute all stages if the condition was matched.
 
@@ -2989,10 +3072,34 @@ class CaseStage(BaseNestedStage):
         ... ],
         ... }
 
+        >>> stage = {
+        ...     "name": "If stage execution.",
+        ...     "case": "${{ param.test }}",
+        ...     "match": [
+        ...         {
+        ...             "case": "1",
+        ...             "stages": [
+        ...                 {
+        ...                     "name": "Stage case 1",
+        ...                     "eche": "Hello case 1",
+        ...                 },
+        ...             ],
+        ...         },
+        ...         {
+        ...             "else": [
+        ...                 {
+        ...                     "name": "Stage else",
+        ...                     "eche": "Hello case else",
+        ...                 },
+        ...             ],
+        ...         },
+        ...     ],
+        ... }
+
     """
 
     case: str = Field(description="A case condition for routing.")
-    match: list[Match] = Field(
+    match: list[Union[Match, Else]] = Field(
         description="A list of Match model that should not be an empty list.",
     )
     skip_not_match: bool = Field(
@@ -3004,46 +3111,117 @@ class CaseStage(BaseNestedStage):
         alias="skip-not-match",
     )
 
-
+    @field_validator("match", mode="after")
+    def __validate_match(
+        cls, match: list[Union[Match, Else]]
+    ) -> list[Union[Match, Else]]:
+        """Validate the match field should contain only one Else model."""
+        c_else_case: int = 0
+        c_else_model: int = 0
+        for m in match:
+            if isinstance(m, Else):
+                if c_else_model:
+                    raise ValueError(
+                        "Match field should contain only one `Else` model."
+                    )
+                c_else_model += 1
+                continue
+            if isinstance(m, Match) and m.case == "_":
+                if c_else_case:
+                    raise ValueError(
+                        "Match field should contain only one else, '_', case."
+                    )
+                c_else_case += 1
+                continue
+        return match
+
+    def extract_stages_from_case(
+        self, case: StrOrNone, params: DictData
+    ) -> tuple[StrOrNone, list[Stage]]:
+        """Extract stage from case.
+
+        Args:
+            case (StrOrNone):
+            params (DictData):
+
+        Returns:
+            tuple[StrOrNone, list[Stage]]: A pair of case and stages.
+        """
+        _else_stages: Optional[list[Stage]] = None
+        stages: Optional[list[Stage]] = None
+
+        # NOTE: Start check the condition of each stage match with this case.
+        for match in self.match:
+
+            if isinstance(match, Else):
+                _else_stages: list[Stage] = match.other
+                continue
+
+            # NOTE: Store the else case.
+            if (c := match.case) == "_":
+                _else_stages: list[Stage] = match.stages
+                continue
+
+            _condition: str = param2template(c, params, extras=self.extras)
+            if pass_env(case) == pass_env(_condition):
+                stages: list[Stage] = match.stages
+                break
+
+        if stages is not None:
+            return case, stages
+
+        if _else_stages is None:
+            if not self.skip_not_match:
+                raise StageError(
+                    "This stage does not set else for support not match "
+                    "any case."
+                )
+            raise StageSkipError(
+                "Execution was skipped because it does not match any "
+                "case and the else condition does not set too."
+            )
+
+        # NOTE: Force to use the else when it does not match any case.
+        return "_", _else_stages
+
+    def _process_nested(
         self,
         case: str,
         stages: list[Stage],
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData]:
         """Execute case.
 
-        :
-
-
-
-
-
-
-
+        Args:
+            case: (str) A case that want to execution.
+            stages: (list[Stage]) A list of stage.
+            params: (DictData) A parameter data.
+            trace: (Trace)
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
 
-        :
+        Returns:
+            DictData
         """
-        trace:
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
-        trace.debug(f"[NESTED]: Execute Case: {case!r}")
+        trace.info(f"[NESTED]: Case: {case!r}")
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"case": case})
         output: DictData = {"case": case, "stages": {}}
-
+        total_stage: int = len(stages)
+        skips: list[bool] = [False] * total_stage
+        for i, stage in enumerate(stages, start=0):
 
             if self.extras:
                 stage.extras = self.extras
 
             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "stage case execution."
+                    f"Cancel case: {case!r} before start nested process."
                 )
                 return CANCEL, catch(
                     context=context,
@@ -3057,16 +3235,20 @@ class CaseStage(BaseNestedStage):
 
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=output)
             stage.set_outputs(stage.get_outputs(output), to=current_context)
 
-            if rs.status ==
+            if rs.status == SKIP:
+                skips[i] = True
+                continue
+
+            elif rs.status == FAILED:
                 error_msg: str = (
-                    f"
-                    f"
+                    f"Break case: {case!r} because nested stage: {stage.iden}, "
+                    f"failed."
                 )
                 return FAILED, catch(
                     context=context,
@@ -3077,9 +3259,25 @@ class CaseStage(BaseNestedStage):
                         "errors": StageError(error_msg).to_dict(),
                     },
                 )
-
+
+            elif rs.status == CANCEL:
+                error_msg: str = (
+                    f"Cancel case {case!r} after end nested process."
+                )
+                return CANCEL, catch(
+                    context=context,
+                    status=CANCEL,
+                    updated={
+                        "case": case,
+                        "stages": filter_func(output.pop("stages", {})),
+                        "errors": StageCancelError(error_msg).to_dict(),
+                    },
+                )
+
+        status: Status = SKIP if sum(skips) == total_stage else SUCCESS
+        return status, catch(
             context=context,
-            status=
+            status=status,
             updated={
                 "case": case,
                 "stages": filter_func(output.pop("stages", {})),
@@ -3113,52 +3311,17 @@ class CaseStage(BaseNestedStage):
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
 
-
-        trace.info(f"[NESTED]: Get Case: {
-
-        _else: Optional[Match] = None
-        stages: Optional[list[Stage]] = None
-
-        # NOTE: Start check the condition of each stage match with this case.
-        for match in self.match:
-            # NOTE: Store the else case.
-            if (c := match.case) == "_":
-                _else: Match = match
-                continue
-
-            _condition: str = param2template(c, params, extras=self.extras)
-            if pass_env(_case) == pass_env(_condition):
-                stages: list[Stage] = match.stages
-                break
-
-        if stages is None:
-            if _else is None:
-                if not self.skip_not_match:
-                    raise StageError(
-                        "This stage does not set else for support not match "
-                        "any case."
-                    )
-                raise StageSkipError(
-                    "Execution was skipped because it does not match any "
-                    "case and the else condition does not set too."
-                )
-
-            # NOTE: Force to use the else when it does not match any case.
-            _case: str = "_"
-            stages: list[Stage] = _else.stages
-
+        case: StrOrNone = param2template(self.case, params, extras=self.extras)
+        trace.info(f"[NESTED]: Get Case: {case!r}.")
+        case, stages = self.extract_stages_from_case(case, params=params)
         if event and event.is_set():
-            raise StageCancelError(
-
-
-            )
-            status, context = self._process_case(
-                case=_case,
+            raise StageCancelError("Cancel before start case process.")
+        status, context = self._process_nested(
+            case=case,
             stages=stages,
             params=params,
-
+            trace=trace,
             context=context,
-            parent_run_id=parent_run_id,
             event=event,
         )
         return Result(
@@ -3584,6 +3747,59 @@ class VirtualPyStage(PyStage):  # pragma: no cov
             extras=self.extras,
         )
 
+    async def async_process(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Result:
+        raise NotImplementedError(
+            "Async process of Virtual Python stage does not implement yet."
+        )
+
+
+NestedStage = Annotated[
+    Union[
+        BashStage,
+        CallStage,
+        PyStage,
+        VirtualPyStage,
+        RaiseStage,
+        DockerStage,
+        TriggerStage,
+        EmptyStage,
+        CaseStage,
+        ForEachStage,
+        UntilStage,
+    ],
+    Field(
+        union_mode="smart",
+        description="A nested-stage allow list",
+    ),
+]  # pragma: no cov
+
+
+ActionStage = Annotated[
+    Union[
+        BashStage,
+        CallStage,
+        VirtualPyStage,
+        PyStage,
+        RaiseStage,
+        DockerStage,
+        EmptyStage,
+    ],
+    Field(
+        union_mode="smart",
+        description=(
+            "An action stage model that allow to use with nested-stage model."
+        ),
+    ),
+]  # pragma: no cov
+
 
 # NOTE:
 #   An order of parsing stage model on the Job model with `stages` field.
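The `NestedStage` and `ActionStage` aliases rely on pydantic's smart union mode to pick the best-matching stage model from a plain dict. A small standalone sketch of that mechanism with two toy models (not the package's stage classes):

    from typing import Annotated, Union
    from pydantic import BaseModel, Field, TypeAdapter

    class Echo(BaseModel):
        name: str
        echo: str

    class Shell(BaseModel):
        name: str
        bash: str

    # Smart mode scores every member instead of taking the first that validates.
    Step = Annotated[Union[Echo, Shell], Field(union_mode="smart")]

    print(TypeAdapter(Step).validate_python({"name": "run", "bash": "echo hi"}))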
@@ -3592,18 +3808,14 @@ class VirtualPyStage(PyStage):  # pragma: no cov
 #
 Stage = Annotated[
     Union[
-
-        BashStage,
-        CallStage,
-        TriggerStage,
+        # NOTE: Nested Stage.
         ForEachStage,
         UntilStage,
         ParallelStage,
         CaseStage,
-
-
-
-        EmptyStage,
+        TriggerStage,
+        # NOTE: Union with the action stage.
+        ActionStage,
     ],
     Field(
         union_mode="smart",