ddeutil-workflow 0.0.81__py3-none-any.whl → 0.0.83__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +2 -1
- ddeutil/workflow/__cron.py +1 -1
- ddeutil/workflow/__init__.py +21 -7
- ddeutil/workflow/__main__.py +280 -1
- ddeutil/workflow/__types.py +10 -1
- ddeutil/workflow/api/routes/job.py +2 -2
- ddeutil/workflow/api/routes/logs.py +8 -61
- ddeutil/workflow/audits.py +101 -49
- ddeutil/workflow/conf.py +45 -25
- ddeutil/workflow/errors.py +12 -0
- ddeutil/workflow/event.py +34 -11
- ddeutil/workflow/job.py +75 -31
- ddeutil/workflow/result.py +73 -22
- ddeutil/workflow/stages.py +625 -375
- ddeutil/workflow/traces.py +71 -27
- ddeutil/workflow/utils.py +41 -24
- ddeutil/workflow/workflow.py +97 -124
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/METADATA +1 -1
- ddeutil_workflow-0.0.83.dist-info/RECORD +35 -0
- ddeutil/workflow/cli.py +0 -284
- ddeutil_workflow-0.0.81.dist-info/RECORD +0 -36
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/top_level.txt +0 -0
ddeutil/workflow/stages.py
CHANGED
@@ -36,16 +36,6 @@ the stage execution method.
 execute method receives a `params={"params": {...}}` value for passing template
 searching.
 
-All stages model inherit from `BaseStage` or `AsyncBaseStage` models that has the
-base fields:
-
-| field | alias | data type | default | description |
-|-----------|-------|-------------|:--------:|-----------------------------------------------------------------------|
-| id | | str \| None | `None` | A stage ID that use to keep execution output or getting by job owner. |
-| name | | str | | A stage name that want to log when start execution. |
-| condition | if | str \| None | `None` | A stage condition statement to allow stage executable. |
-| extras | | dict | `dict()` | An extra parameter that override core config values. |
-
 It has a special base class is `BaseRetryStage` that inherit from `AsyncBaseStage`
 that use to handle retry execution when it got any error with `retry` field.
 """
@@ -81,7 +71,9 @@ from typing import (
 Annotated,
 Any,
 Callable,
+ClassVar,
 Optional,
+TypedDict,
 TypeVar,
 Union,
 get_type_hints,
@@ -90,11 +82,20 @@ from typing import (
 from ddeutil.core import str2list
 from pydantic import BaseModel, Field, ValidationError
 from pydantic.functional_validators import field_validator, model_validator
-from typing_extensions import Self
+from typing_extensions import NotRequired, Self
 
-from .
+from .__about__ import __python_version__
+from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr, cast_dict
 from .conf import dynamic, pass_env
-from .errors import
+from .errors import (
+StageCancelError,
+StageError,
+StageNestedCancelError,
+StageNestedError,
+StageNestedSkipError,
+StageSkipError,
+to_dict,
+)
 from .result import (
 CANCEL,
 FAILED,
@@ -114,10 +115,11 @@ from .reusables import (
 not_in_template,
 param2template,
 )
-from .traces import
+from .traces import Trace, get_trace
 from .utils import (
 delay,
 dump_all,
+extract_id,
 filter_func,
 gen_id,
 make_exec,
@@ -155,16 +157,15 @@ class BaseStage(BaseModel, ABC):
 process: Main execution logic that must be implemented by subclasses
 
 Example:
-
-
-
-
-
-# Custom execution logic
-return Result(status=SUCCESS)
+>>> class CustomStage(BaseStage):
+...     custom_param: str = Field(description="Custom parameter")
+...
+...     def process(self, params: DictData, **kwargs) -> Result:
+...         return Result(status=SUCCESS)
 ```
 """
 
+action_stage: ClassVar[bool] = False
 extras: DictData = Field(
 default_factory=dict,
 description="An extra parameter that override core config values.",
@@ -208,7 +209,8 @@ class BaseStage(BaseModel, ABC):
|
|
208
209
|
def ___prepare_desc__(cls, value: str) -> str:
|
209
210
|
"""Prepare description string that was created on a template.
|
210
211
|
|
211
|
-
:
|
212
|
+
Returns:
|
213
|
+
str: A dedent and left strip newline of description string.
|
212
214
|
"""
|
213
215
|
return dedent(value.lstrip("\n"))
|
214
216
|
|
@@ -218,19 +220,33 @@ class BaseStage(BaseModel, ABC):
 method will validate name and id fields should not contain any template
 parameter (exclude matrix template).
 
-:
-
+Raises:
+ValueError: When the ID and name fields include matrix parameter
+template with the 'matrix.' string value.
 
-:
+Returns: Self
 """
 # VALIDATE: Validate stage id and name should not dynamic with params
 # template. (allow only matrix)
 if not_in_template(self.id) or not_in_template(self.name):
 raise ValueError(
-"Stage name and ID should only template with 'matrix.
+"Stage name and ID should only template with 'matrix.?'."
 )
 return self
 
+def pass_template(self, value: Any, params: DictData) -> Any:
+"""Pass template and environment variable to any value that can
+templating.
+
+Args:
+value (Any): An any value.
+params (DictData):
+
+Returns:
+Any: A templated value.
+"""
+return pass_env(param2template(value, params, extras=self.extras))
+
 @abstractmethod
 def process(
 self,
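The new `pass_template` helper above folds the usual `param2template` → `pass_env` chain into a single call on the stage. A minimal, self-contained sketch of that composition is below; `render_params` and `expand_env` are hypothetical stand-ins for the package's real template and environment helpers, not its actual API.

```python
import os
import re
from typing import Any

def render_params(value: str, params: dict[str, Any]) -> str:
    """Substitute ``${{ params.<key> }}`` markers (stand-in for param2template)."""
    def repl(match: re.Match) -> str:
        return str(params.get(match.group(1), match.group(0)))
    return re.sub(r"\$\{\{\s*params\.(\w+)\s*\}\}", repl, value)

def expand_env(value: str) -> str:
    """Expand ``$VAR``/``${VAR}`` from the environment (stand-in for pass_env)."""
    return os.path.expandvars(value)

def pass_template(value: str, params: dict[str, Any]) -> str:
    """Render workflow params first, then environment variables."""
    return expand_env(render_params(value, params))

os.environ["DATA_DIR"] = "/tmp/data"
print(pass_template("${DATA_DIR}/${{ params.name }}.csv", {"name": "sales"}))
# -> /tmp/data/sales.csv
```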
@@ -245,10 +261,10 @@ class BaseStage(BaseModel, ABC):
 This is important method that make this class is able to be the stage.
 
 Args:
-params: A parameter data that want to use in this
+params (DictData): A parameter data that want to use in this
 execution.
-run_id: A running stage ID.
-context: A context data.
+run_id (str): A running stage ID.
+context (DictData): A context data.
 parent_run_id: A parent running ID. (Default is None)
 event: An event manager that use to track parent process
 was not force stopped.
@@ -301,10 +317,11 @@ class BaseStage(BaseModel, ABC):
 Result: The execution result with updated status and context.
 """
 ts: float = time.monotonic()
-parent_run_id
-
+parent_run_id, run_id = extract_id(
+self.iden, run_id=run_id, extras=self.extras
+)
 context: DictData = {"status": WAIT}
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 try:
@@ -349,13 +366,18 @@ class BaseStage(BaseModel, ABC):
 # this exception class at other location.
 except (
 StageSkipError,
-
+StageNestedSkipError,
+StageNestedError,
 StageError,
 ) as e:  # pragma: no cov
-
-f"[STAGE]:
-
-
+if isinstance(e, StageNestedError):
+trace.info(f"[STAGE]: Nested: {e}")
+elif isinstance(e, (StageSkipError, StageNestedSkipError)):
+trace.info(f"[STAGE]: ⏭️ Skip: {e}")
+else:
+trace.info(
+f"[STAGE]: Stage Failed:||🚨 {traceback.format_exc()}||"
+)
 st: Status = get_status_from_error(e)
 return Result(
 run_id=run_id,
|
|
366
388
|
status=st,
|
367
389
|
updated=(
|
368
390
|
None
|
369
|
-
if isinstance(e, StageSkipError)
|
391
|
+
if isinstance(e, (StageSkipError, StageNestedSkipError))
|
370
392
|
else {"errors": e.to_dict()}
|
371
393
|
),
|
372
394
|
),
|
@@ -375,8 +397,7 @@ class BaseStage(BaseModel, ABC):
|
|
375
397
|
)
|
376
398
|
except Exception as e:
|
377
399
|
trace.error(
|
378
|
-
f"[STAGE]: Error
|
379
|
-
f"{traceback.format_exc()}"
|
400
|
+
f"[STAGE]: Error Failed:||🚨 {traceback.format_exc()}||"
|
380
401
|
)
|
381
402
|
return Result(
|
382
403
|
run_id=run_id,
|
@@ -488,10 +509,12 @@ class BaseStage(BaseModel, ABC):
|
|
488
509
|
"""Get the outputs from stages data. It will get this stage ID from
|
489
510
|
the stage outputs mapping.
|
490
511
|
|
491
|
-
:
|
492
|
-
stage
|
512
|
+
Args:
|
513
|
+
output (DictData): A stage output context that want to get this
|
514
|
+
stage ID `outputs` key.
|
493
515
|
|
494
|
-
:
|
516
|
+
Returns:
|
517
|
+
DictData: An output value that have get with its identity.
|
495
518
|
"""
|
496
519
|
if self.id is None and not dynamic(
|
497
520
|
"stage_default_id", extras=self.extras
|
@@ -564,13 +587,53 @@ class BaseStage(BaseModel, ABC):
|
|
564
587
|
"""
|
565
588
|
return False
|
566
589
|
|
567
|
-
def
|
590
|
+
def detail(self) -> DictData: # pragma: no cov
|
591
|
+
"""Return the detail of this stage for generate markdown.
|
592
|
+
|
593
|
+
Returns:
|
594
|
+
DictData: A dict that was dumped from this model with alias mode.
|
595
|
+
"""
|
596
|
+
return self.model_dump(by_alias=True)
|
597
|
+
|
598
|
+
def md(self) -> str: # pragma: no cov
|
568
599
|
"""Return generated document that will be the interface of this stage.
|
569
600
|
|
570
601
|
:rtype: str
|
571
602
|
"""
|
572
603
|
return self.desc
|
573
604
|
|
605
|
+
def dryrun(
|
606
|
+
self,
|
607
|
+
params: DictData,
|
608
|
+
run_id: str,
|
609
|
+
context: DictData,
|
610
|
+
*,
|
611
|
+
parent_run_id: Optional[str] = None,
|
612
|
+
event: Optional[Event] = None,
|
613
|
+
) -> Optional[Result]: # pragma: no cov
|
614
|
+
"""Pre-process method that will use to run with dry-run mode, and it
|
615
|
+
should be used before process method.
|
616
|
+
"""
|
617
|
+
|
618
|
+
def to_empty(self, sleep: int = 0.35) -> EmptyStage: # pragma: no cov
|
619
|
+
"""Convert the current Stage model to the EmptyStage model for dry-run
|
620
|
+
mode if the `action_stage` class attribute has set.
|
621
|
+
|
622
|
+
Returns:
|
623
|
+
EmptyStage: An EmptyStage model that passing itself model data to
|
624
|
+
message.
|
625
|
+
"""
|
626
|
+
return EmptyStage.model_validate(
|
627
|
+
{
|
628
|
+
"name": self.name,
|
629
|
+
"id": self.id,
|
630
|
+
"desc": self.desc,
|
631
|
+
"if": self.condition,
|
632
|
+
"echo": f"Convert from {self.__class__.__name__}",
|
633
|
+
"sleep": sleep,
|
634
|
+
}
|
635
|
+
)
|
636
|
+
|
574
637
|
|
575
638
|
class BaseAsyncStage(BaseStage, ABC):
|
576
639
|
"""Base Async Stage model to make any stage model allow async execution for
|
@@ -632,9 +695,9 @@ class BaseAsyncStage(BaseStage, ABC):
 """
 ts: float = time.monotonic()
 parent_run_id: StrOrNone = run_id
-run_id: str =
+run_id: str = gen_id(self.iden, unique=True, extras=self.extras)
 context: DictData = {}
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 try:
@@ -677,13 +740,18 @@ class BaseAsyncStage(BaseStage, ABC):
 # this exception class at other location.
 except (
 StageSkipError,
-
+StageNestedSkipError,
+StageNestedError,
 StageError,
 ) as e:  # pragma: no cov
-
-f"[STAGE]:
-
-
+if isinstance(e, StageNestedError):
+await trace.ainfo(f"[STAGE]: Nested: {e}")
+elif isinstance(e, (StageSkipError, StageNestedSkipError)):
+await trace.ainfo(f"[STAGE]: ⏭️ Skip: {e}")
+else:
+await trace.ainfo(
+f"[STAGE]: Stage Failed:||🚨 {traceback.format_exc()}||"
+)
 st: Status = get_status_from_error(e)
 return Result(
 run_id=run_id,
|
|
694
762
|
status=st,
|
695
763
|
updated=(
|
696
764
|
None
|
697
|
-
if isinstance(e, StageSkipError)
|
698
|
-
else {"
|
765
|
+
if isinstance(e, (StageSkipError, StageNestedSkipError))
|
766
|
+
else {"errors": e.to_dict()}
|
699
767
|
),
|
700
768
|
),
|
701
769
|
info={"execution_time": time.monotonic() - ts},
|
@@ -703,8 +771,7 @@ class BaseAsyncStage(BaseStage, ABC):
|
|
703
771
|
)
|
704
772
|
except Exception as e:
|
705
773
|
await trace.aerror(
|
706
|
-
f"[STAGE]: Error
|
707
|
-
f"{traceback.format_exc()}"
|
774
|
+
f"[STAGE]: Error Failed:||🚨 {traceback.format_exc()}||"
|
708
775
|
)
|
709
776
|
return Result(
|
710
777
|
run_id=run_id,
|
@@ -777,7 +844,7 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
 current_retry: int = 0
 exception: Exception
 catch(context, status=WAIT)
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 
@@ -816,6 +883,14 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
 parent_run_id=parent_run_id,
 event=event,
 )
+except (
+StageSkipError,
+StageNestedSkipError,
+StageCancelError,
+StageNestedCancelError,
+):
+trace.debug("[STAGE]: process raise skip or cancel error.")
+raise
 except Exception as e:
 current_retry += 1
 trace.warning(
@@ -823,6 +898,7 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
 f"( {e.__class__.__name__} )"
 )
 exception = e
+time.sleep(1.2**current_retry)
 
 trace.error(
 f"[STAGE]: Reach the maximum of retry number: {self.retry}."
|
|
850
926
|
current_retry: int = 0
|
851
927
|
exception: Exception
|
852
928
|
catch(context, status=WAIT)
|
853
|
-
trace:
|
929
|
+
trace: Trace = get_trace(
|
854
930
|
run_id, parent_run_id=parent_run_id, extras=self.extras
|
855
931
|
)
|
856
932
|
|
@@ -889,6 +965,16 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
 parent_run_id=parent_run_id,
 event=event,
 )
+except (
+StageSkipError,
+StageNestedSkipError,
+StageCancelError,
+StageNestedCancelError,
+):
+await trace.adebug(
+"[STAGE]: process raise skip or cancel error."
+)
+raise
 except Exception as e:
 current_retry += 1
 await trace.awarning(
@@ -896,6 +982,7 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
 f"( {e.__class__.__name__} )"
 )
 exception = e
+await asyncio.sleep(1.2**current_retry)
 
 await trace.aerror(
 f"[STAGE]: Reach the maximum of retry number: {self.retry}."
@@ -908,19 +995,15 @@ class EmptyStage(BaseAsyncStage):
 
 EmptyStage is a utility stage that performs no actual work but provides
 logging output and optional delays. It's commonly used for:
-
-
-
-
+- Debugging workflow execution flow
+- Adding informational messages to workflows
+- Creating delays between stages
+- Testing template parameter resolution
 
 The stage outputs the echo message to stdout and can optionally sleep
 for a specified duration, making it useful for workflow timing control
 and debugging scenarios.
 
-Attributes:
-echo (str, optional): Message to display during execution
-sleep (float): Duration to sleep after logging (0-1800 seconds)
-
 Example:
 ```yaml
 stages:
@@ -932,24 +1015,25 @@ class EmptyStage(BaseAsyncStage):
 echo: "Processing file: ${{ params.filename }}"
 ```
 
-
-
-
-
-
-)
-```
+>>> stage = EmptyStage(
+...     name="Status Update",
+...     echo="Processing completed successfully",
+...     sleep=1.0
+... )
 """
 
 echo: StrOrNone = Field(
 default=None,
-description=
+description=(
+"A message that want to display on the stdout during execution. "
+"By default, it do not show any message."
+),
 )
 sleep: float = Field(
 default=0,
 description=(
-"A second value to sleep
-"
+"A duration in second value to sleep after logging. This value "
+"should between 0 - 1800 seconds."
 ),
 ge=0,
 lt=1800,
@@ -982,7 +1066,7 @@ class EmptyStage(BaseAsyncStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 message: str = (
@@ -994,9 +1078,7 @@ class EmptyStage(BaseAsyncStage):
 )
 
 if event and event.is_set():
-raise StageCancelError(
-"Execution was canceled from the event before start parallel."
-)
+raise StageCancelError("Cancel before start empty process.")
 
 trace.info(f"[STAGE]: Message: ( {message} )")
 if self.sleep > 0:
@@ -1035,7 +1117,7 @@ class EmptyStage(BaseAsyncStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 message: str = (
@@ -1047,9 +1129,7 @@ class EmptyStage(BaseAsyncStage):
 )
 
 if event and event.is_set():
-raise StageCancelError(
-"Execution was canceled from the event before start parallel."
-)
+raise StageCancelError("Cancel before start empty process.")
 
 trace.info(f"[STAGE]: Message: ( {message} )")
 if self.sleep > 0:
@@ -1200,7 +1280,7 @@ class BashStage(BaseRetryStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 bash: str = param2template(
@@ -1264,7 +1344,7 @@ class BashStage(BaseRetryStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 bash: str = param2template(
@@ -1394,21 +1474,21 @@ class PyStage(BaseRetryStage):
 to globals argument on `exec` build-in function.
 
 Args:
-params: A parameter data that want to use in this
+params (DictData): A parameter data that want to use in this
 execution.
-run_id: A running stage ID.
+run_id (str): A running stage ID.
 context: A context data.
-parent_run_id: A parent running ID.
+parent_run_id (str | None, default None): A parent running ID.
 event: An event manager that use to track parent process
 was not force stopped.
 
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
-trace.
+trace.debug("[STAGE]: Prepare `globals` and `locals` variables.")
 lc: DictData = {}
 gb: DictData = (
 globals()
@@ -1486,7 +1566,7 @@ class PyStage(BaseRetryStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 await trace.ainfo("[STAGE]: Prepare `globals` and `locals` variables.")
@@ -1631,7 +1711,7 @@ class CallStage(BaseRetryStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 call_func: TagFunc = self.get_caller(params=params)()
@@ -1687,9 +1767,7 @@ class CallStage(BaseRetryStage):
 args.pop("extras")
 
 if event and event.is_set():
-raise StageCancelError(
-"Execution was canceled from the event before start parallel."
-)
+raise StageCancelError("Cancel before start call process.")
 
 args: DictData = self.validate_model_args(
 call_func, args, run_id, parent_run_id, extras=self.extras
@@ -1750,7 +1828,7 @@ class CallStage(BaseRetryStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 call_func: TagFunc = self.get_caller(params=params)()
@@ -1807,9 +1885,7 @@ class CallStage(BaseRetryStage):
 args.pop("extras")
 
 if event and event.is_set():
-raise StageCancelError(
-"Execution was canceled from the event before start parallel."
-)
+raise StageCancelError("Cancel before start call process.")
 
 args: DictData = self.validate_model_args(
 call_func, args, run_id, parent_run_id, extras=self.extras
@@ -1884,7 +1960,7 @@ class CallStage(BaseRetryStage):
 "Validate argument from the caller function raise invalid type."
 ) from e
 except TypeError as e:
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=extras
 )
 trace.warning(
@@ -1922,8 +1998,9 @@ class BaseNestedStage(BaseRetryStage, ABC):
 """Make the errors context result with the refs value depends on the nested
 execute func.
 
-:
-
+Args:
+context: (DictData) A context data.
+error: (StageError) A stage exception object.
 """
 if "errors" in context:
 context["errors"][error.refs] = error.to_dict()
@@ -1963,6 +2040,9 @@ class TriggerStage(BaseNestedStage):
 execute method. This is the stage that allow you to create the reusable
 Workflow template with dynamic parameters.
 
+This stage does not allow to pass the workflow model directly to the
+trigger field. A trigger workflow name should exist on the config path only.
+
 Data Validate:
 >>> stage = {
 ...     "name": "Trigger workflow stage execution",
@@ -2009,11 +2089,13 @@ class TriggerStage(BaseNestedStage):
 """
 from .workflow import Workflow
 
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 _trigger: str = param2template(self.trigger, params, extras=self.extras)
-
+if _trigger == self.extras.get("__sys_break_circle_exec", "NOTSET"):
+raise StageError("Circle execute via trigger itself workflow name.")
+trace.info(f"[NESTED]: Load Workflow Config: {_trigger!r}")
 result: Result = Workflow.from_conf(
 name=pass_env(_trigger),
 extras=self.extras,
@@ -2024,19 +2106,48 @@ class TriggerStage(BaseNestedStage):
 event=event,
 )
 if result.status == FAILED:
-err_msg:
+err_msg: str = (
 f" with:\n{msg}"
 if (msg := result.context.get("errors", {}).get("message"))
 else "."
 )
-
+return result.catch(
+status=FAILED,
+context={
+"status": FAILED,
+"errors": StageError(
+f"Trigger workflow was failed{err_msg}"
+).to_dict(),
+},
+)
 elif result.status == CANCEL:
-
+return result.catch(
+status=CANCEL,
+context={
+"status": CANCEL,
+"errors": StageCancelError(
+"Trigger workflow was cancel."
+).to_dict(),
+},
+)
 elif result.status == SKIP:
-
+return result.catch(
+status=SKIP,
+context={
+"status": SKIP,
+"errors": StageSkipError(
+"Trigger workflow was skipped."
+).to_dict(),
+},
+)
 return result
 
 
+class ParallelContext(TypedDict):
+branch: str
+stages: NotRequired[dict[str, Any]]
+
+
 class ParallelStage(BaseNestedStage):
 """Parallel stage executor that execute branch stages with multithreading.
 This stage let you set the fix branches for running child stage inside it on
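`ParallelContext` above is a `TypedDict`: the per-branch context stays a plain `dict` at runtime (which is why the diff later routes it through `cast_dict`), while type checkers can still verify the `branch` key and treat `stages` as optional via `NotRequired`. A small self-contained sketch of that pattern, using an illustrative `BranchContext` rather than the package's own class:

```python
from typing import Any, TypedDict

from typing_extensions import NotRequired  # same import the diff uses


class BranchContext(TypedDict):
    """Illustrative stand-in for the ParallelContext shape in the diff."""

    branch: str
    stages: NotRequired[dict[str, Any]]


def new_branch_context(branch: str) -> BranchContext:
    # A TypedDict instance is just a dict, so it can be handed to any code
    # that expects a generic mapping of stage outputs.
    return {"branch": branch, "stages": {}}


ctx = new_branch_context("extract")
ctx["stages"]["load-file"] = {"status": "SUCCESS"}
print(ctx)  # {'branch': 'extract', 'stages': {'load-file': {'status': 'SUCCESS'}}}
```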
@@ -2074,10 +2185,8 @@ class ParallelStage(BaseNestedStage):
 parallel: dict[str, list[Stage]] = Field(
 description="A mapping of branch name and its stages.",
 )
-max_workers: int = Field(
+max_workers: Union[int, str] = Field(
 default=2,
-ge=1,
-lt=20,
 description=(
 "The maximum multi-thread pool worker size for execution parallel. "
 "This value should be gather or equal than 1, and less than 20."
@@ -2085,44 +2194,47 @@ class ParallelStage(BaseNestedStage):
 alias="max-workers",
 )
 
-
+@field_validator("max_workers")
+def __validate_max_workers(cls, value: Union[int, str]) -> Union[int, str]:
+"""Validate `max_workers` field that should has value between 1 and 19."""
+if isinstance(value, int) and (value < 1 or value >= 20):
+raise ValueError("A max-workers value should between 1 and 19.")
+return value
+
+def _process_nested(
 self,
 branch: str,
 params: DictData,
-
+trace: Trace,
 context: DictData,
 *,
-parent_run_id: Optional[str] = None,
 event: Optional[Event] = None,
 ) -> tuple[Status, DictData]:
 """Execute branch that will execute all nested-stage that was set in
 this stage with specific branch ID.
 
-:
-
-
-
-
-
-
-
+Args:
+branch (str): A branch ID.
+params (DictData): A parameter data.
+trace (Trace): A Trace model.
+context (DictData):
+event: (Event) An Event manager instance that use to cancel this
+execution if it forces stopped by parent execution.
+(Default is None)
 
-:
-
-
-
+Raises:
+StageCancelError: If event was set before start stage execution.
+StageCancelError: If result from a nested-stage return canceled
+status.
+StageError: If result from a nested-stage return failed status.
 
-:
+Returns:
+tuple[Status, DictData]: A pair of status and result context data.
 """
-trace:
-run_id, parent_run_id=parent_run_id, extras=self.extras
-)
-trace.debug(f"[STAGE]: Execute Branch: {branch!r}")
-
-# NOTE: Create nested-context
+trace.info(f"[NESTED]: Execute Branch: {branch!r}")
 current_context: DictData = copy.deepcopy(params)
 current_context.update({"branch": branch})
-nestet_context:
+nestet_context: ParallelContext = {"branch": branch, "stages": {}}
 
 total_stage: int = len(self.parallel[branch])
 skips: list[bool] = [False] * total_stage
@@ -2133,8 +2245,7 @@ class ParallelStage(BaseNestedStage):
 
 if event and event.is_set():
 error_msg: str = (
-"
-"start branch execution."
+f"Cancel branch: {branch!r} before start nested process."
 )
 catch(
 context=context,
@@ -2154,12 +2265,12 @@ class ParallelStage(BaseNestedStage):
 
 rs: Result = stage.execute(
 params=current_context,
-run_id=parent_run_id,
+run_id=trace.parent_run_id,
 event=event,
 )
-stage.set_outputs(rs.context, to=nestet_context)
+stage.set_outputs(rs.context, to=cast_dict(nestet_context))
 stage.set_outputs(
-stage.get_outputs(nestet_context), to=current_context
+stage.get_outputs(cast_dict(nestet_context)), to=current_context
 )
 
 if rs.status == SKIP:
@@ -2168,7 +2279,7 @@ class ParallelStage(BaseNestedStage):
 
 elif rs.status == FAILED:  # pragma: no cov
 error_msg: str = (
-f"
+f"Break branch: {branch!r} because nested stage: "
 f"{stage.iden!r}, failed."
 )
 catch(
@@ -2189,8 +2300,7 @@ class ParallelStage(BaseNestedStage):
 
 elif rs.status == CANCEL:
 error_msg: str = (
-"
-"end branch execution."
+f"Cancel branch: {branch!r} after end nested process."
 )
 catch(
 context=context,
@@ -2230,7 +2340,9 @@ class ParallelStage(BaseNestedStage):
 parent_run_id: Optional[str] = None,
 event: Optional[Event] = None,
 ) -> Result:
-"""Execute parallel each branch via multi-threading pool.
+"""Execute parallel each branch via multi-threading pool. The parallel
+process will use all-completed strategy to handle result from each
+branch.
 
 Args:
 params: A parameter data that want to use in this
@@ -2241,34 +2353,46 @@ class ParallelStage(BaseNestedStage):
 event: An event manager that use to track parent process
 was not force stopped.
 
+Raises:
+StageCancelError: If event was set before start parallel process.
+
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 event: Event = event or Event()
-
+
+# NOTE: Start prepare max_workers field if it is string type.
+if isinstance(self.max_workers, str):
+max_workers: int = self.__validate_max_workers(
+pass_env(
+param2template(
+self.max_workers, params=params, extras=self.extras
+)
+)
+)
+else:
+max_workers: int = self.max_workers
+trace.info(f"[NESTED]: Parallel with {max_workers} workers.")
 catch(
 context=context,
 status=WAIT,
-updated={"workers":
+updated={"workers": max_workers, "parallel": {}},
 )
 len_parallel: int = len(self.parallel)
 if event and event.is_set():
-raise StageCancelError(
-"Execution was canceled from the event before start parallel."
-)
+raise StageCancelError("Cancel before start parallel process.")
 
-with ThreadPoolExecutor(
+with ThreadPoolExecutor(max_workers, "stp") as executor:
 futures: list[Future] = [
 executor.submit(
-self.
+self._process_nested,
 branch=branch,
 params=params,
-
+trace=trace,
 context=context,
-parent_run_id=parent_run_id,
 event=event,
 )
 for branch in self.parallel
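Because `max_workers` can now be a string, a pool size such as `"${{ params.workers }}"` is rendered at execution time and re-checked against the 1–19 range before the thread pool starts. A rough standalone sketch of that resolve-then-validate step; `render` here is a hypothetical stand-in for the `param2template`/`pass_env` pair:

```python
from typing import Any, Union

def validate_max_workers(value: Union[int, str]) -> Union[int, str]:
    """Mirror of the range check: an int must stay between 1 and 19."""
    if isinstance(value, int) and (value < 1 or value >= 20):
        raise ValueError("A max-workers value should between 1 and 19.")
    return value

def render(value: str, params: dict[str, Any]) -> str:
    # Hypothetical stand-in for param2template + pass_env.
    return value.replace("${{ params.workers }}", str(params["workers"]))

def resolve_max_workers(raw: Union[int, str], params: dict[str, Any]) -> int:
    if isinstance(raw, str):
        return validate_max_workers(int(render(raw, params)))
    return validate_max_workers(raw)

print(resolve_max_workers("${{ params.workers }}", {"workers": 4}))  # 4
print(resolve_max_workers(2, {}))                                    # 2
```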
@@ -2283,15 +2407,21 @@ class ParallelStage(BaseNestedStage):
 self.mark_errors(errors, e)
 
 st: Status = validate_statuses(statuses)
-return Result(
-run_id=run_id,
-parent_run_id=parent_run_id,
+return Result.from_trace(trace).catch(
 status=st,
 context=catch(context, status=st, updated=errors),
-extras=self.extras,
 )
 
 
+EachType = Union[
+list[str],
+list[int],
+str,
+dict[str, Any],
+dict[int, Any],
+]
+
+
 class ForEachStage(BaseNestedStage):
 """For-Each stage executor that execute all stages with each item in the
 foreach list.
@@ -2312,13 +2442,7 @@ class ForEachStage(BaseNestedStage):
 ... }
 """
 
-foreach:
-list[str],
-list[int],
-str,
-dict[str, Any],
-dict[int, Any],
-] = Field(
+foreach: EachType = Field(
 description=(
 "A items for passing to stages via ${{ item }} template parameter."
 ),
@@ -2347,15 +2471,14 @@ class ForEachStage(BaseNestedStage):
 ),
 )
 
-def
+def _process_nested(
 self,
 index: int,
 item: StrOrInt,
 params: DictData,
-
+trace: Trace,
 context: DictData,
 *,
-parent_run_id: Optional[str] = None,
 event: Optional[Event] = None,
 ) -> tuple[Status, DictData]:
 """Execute item that will execute all nested-stage that was set in this
@@ -2364,32 +2487,29 @@ class ForEachStage(BaseNestedStage):
 This method will create the nested-context from an input context
 data and use it instead the context data.
 
-:
-
-
-
-
-
-
-
-
+Args:
+index: (int) An index value of foreach loop.
+item: (str | int) An item that want to execution.
+params: (DictData) A parameter data.
+trace (Trace): A Trace model.
+context: (DictData)
+event: (Event) An Event manager instance that use to cancel this
+execution if it forces stopped by parent execution.
+(Default is None)
 
 This method should raise error when it wants to stop the foreach
 loop such as cancel event or getting the failed status.
 
-:
-
-
+Raises:
+StageCancelError: If event was set.
+StageError: If the stage execution raise any Exception error.
+StageError: If the result from execution has `FAILED` status.
 
-:
+Returns:
+tuple[Status, DictData]
 """
-trace:
-run_id, parent_run_id=parent_run_id, extras=self.extras
-)
-trace.debug(f"[STAGE]: Execute Item: {item!r}")
+trace.info(f"[NESTED]: Execute Item: {item!r}")
 key: StrOrInt = index if self.use_index_as_key else item
-
-# NOTE: Create nested-context data from the passing context.
 current_context: DictData = copy.deepcopy(params)
 current_context.update({"item": item, "loop": index})
 nestet_context: DictData = {"item": item, "stages": {}}
@@ -2403,8 +2523,7 @@ class ForEachStage(BaseNestedStage):
 
 if event and event.is_set():
 error_msg: str = (
-"
-"item execution."
+f"Cancel item: {key!r} before start nested process."
 )
 catch(
 context=context,
@@ -2422,10 +2541,9 @@ class ForEachStage(BaseNestedStage):
 )
 raise StageCancelError(error_msg, refs=key)
 
-# NOTE: Nested-stage execute will pass only params and context only.
 rs: Result = stage.execute(
 params=current_context,
-run_id=parent_run_id,
+run_id=trace.parent_run_id,
 event=event,
 )
 stage.set_outputs(rs.context, to=nestet_context)
@@ -2439,10 +2557,10 @@ class ForEachStage(BaseNestedStage):
 
 elif rs.status == FAILED:  # pragma: no cov
 error_msg: str = (
-f"
+f"Break item: {key!r} because nested stage: "
 f"{stage.iden!r}, failed."
 )
-trace.warning(f"[
+trace.warning(f"[NESTED]: {error_msg}")
 catch(
 context=context,
 status=FAILED,
@@ -2461,8 +2579,7 @@ class ForEachStage(BaseNestedStage):
 
 elif rs.status == CANCEL:
 error_msg: str = (
-"
-"end item execution."
+f"Cancel item: {key!r} after end nested process."
 )
 catch(
 context=context,
@@ -2493,6 +2610,42 @@ class ForEachStage(BaseNestedStage):
 },
 )
 
+def validate_foreach(self, value: Any) -> list[Any]:
+"""Validate foreach value that already passed to this model.
+
+Args:
+value:
+
+Raises:
+TypeError: If value can not try-convert to list type.
+ValueError:
+
+Returns:
+list[Any]: list of item.
+"""
+if isinstance(value, str):
+try:
+value: list[Any] = str2list(value)
+except ValueError as e:
+raise TypeError(
+f"Does not support string foreach: {value!r} that can "
+f"not convert to list."
+) from e
+# [VALIDATE]: Type of the foreach should be `list` type.
+elif isinstance(value, dict):
+raise TypeError(
+f"Does not support dict foreach: {value!r} ({type(value)}) "
+f"yet."
+)
+# [Validate]: Value in the foreach item should not be duplicate when the
+# `use_index_as_key` field did not set.
+elif len(set(value)) != len(value) and not self.use_index_as_key:
+raise ValueError(
+"Foreach item should not duplicate. If this stage must to pass "
+"duplicate item, it should set `use_index_as_key: true`."
+)
+return value
+
 def process(
 self,
 params: DictData,
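`validate_foreach` pulls the checks that used to sit inline in `process` into one method: a string is parsed into a list (via `str2list`), a dict is rejected, and duplicate items are rejected unless `use_index_as_key` is set. A condensed standalone sketch of the same rules, using `ast.literal_eval` as a stand-in for `ddeutil.core.str2list`:

```python
import ast
from typing import Any

def validate_foreach(value: Any, use_index_as_key: bool = False) -> list[Any]:
    """Apply the same three checks the stage performs on its foreach value."""
    if isinstance(value, str):
        try:
            value = list(ast.literal_eval(value))  # stand-in for str2list
        except (ValueError, SyntaxError) as e:
            raise TypeError(f"Does not support string foreach: {value!r}") from e
    if isinstance(value, dict):
        raise TypeError(f"Does not support dict foreach: {value!r} yet.")
    if len(set(value)) != len(value) and not use_index_as_key:
        raise ValueError(
            "Foreach item should not duplicate; set `use_index_as_key: true` "
            "to allow duplicates."
        )
    return list(value)

print(validate_foreach("[1, 2, 3]"))                       # [1, 2, 3]
print(validate_foreach([1, 1, 2], use_index_as_key=True))  # [1, 1, 2]
```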
@@ -2520,39 +2673,13 @@ class ForEachStage(BaseNestedStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 event: Event = event or Event()
-foreach:
-
-)
-
-# [NOTE]: Force convert str to list.
-if isinstance(foreach, str):
-try:
-foreach: list[Any] = str2list(foreach)
-except ValueError as e:
-raise TypeError(
-f"Does not support string foreach: {foreach!r} that can "
-f"not convert to list."
-) from e
-
-# [VALIDATE]: Type of the foreach should be `list` type.
-elif isinstance(foreach, dict):
-raise TypeError(
-f"Does not support dict foreach: {foreach!r} ({type(foreach)}) "
-f"yet."
-)
-# [Validate]: Value in the foreach item should not be duplicate when the
-# `use_index_as_key` field did not set.
-elif len(set(foreach)) != len(foreach) and not self.use_index_as_key:
-raise ValueError(
-"Foreach item should not duplicate. If this stage must to pass "
-"duplicate item, it should set `use_index_as_key: true`."
-)
-
-trace.info(f"[STAGE]: Foreach: {foreach!r}.")
+foreach: EachType = self.pass_template(self.foreach, params=params)
+foreach: list[Any] = self.validate_foreach(foreach)
+trace.info(f"[NESTED]: Foreach: {foreach!r}.")
 catch(
 context=context,
 status=WAIT,
@@ -2560,39 +2687,35 @@ class ForEachStage(BaseNestedStage):
 )
 len_foreach: int = len(foreach)
 if event and event.is_set():
-raise StageCancelError(
-"Execution was canceled from the event before start foreach."
-)
+raise StageCancelError("Cancel before start foreach process.")
 
 with ThreadPoolExecutor(self.concurrent, "stf") as executor:
 futures: list[Future] = [
 executor.submit(
-self.
-index=
+self._process_nested,
+index=index,
 item=item,
 params=params,
-
+trace=trace,
 context=context,
-parent_run_id=parent_run_id,
 event=event,
 )
-for
+for index, item in enumerate(foreach, start=0)
 ]
 
 errors: DictData = {}
 statuses: list[Status] = [WAIT] * len_foreach
-fail_fast: bool = False
 
 done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
 if len(list(done)) != len(futures):
 trace.warning(
-"[
+"[NESTED]: Set the event for stop pending for-each stage."
 )
 event.set()
 for future in not_done:
 future.cancel()
 
-time.sleep(0.
+time.sleep(0.025)
 nd: str = (
 (
 f", {len(not_done)} item"
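The foreach pool still waits with `return_when=FIRST_EXCEPTION`: once any item raises, the shared event is set, futures that have not started are cancelled, and everything is then drained through `as_completed`, with `CancelledError` counted separately (the diff now records those as `CANCEL`). A minimal self-contained sketch of that shutdown pattern using plain functions instead of stages:

```python
from concurrent.futures import (
    FIRST_EXCEPTION,
    CancelledError,
    ThreadPoolExecutor,
    as_completed,
    wait,
)
from threading import Event

def work(item: int, event: Event) -> int:
    if event.is_set():
        raise RuntimeError(f"cancelled before item {item}")
    if item == 2:
        raise ValueError(f"item {item} failed")
    return item * 10

event = Event()
with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(work, i, event) for i in range(5)]
    done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
    if len(done) != len(futures):
        event.set()              # stop items that have not started yet
        for future in not_done:
            future.cancel()
    for future in as_completed(futures):
        try:
            print(future.result())
        except CancelledError:
            print("cancelled")
        except Exception as exc:
            print(f"error: {exc!r}")
```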
@@ -2601,9 +2724,8 @@ class ForEachStage(BaseNestedStage):
 if not_done
 else ""
 )
-trace.debug(f"[
+trace.debug(f"[NESTED]: ... Foreach-Stage set failed event{nd}")
 done: Iterator[Future] = as_completed(futures)
-fail_fast = True
 
 for i, future in enumerate(done, start=0):
 try:
@@ -2613,21 +2735,13 @@ class ForEachStage(BaseNestedStage):
 statuses[i] = get_status_from_error(e)
 self.mark_errors(errors, e)
 except CancelledError:
+statuses[i] = CANCEL
 pass
 
 status: Status = validate_statuses(statuses)
-
-# NOTE: Prepare status because it does not cancel from parent event but
-# cancel from failed item execution.
-if fail_fast and status == CANCEL:
-status = FAILED
-
-return Result(
-run_id=run_id,
-parent_run_id=parent_run_id,
+return Result.from_trace(trace).catch(
 status=status,
 context=catch(context, status=status, updated=errors),
-extras=self.extras,
 )
 
 
@@ -2662,7 +2776,7 @@ class UntilStage(BaseNestedStage):
 ),
 )
 until: str = Field(description="A until condition for stop the while loop.")
-stages: list[
+stages: list[NestedStage] = Field(
 default_factory=list,
 description=(
 "A list of stage that will run with each item in until loop."
@@ -2679,38 +2793,33 @@ class UntilStage(BaseNestedStage):
 alias="max-loop",
 )
 
-def
+def _process_nested(
 self,
 item: T,
 loop: int,
 params: DictData,
-
+trace: Trace,
 context: DictData,
 *,
-parent_run_id: Optional[str] = None,
 event: Optional[Event] = None,
 ) -> tuple[Status, DictData, T]:
 """Execute loop that will execute all nested-stage that was set in this
 stage with specific loop and item.
 
-:
-
-
-
-
-
-
-
+Args:
+item: (T) An item that want to execution.
+loop: (int) A number of loop.
+params: (DictData) A parameter data.
+trace: (Trace)
+context: (DictData)
+event: (Event) An Event manager instance that use to cancel this
+execution if it forces stopped by parent execution.
 
-:
-
+Returns:
+tuple[Status, DictData, T]: Return a pair of Result and changed
+item.
 """
-trace:
-run_id, parent_run_id=parent_run_id, extras=self.extras
-)
-trace.debug(f"[STAGE]: Execute Loop: {loop} (Item {item!r})")
-
-# NOTE: Create nested-context
+trace.debug(f"[NESTED]: Execute Loop: {loop} (Item {item!r})")
 current_context: DictData = copy.deepcopy(params)
 current_context.update({"item": item, "loop": loop})
 nestet_context: DictData = {"loop": loop, "item": item, "stages": {}}
@@ -2725,8 +2834,7 @@ class UntilStage(BaseNestedStage):
 
 if event and event.is_set():
 error_msg: str = (
-"
-"loop execution."
+f"Cancel loop: {i!r} before start nested process."
 )
 catch(
 context=context,
@@ -2747,7 +2855,7 @@ class UntilStage(BaseNestedStage):
 
 rs: Result = stage.execute(
 params=current_context,
-run_id=parent_run_id,
+run_id=trace.parent_run_id,
 event=event,
 )
 stage.set_outputs(rs.context, to=nestet_context)
|
|
2763
2871
|
|
2764
2872
|
elif rs.status == FAILED:
|
2765
2873
|
error_msg: str = (
|
2766
|
-
f"
|
2767
|
-
f"
|
2874
|
+
f"Break loop: {i!r} because nested stage: {stage.iden!r}, "
|
2875
|
+
f"failed."
|
2768
2876
|
)
|
2769
2877
|
catch(
|
2770
2878
|
context=context,
|
@@ -2777,17 +2885,14 @@ class UntilStage(BaseNestedStage):
 "stages": filter_func(
 nestet_context.pop("stages", {})
 ),
-"errors":
+"errors": StageNestedError(error_msg).to_dict(),
 }
 },
 )
-raise
+raise StageNestedError(error_msg, refs=loop)
 
 elif rs.status == CANCEL:
-error_msg: str =
-"Loop execution was canceled from the event after "
-"end loop execution."
-)
+error_msg: str = f"Cancel loop: {i!r} after end nested process."
 catch(
 context=context,
 status=CANCEL,
@@ -2799,11 +2904,13 @@ class UntilStage(BaseNestedStage):
 "stages": filter_func(
 nestet_context.pop("stages", {})
 ),
-"errors":
+"errors": StageNestedCancelError(
+error_msg
+).to_dict(),
 }
 },
 )
-raise
+raise StageNestedCancelError(error_msg, refs=loop)
 
 status: Status = SKIP if sum(skips) == total_stage else SUCCESS
 return (
@@ -2847,41 +2954,39 @@ class UntilStage(BaseNestedStage):
 Returns:
 Result: The execution result with status and context data.
 """
-trace:
+trace: Trace = get_trace(
 run_id, parent_run_id=parent_run_id, extras=self.extras
 )
 event: Event = event or Event()
-trace.info(f"[
-item: Union[str, int, bool] =
-param2template(self.item, params, extras=self.extras)
-)
+trace.info(f"[NESTED]: Until: {self.until!r}")
+item: Union[str, int, bool] = self.pass_template(self.item, params)
 loop: int = 1
 until_rs: bool = True
 exceed_loop: bool = False
 catch(context=context, status=WAIT, updated={"until": {}})
 statuses: list[Status] = []
+
 while until_rs and not (exceed_loop := (loop > self.max_loop)):
 
 if event and event.is_set():
 raise StageCancelError(
-"
+f"Cancel before start loop process, (loop: {loop})."
 )
 
-status, context, item = self.
+status, context, item = self._process_nested(
 item=item,
 loop=loop,
 params=params,
-
+trace=trace,
 context=context,
-parent_run_id=parent_run_id,
 event=event,
 )
 
 loop += 1
 if item is None:
 item: int = loop
-trace.
-f"[
+trace.debug(
+f"[NESTED]: Return loop not set the item. It uses loop: "
 f"{loop} by default."
 )
 
@@ -2931,6 +3036,13 @@ class Match(BaseModel):
 )
 
 
+class Else(BaseModel):
+other: list[Stage] = Field(
+description="A list of stage that does not match any case.",
+alias="else",
+)
+
+
 class CaseStage(BaseNestedStage):
 """Case stage executor that execute all stages if the condition was matched.
 
@@ -2960,10 +3072,34 @@ class CaseStage(BaseNestedStage):
 ... ],
 ... }
 
+>>> stage = {
+...     "name": "If stage execution.",
+...     "case": "${{ param.test }}",
+...     "match": [
+...         {
+...             "case": "1",
+...             "stages": [
+...                 {
+...                     "name": "Stage case 1",
+...                     "eche": "Hello case 1",
+...                 },
+...             ],
+...         },
+...         {
+...             "else": [
+...                 {
+...                     "name": "Stage else",
+...                     "eche": "Hello case else",
+...                 },
+...             ],
+...         },
+...     ],
+... }
+
 """
 
 case: str = Field(description="A case condition for routing.")
-match: list[Match] = Field(
+match: list[Union[Match, Else]] = Field(
 description="A list of Match model that should not be an empty list.",
 )
 skip_not_match: bool = Field(
@@ -2975,46 +3111,117 @@ class CaseStage(BaseNestedStage):
         alias="skip-not-match",
     )

-
+    @field_validator("match", mode="after")
+    def __validate_match(
+        cls, match: list[Union[Match, Else]]
+    ) -> list[Union[Match, Else]]:
+        """Validate the match field should contain only one Else model."""
+        c_else_case: int = 0
+        c_else_model: int = 0
+        for m in match:
+            if isinstance(m, Else):
+                if c_else_model:
+                    raise ValueError(
+                        "Match field should contain only one `Else` model."
+                    )
+                c_else_model += 1
+                continue
+            if isinstance(m, Match) and m.case == "_":
+                if c_else_case:
+                    raise ValueError(
+                        "Match field should contain only one else, '_', case."
+                    )
+                c_else_case += 1
+                continue
+        return match
+
+    def extract_stages_from_case(
+        self, case: StrOrNone, params: DictData
+    ) -> tuple[StrOrNone, list[Stage]]:
+        """Extract stage from case.
+
+        Args:
+            case (StrOrNone):
+            params (DictData):
+
+        Returns:
+            tuple[StrOrNone, list[Stage]]: A pair of case and stages.
+        """
+        _else_stages: Optional[list[Stage]] = None
+        stages: Optional[list[Stage]] = None
+
+        # NOTE: Start check the condition of each stage match with this case.
+        for match in self.match:
+
+            if isinstance(match, Else):
+                _else_stages: list[Stage] = match.other
+                continue
+
+            # NOTE: Store the else case.
+            if (c := match.case) == "_":
+                _else_stages: list[Stage] = match.stages
+                continue
+
+            _condition: str = param2template(c, params, extras=self.extras)
+            if pass_env(case) == pass_env(_condition):
+                stages: list[Stage] = match.stages
+                break
+
+        if stages is not None:
+            return case, stages
+
+        if _else_stages is None:
+            if not self.skip_not_match:
+                raise StageError(
+                    "This stage does not set else for support not match "
+                    "any case."
+                )
+            raise StageSkipError(
+                "Execution was skipped because it does not match any "
+                "case and the else condition does not set too."
+            )
+
+        # NOTE: Force to use the else when it does not match any case.
+        return "_", _else_stages
+
+    def _process_nested(
         self,
         case: str,
         stages: list[Stage],
         params: DictData,
-
+        trace: Trace,
         context: DictData,
         *,
-        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> tuple[Status, DictData]:
         """Execute case.

-        :
-
-
-
-
-
-
-
+        Args:
+            case: (str) A case that want to execution.
+            stages: (list[Stage]) A list of stage.
+            params: (DictData) A parameter data.
+            trace: (Trace)
+            context: (DictData)
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.

-        :
+        Returns:
+            DictData
         """
-        trace:
-            run_id, parent_run_id=parent_run_id, extras=self.extras
-        )
-        trace.debug(f"[STAGE]: Execute Case: {case!r}")
+        trace.info(f"[NESTED]: Case: {case!r}")
         current_context: DictData = copy.deepcopy(params)
         current_context.update({"case": case})
         output: DictData = {"case": case, "stages": {}}
-
+        total_stage: int = len(stages)
+        skips: list[bool] = [False] * total_stage
+        for i, stage in enumerate(stages, start=0):

             if self.extras:
                 stage.extras = self.extras

             if event and event.is_set():
                 error_msg: str = (
-                    "
-                    "stage case execution."
+                    f"Cancel case: {case!r} before start nested process."
                 )
                 return CANCEL, catch(
                     context=context,
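The new `__validate_match` validator rejects a `match` list that declares more than one fallback branch, whether the fallback is written as an `Else` model or as a `Match` whose `case` is `"_"`. A minimal standalone sketch of that rule (plain dicts instead of the package models, and assuming the `Else` branch carries its stages under `other`, which is how `extract_stages_from_case` reads `match.other`):

```python
# Standalone sketch of the "at most one else branch" rule enforced by
# __validate_match above. It uses plain dicts, not the package's Match/Else
# models, so it illustrates the rule rather than the real validator.
from typing import Any


def check_single_else(match: list[dict[str, Any]]) -> list[dict[str, Any]]:
    else_models = sum(1 for m in match if "other" in m)          # Else-style entries
    else_cases = sum(1 for m in match if m.get("case") == "_")   # Match with case "_"
    if else_models > 1:
        raise ValueError("Match field should contain only one `Else` model.")
    if else_cases > 1:
        raise ValueError("Match field should contain only one else, '_', case.")
    return match


check_single_else([{"case": "dev", "stages": []}, {"other": []}])  # passes
# check_single_else([{"other": []}, {"other": []}])                # raises ValueError
```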
@@ -3028,16 +3235,20 @@ class CaseStage(BaseNestedStage):

             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=output)
             stage.set_outputs(stage.get_outputs(output), to=current_context)

-            if rs.status ==
+            if rs.status == SKIP:
+                skips[i] = True
+                continue
+
+            elif rs.status == FAILED:
                 error_msg: str = (
-                    f"
-                    f"
+                    f"Break case: {case!r} because nested stage: {stage.iden}, "
+                    f"failed."
                 )
                 return FAILED, catch(
                     context=context,
@@ -3048,9 +3259,25 @@ class CaseStage(BaseNestedStage):
                         "errors": StageError(error_msg).to_dict(),
                     },
                 )
-
+
+            elif rs.status == CANCEL:
+                error_msg: str = (
+                    f"Cancel case {case!r} after end nested process."
+                )
+                return CANCEL, catch(
+                    context=context,
+                    status=CANCEL,
+                    updated={
+                        "case": case,
+                        "stages": filter_func(output.pop("stages", {})),
+                        "errors": StageCancelError(error_msg).to_dict(),
+                    },
+                )
+
+        status: Status = SKIP if sum(skips) == total_stage else SUCCESS
+        return status, catch(
             context=context,
-            status=
+            status=status,
             updated={
                 "case": case,
                 "stages": filter_func(output.pop("stages", {})),
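`_process_nested` now records which nested stages were skipped instead of treating every non-failure as success: FAILED and CANCEL still return early, SKIP marks the slot and continues, and the case itself resolves to SKIP only when every nested stage skipped. The aggregation rule on plain booleans:

```python
# The status aggregation used after the nested loop: SKIP only when every
# nested stage skipped, otherwise SUCCESS (failures/cancels returned earlier).
skips = [True, False, True]  # one nested stage actually ran
status = "SKIP" if sum(skips) == len(skips) else "SUCCESS"
assert status == "SUCCESS"
```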
@@ -3080,51 +3307,21 @@ class CaseStage(BaseNestedStage):
         Returns:
             Result: The execution result with status and context data.
         """
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )

-
-
-
-        _else: Optional[Match] = None
-        stages: Optional[list[Stage]] = None
-        for match in self.match:
-            if (c := match.case) == "_":
-                _else: Match = match
-                continue
-
-            _condition: str = param2template(c, params, extras=self.extras)
-            if stages is None and pass_env(_case) == pass_env(_condition):
-                stages: list[Stage] = match.stages
-
-        if stages is None:
-            if _else is None:
-                if not self.skip_not_match:
-                    raise StageError(
-                        "This stage does not set else for support not match "
-                        "any case."
-                    )
-                raise StageSkipError(
-                    "Execution was skipped because it does not match any "
-                    "case and the else condition does not set too."
-                )
-
-            _case: str = "_"
-            stages: list[Stage] = _else.stages
-
+        case: StrOrNone = param2template(self.case, params, extras=self.extras)
+        trace.info(f"[NESTED]: Get Case: {case!r}.")
+        case, stages = self.extract_stages_from_case(case, params=params)
         if event and event.is_set():
-            raise StageCancelError(
-
-
-            )
-        status, context = self._process_case(
-            case=_case,
+            raise StageCancelError("Cancel before start case process.")
+        status, context = self._process_nested(
+            case=case,
             stages=stages,
             params=params,
-
+            trace=trace,
             context=context,
-            parent_run_id=parent_run_id,
             event=event,
         )
         return Result(
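With these changes `process` resolves the templated `case` value, delegates branch selection to `extract_stages_from_case`, and runs the chosen branch through `_process_nested` with the shared trace. A hedged sketch of the data shape this accepts — the top-level field names (`case`, `match`, `skip-not-match`, `Match.case`/`Match.stages`) come from the hunks above, while the nested stage bodies and the `${{ ... }}` template syntax are assumptions, not a verified schema:

```python
# Hypothetical CaseStage payload; nested stage bodies are placeholders and the
# template syntax is assumed, so treat this as an illustration only.
from ddeutil.workflow.stages import CaseStage

routing = CaseStage.model_validate(
    {
        "name": "route-by-env",
        "case": "${{ params.env }}",
        "skip-not-match": True,
        "match": [
            {"case": "dev", "stages": [{"name": "On dev", "echo": "deploy to dev"}]},
            {"case": "_", "stages": [{"name": "Fallback", "echo": "no env matched"}]},
        ],
    }
)
```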
@@ -3178,7 +3375,7 @@ class RaiseStage(BaseAsyncStage):
         Returns:
             Result: The execution result with status and context data.
         """
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         message: str = param2template(self.message, params, extras=self.extras)
@@ -3209,7 +3406,7 @@ class RaiseStage(BaseAsyncStage):
         Returns:
             Result: The execution result with status and context data.
         """
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         message: str = param2template(self.message, params, extras=self.extras)
@@ -3290,7 +3487,7 @@ class DockerStage(BaseStage): # pragma: no cov
                 "by `pip install docker` first."
             ) from None

-        trace:
+        trace: Trace = get_trace(
            run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         client = DockerClient(
@@ -3390,7 +3587,7 @@ class DockerStage(BaseStage): # pragma: no cov
         Returns:
             Result: The execution result with status and context data.
         """
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         trace.info(f"[STAGE]: Docker: {self.image}:{self.tag}")
@@ -3403,8 +3600,11 @@ class VirtualPyStage(PyStage): # pragma: no cov
     """

     version: str = Field(
-        default=
-        description=
+        default=__python_version__,
+        description=(
+            "A Python version that want to run. It will use supported version "
+            f"of this package by default, {__python_version__}."
+        ),
     )
     deps: list[str] = Field(
         description=(
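`VirtualPyStage.version` now defaults to the package's supported Python version (`__python_version__`) instead of a hard-coded string, so a stage only sets it when targeting a different interpreter. A hedged construction sketch (the `run` and `deps` values are placeholders):

```python
# Hypothetical VirtualPyStage payload; `version` is omitted so it falls back
# to __python_version__ per the new default above.
from ddeutil.workflow.stages import VirtualPyStage

py_stage = VirtualPyStage.model_validate(
    {
        "name": "py-in-uv-venv",
        "run": "import sys\nprint(sys.version)",
        "deps": ["pandas>=2.0"],
        # "version": "3.11",  # only needed to override the package default
    }
)
```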
@@ -3427,11 +3627,12 @@ class VirtualPyStage(PyStage): # pragma: no cov
         The format of Python dependency was followed by the `uv`
         recommended.

-        :
-
-
-
-
+        Args:
+            py: A Python string statement.
+            values: A variable that want to set before running this
+            deps: An additional Python dependencies that want install before
+                run this python stage.
+            run_id: (StrOrNone) A running ID of this stage execution.
         """
         run_id: str = run_id or uuid.uuid4()
         f_name: str = f"{run_id}.py"
@@ -3500,7 +3701,7 @@ class VirtualPyStage(PyStage): # pragma: no cov
         Returns:
             Result: The execution result with status and context data.
         """
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         run: str = param2template(dedent(self.run), params, extras=self.extras)
@@ -3546,6 +3747,59 @@ class VirtualPyStage(PyStage): # pragma: no cov
             extras=self.extras,
         )

+    async def async_process(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Result:
+        raise NotImplementedError(
+            "Async process of Virtual Python stage does not implement yet."
+        )
+
+
+NestedStage = Annotated[
+    Union[
+        BashStage,
+        CallStage,
+        PyStage,
+        VirtualPyStage,
+        RaiseStage,
+        DockerStage,
+        TriggerStage,
+        EmptyStage,
+        CaseStage,
+        ForEachStage,
+        UntilStage,
+    ],
+    Field(
+        union_mode="smart",
+        description="A nested-stage allow list",
+    ),
+] # pragma: no cov
+
+
+ActionStage = Annotated[
+    Union[
+        BashStage,
+        CallStage,
+        VirtualPyStage,
+        PyStage,
+        RaiseStage,
+        DockerStage,
+        EmptyStage,
+    ],
+    Field(
+        union_mode="smart",
+        description=(
+            "An action stage model that allow to use with nested-stage model."
+        ),
+    ),
+] # pragma: no cov
+

 # NOTE:
 # An order of parsing stage model on the Job model with `stages` field.
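`NestedStage` and `ActionStage` are pydantic `Annotated` unions with `union_mode="smart"`, so raw stage data can be coerced into the most specific matching model without an explicit discriminator. A minimal sketch of exercising such an alias directly through `TypeAdapter` (standard pydantic v2; the payload fields are assumptions):

```python
# Sketch: parsing a raw stage dict through the ActionStage union added above.
from pydantic import TypeAdapter

from ddeutil.workflow.stages import ActionStage

adapter = TypeAdapter(ActionStage)
stage = adapter.validate_python({"name": "Say hi", "echo": "hello"})
print(type(stage).__name__)  # expected to resolve to a concrete stage model
```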
@@ -3554,18 +3808,14 @@ class VirtualPyStage(PyStage): # pragma: no cov
 #
 Stage = Annotated[
     Union[
-
-        BashStage,
-        CallStage,
-        TriggerStage,
+        # NOTE: Nested Stage.
         ForEachStage,
         UntilStage,
         ParallelStage,
         CaseStage,
-
-
-
-        EmptyStage,
+        TriggerStage,
+        # NOTE: Union with the action stage.
+        ActionStage,
     ],
     Field(
         union_mode="smart",