ddeutil-workflow 0.0.51__py3-none-any.whl → 0.0.53__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
- __version__: str = "0.0.51"
+ __version__: str = "0.0.53"
ddeutil/workflow/__init__.py CHANGED
@@ -60,33 +60,6 @@ from .scheduler import (
  schedule_runner,
  schedule_task,
  )
- from .stages import (
- BashStage,
- CallStage,
- EmptyStage,
- ForEachStage,
- ParallelStage,
- PyStage,
- Stage,
- TriggerStage,
- )
- from .utils import (
- batch,
- cross_product,
- default_gen_id,
- delay,
- filter_func,
- gen_id,
- get_diff_sec,
- get_dt_now,
- make_exec,
- reach_next_minute,
- replace_sec,
- wait_to_next_minute,
- )
- from .workflow import (
- Release,
- ReleaseQueue,
- Workflow,
- WorkflowTask,
- )
+ from .stages import *
+ from .utils import *
+ from .workflow import *
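The package root now re-exports its submodules through wildcard imports instead of the explicit name lists removed above. A minimal sketch of the imports that are assumed to keep working, provided the star-imported modules expose these names (each of them appeared in the removed explicit list):

    # Assumed to still resolve after the switch to star imports, since every
    # name below was re-exported explicitly in 0.0.51.
    from ddeutil.workflow import EmptyStage, Workflow, gen_id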
ddeutil/workflow/job.py CHANGED
@@ -27,7 +27,7 @@ from threading import Event
  from typing import Annotated, Any, Literal, Optional, Union

  from ddeutil.core import freeze_args
- from pydantic import BaseModel, ConfigDict, Discriminator, Field, SecretStr, Tag
+ from pydantic import BaseModel, Discriminator, Field, SecretStr, Tag
  from pydantic.functional_validators import field_validator, model_validator
  from typing_extensions import Self

@@ -178,7 +178,7 @@ class Strategy(BaseModel):


  class Rule(str, Enum):
- """Trigger rules enum object."""
+ """Rule enum object for assign trigger option."""

  ALL_SUCCESS: str = "all_success"
  ALL_FAILED: str = "all_failed"
@@ -203,8 +203,6 @@ class BaseRunsOn(BaseModel): # pragma: no cov
  object and override execute method.
  """

- model_config = ConfigDict(use_enum_values=True)
-
  type: RunsOn = Field(description="A runs-on type.")
  args: DictData = Field(
  default_factory=dict,
@@ -219,7 +217,9 @@ class BaseRunsOn(BaseModel): # pragma: no cov
  class OnLocal(BaseRunsOn): # pragma: no cov
  """Runs-on local."""

- type: Literal[RunsOn.LOCAL] = Field(default=RunsOn.LOCAL)
+ type: Literal[RunsOn.LOCAL] = Field(
+ default=RunsOn.LOCAL, validate_default=True
+ )


  class SelfHostedArgs(BaseModel):
@@ -231,7 +231,9 @@ class SelfHostedArgs(BaseModel):
  class OnSelfHosted(BaseRunsOn): # pragma: no cov
  """Runs-on self-hosted."""

- type: Literal[RunsOn.SELF_HOSTED] = Field(default=RunsOn.SELF_HOSTED)
+ type: Literal[RunsOn.SELF_HOSTED] = Field(
+ default=RunsOn.SELF_HOSTED, validate_default=True
+ )
  args: SelfHostedArgs = Field(alias="with")


@@ -245,7 +247,9 @@ class AzBatchArgs(BaseModel):

  class OnAzBatch(BaseRunsOn): # pragma: no cov

- type: Literal[RunsOn.AZ_BATCH] = Field(default=RunsOn.AZ_BATCH)
+ type: Literal[RunsOn.AZ_BATCH] = Field(
+ default=RunsOn.AZ_BATCH, validate_default=True
+ )
  args: AzBatchArgs = Field(alias="with")


@@ -264,13 +268,16 @@ class DockerArgs(BaseModel):
  class OnDocker(BaseRunsOn): # pragma: no cov
  """Runs-on Docker container."""

- type: Literal[RunsOn.DOCKER] = Field(default=RunsOn.DOCKER)
+ type: Literal[RunsOn.DOCKER] = Field(
+ default=RunsOn.DOCKER, validate_default=True
+ )
  args: DockerArgs = Field(alias="with", default_factory=DockerArgs)


- def get_discriminator_runs_on(model: dict[str, Any]) -> str:
+ def get_discriminator_runs_on(model: dict[str, Any]) -> RunsOn:
  """Get discriminator of the RunsOn models."""
- return model.get("type", "local")
+ t = model.get("type")
+ return RunsOn(t) if t else RunsOn.LOCAL


  RunsOnModel = Annotated[
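A standalone sketch of the new discriminator behavior; the enum values here are assumptions made for illustration, while the coercion and the RunsOn.LOCAL fallback come from the diff above:

    from enum import Enum
    from typing import Any

    class RunsOn(str, Enum):  # values assumed for this sketch
        LOCAL = "local"
        DOCKER = "docker"

    def get_discriminator_runs_on(model: dict[str, Any]) -> RunsOn:
        # Coerce the raw "type" value into the enum; default to LOCAL when absent.
        t = model.get("type")
        return RunsOn(t) if t else RunsOn.LOCAL

    assert get_discriminator_runs_on({}) is RunsOn.LOCAL
    assert get_discriminator_runs_on({"type": "docker"}) is RunsOn.DOCKER

This pairs with the validate_default=True additions above, which also push the enum defaults through validation.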
@@ -336,6 +343,7 @@ class Job(BaseModel):
  )
  trigger_rule: Rule = Field(
  default=Rule.ALL_SUCCESS,
+ validate_default=True,
  description=(
  "A trigger rule of tracking needed jobs if feature will use when "
  "the `raise_error` did not set from job and stage executions."
@@ -547,6 +555,7 @@ class Job(BaseModel):
  )

  _id: str = self.id or job_id
+ output: DictData = copy.deepcopy(output)
  errors: DictData = (
  {"errors": output.pop("errors", {})} if "errors" in output else {}
  )
@@ -609,7 +618,7 @@ class Job(BaseModel):
  )

  result.trace.info(
- f"[JOB]: Execute: {self.id!r} on {self.runs_on.type!r}"
+ f"[JOB]: Execute: {self.id!r} on {self.runs_on.type.value!r}"
  )
  if self.runs_on.type == RunsOn.LOCAL:
  return local_execute(
ddeutil/workflow/stages.py CHANGED
@@ -42,7 +42,7 @@ from concurrent.futures import (
  wait,
  )
  from datetime import datetime
- from inspect import Parameter
+ from inspect import Parameter, isclass, isfunction, ismodule
  from pathlib import Path
  from subprocess import CompletedProcess
  from textwrap import dedent
@@ -266,7 +266,7 @@ class BaseStage(BaseModel, ABC):
  param2template(self.name, params=to, extras=self.extras)
  )
  )
-
+ output: DictData = output.copy()
  errors: DictData = (
  {"errors": output.pop("errors", {})} if "errors" in output else {}
  )
@@ -302,7 +302,7 @@ class BaseStage(BaseModel, ABC):
  param2template(self.name, params=outputs, extras=self.extras)
  )
  )
- return outputs.get("stages", {}).get(_id, {})
+ return outputs.get("stages", {}).get(_id, {}).get("outputs", {})

  def is_skipped(self, params: DictData | None = None) -> bool:
  """Return true if condition of this stage do not correct. This process
@@ -704,12 +704,11 @@ class PyStage(BaseStage):

  :rtype: Iterator[str]
  """
- from inspect import isclass, ismodule
-
  for value in values:

  if (
  value == "__annotations__"
+ or (value.startswith("__") and value.endswith("__"))
  or ismodule(values[value])
  or isclass(values[value])
  ):
@@ -727,11 +726,10 @@ class PyStage(BaseStage):

  :rtype: DictData
  """
+ output: DictData = output.copy()
  lc: DictData = output.pop("locals", {})
  gb: DictData = output.pop("globals", {})
- super().set_outputs(
- {k: lc[k] for k in self.filter_locals(lc)} | output, to=to
- )
+ super().set_outputs(lc | output, to=to)
  to.update({k: gb[k] for k in to if k in gb})
  return to

@@ -762,26 +760,36 @@ class PyStage(BaseStage):
  lc: DictData = {}
  gb: DictData = (
  globals()
- | params
  | param2template(self.vars, params, extras=self.extras)
  | {"result": result}
  )

- # NOTE: Start exec the run statement.
  result.trace.info(f"[STAGE]: Py-Execute: {self.name}")
- # result.trace.warning(
- # "[STAGE]: This stage allow use `eval` function, so, please "
- # "check your statement be safe before execute."
- # )
- #
+
  # WARNING: The exec build-in function is very dangerous. So, it
  # should use the re module to validate exec-string before running.
  exec(
- param2template(dedent(self.run), params, extras=self.extras), gb, lc
+ param2template(dedent(self.run), params, extras=self.extras),
+ gb,
+ lc,
  )

  return result.catch(
- status=SUCCESS, context={"locals": lc, "globals": gb}
+ status=SUCCESS,
+ context={
+ "locals": {k: lc[k] for k in self.filter_locals(lc)},
+ "globals": {
+ k: gb[k]
+ for k in gb
+ if (
+ not k.startswith("__")
+ and k != "annotations"
+ and not ismodule(gb[k])
+ and not isclass(gb[k])
+ and not isfunction(gb[k])
+ )
+ },
+ },
  )


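A standalone sketch of the globals filter now applied before catching the execution context; the sample namespace is invented, and the predicate mirrors the comprehension added above:

    import math
    from inspect import isclass, isfunction, ismodule

    def helper():  # a user-defined function, so it gets filtered out
        return 1

    gb = {"__name__": "demo", "math": math, "int": int, "helper": helper, "answer": 42}
    kept = {
        k: gb[k]
        for k in gb
        if (
            not k.startswith("__")
            and k != "annotations"
            and not ismodule(gb[k])
            and not isclass(gb[k])
            and not isfunction(gb[k])
        )
    }
    assert kept == {"answer": 42}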
@@ -841,8 +849,8 @@ class CallStage(BaseStage):

  :raise ValueError: If necessary arguments does not pass from the `args`
  field.
- :raise TypeError: If the result from the caller function does not by
- a `dict` type.
+ :raise TypeError: If the result from the caller function does not match
+ with a `dict` type.

  :rtype: Result
  """
@@ -1100,7 +1108,8 @@ class ParallelStage(BaseStage): # pragma: no cov
  """
  result.trace.debug(f"... Execute branch: {branch!r}")
  context: DictData = copy.deepcopy(params)
- context.update({"branch": branch, "stages": {}})
+ context.update({"branch": branch})
+ output: DictData = {"branch": branch, "stages": {}}
  for stage in self.parallel[branch]:

  if extras:
@@ -1108,7 +1117,7 @@ class ParallelStage(BaseStage): # pragma: no cov

  if stage.is_skipped(params=context):
  result.trace.info(f"... Skip stage: {stage.iden!r}")
- stage.set_outputs(output={"skipped": True}, to=context)
+ stage.set_outputs(output={"skipped": True}, to=output)
  continue

  if event and event.is_set():
@@ -1121,7 +1130,7 @@ class ParallelStage(BaseStage): # pragma: no cov
  parallel={
  branch: {
  "branch": branch,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  "errors": StageException(error_msg).to_dict(),
  }
  },
@@ -1135,8 +1144,9 @@ class ParallelStage(BaseStage): # pragma: no cov
  raise_error=True,
  event=event,
  )
- stage.set_outputs(rs.context, to=context)
- except StageException as e: # pragma: no cov
+ stage.set_outputs(rs.context, to=output)
+ stage.set_outputs(stage.get_outputs(output), to=context)
+ except (StageException, UtilException) as e: # pragma: no cov
  result.trace.error(f"[STAGE]: {e.__class__.__name__}: {e}")
  raise StageException(
  f"Sub-Stage execution error: {e.__class__.__name__}: {e}"
@@ -1152,7 +1162,7 @@ class ParallelStage(BaseStage): # pragma: no cov
  parallel={
  branch: {
  "branch": branch,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  "errors": StageException(error_msg).to_dict(),
  },
  },
@@ -1163,7 +1173,7 @@ class ParallelStage(BaseStage): # pragma: no cov
  parallel={
  branch: {
  "branch": branch,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  },
  },
  )
@@ -1293,9 +1303,10 @@ class ForEachStage(BaseStage):

  :rtype: Result
  """
- result.trace.debug(f"... Execute item: {item!r}")
+ result.trace.debug(f"[STAGE]: Execute item: {item!r}")
  context: DictData = copy.deepcopy(params)
- context.update({"item": item, "stages": {}})
+ context.update({"item": item})
+ output: DictData = {"item": item, "stages": {}}
  for stage in self.stages:

  if self.extras:
@@ -1303,7 +1314,7 @@ class ForEachStage(BaseStage):

  if stage.is_skipped(params=context):
  result.trace.info(f"... Skip stage: {stage.iden!r}")
- stage.set_outputs(output={"skipped": True}, to=context)
+ stage.set_outputs(output={"skipped": True}, to=output)
  continue

  if event and event.is_set(): # pragma: no cov
@@ -1316,7 +1327,7 @@ class ForEachStage(BaseStage):
  foreach={
  item: {
  "item": item,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  "errors": StageException(error_msg).to_dict(),
  }
  },
@@ -1330,7 +1341,8 @@ class ForEachStage(BaseStage):
  raise_error=True,
  event=event,
  )
- stage.set_outputs(rs.context, to=context)
+ stage.set_outputs(rs.context, to=output)
+ stage.set_outputs(stage.get_outputs(output), to=context)
  except (StageException, UtilException) as e:
  result.trace.error(f"[STAGE]: {e.__class__.__name__}: {e}")
  raise StageException(
@@ -1347,18 +1359,17 @@ class ForEachStage(BaseStage):
  foreach={
  item: {
  "item": item,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  "errors": StageException(error_msg).to_dict(),
  },
  },
  )
-
  return result.catch(
  status=SUCCESS,
  foreach={
  item: {
  "item": item,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  },
  },
  )
@@ -1514,7 +1525,8 @@ class UntilStage(BaseStage): # pragma: no cov
  """
  result.trace.debug(f"... Execute until item: {item!r}")
  context: DictData = copy.deepcopy(params)
- context.update({"loop": loop, "item": item, "stages": {}})
+ context.update({"item": item})
+ output: DictData = {"loop": loop, "item": item, "stages": {}}
  next_item: T = None
  for stage in self.stages:

@@ -1523,7 +1535,7 @@ class UntilStage(BaseStage): # pragma: no cov

  if stage.is_skipped(params=context):
  result.trace.info(f"... Skip stage: {stage.iden!r}")
- stage.set_outputs(output={"skipped": True}, to=context)
+ stage.set_outputs(output={"skipped": True}, to=output)
  continue

  if event and event.is_set():
@@ -1538,9 +1550,7 @@ class UntilStage(BaseStage): # pragma: no cov
  loop: {
  "loop": loop,
  "item": item,
- "stages": filter_func(
- context.pop("stages", {})
- ),
+ "stages": filter_func(output.pop("stages", {})),
  "errors": StageException(error_msg).to_dict(),
  }
  },
@@ -1556,11 +1566,12 @@ class UntilStage(BaseStage): # pragma: no cov
  raise_error=True,
  event=event,
  )
- stage.set_outputs(rs.context, to=context)
- if "item" in (
- outputs := stage.get_outputs(context).get("outputs", {})
- ):
- next_item = outputs["item"]
+ stage.set_outputs(rs.context, to=output)
+
+ if "item" in (_output := stage.get_outputs(output)):
+ next_item = _output["item"]
+
+ stage.set_outputs(_output, to=context)
  except (StageException, UtilException) as e:
  result.trace.error(f"[STAGE]: {e.__class__.__name__}: {e}")
  raise StageException(
@@ -1574,7 +1585,7 @@ class UntilStage(BaseStage): # pragma: no cov
  loop: {
  "loop": loop,
  "item": item,
- "stages": filter_func(context.pop("stages", {})),
+ "stages": filter_func(output.pop("stages", {})),
  }
  },
  ),
@@ -1669,7 +1680,9 @@ class Match(BaseModel):
  """Match model for the Case Stage."""

  case: Union[str, int] = Field(description="A match case.")
- stage: Stage = Field(description="A stage to execution for this case.")
+ stages: list[Stage] = Field(
+ description="A list of stage to execution for this case."
+ )


  class CaseStage(BaseStage):
@@ -1682,24 +1695,21 @@ class CaseStage(BaseStage):
  ... "match": [
  ... {
  ... "case": "1",
- ... "stage": {
- ... "name": "Stage case 1",
- ... "eche": "Hello case 1",
- ... },
- ... },
- ... {
- ... "case": "2",
- ... "stage": {
- ... "name": "Stage case 2",
- ... "eche": "Hello case 2",
- ... },
+ ... "stages": [
+ ... {
+ ... "name": "Stage case 1",
+ ... "eche": "Hello case 1",
+ ... },
+ ... ],
  ... },
  ... {
  ... "case": "_",
- ... "stage": {
- ... "name": "Stage else",
- ... "eche": "Hello case else",
- ... },
+ ... "stages": [
+ ... {
+ ... "name": "Stage else",
+ ... "eche": "Hello case else",
+ ... },
+ ... ],
  ... },
  ... ],
  ... }
@@ -1719,6 +1729,98 @@ class CaseStage(BaseStage):
  alias="skip-not-match",
  )

+ def execute_case(
+ self,
+ case: str,
+ stages: list[Stage],
+ params: DictData,
+ result: Result,
+ *,
+ event: Event | None = None,
+ ) -> Result:
+ """Execute case.
+
+ :param case: (str) A case that want to execution.
+ :param stages: (list[Stage]) A list of stage.
+ :param params: (DictData) A parameter that want to pass to stage
+ execution.
+ :param result: (Result) A result object for keeping context and status
+ data.
+ :param event: (Event) An event manager that use to track parent execute
+ was not force stopped.
+
+ :rtype: Result
+ """
+ context: DictData = copy.deepcopy(params)
+ context.update({"case": case})
+ output: DictData = {"case": case, "stages": {}}
+
+ for stage in stages:
+
+ if self.extras:
+ stage.extras = self.extras
+
+ if stage.is_skipped(params=context):
+ result.trace.info(f"... Skip stage: {stage.iden!r}")
+ stage.set_outputs(output={"skipped": True}, to=output)
+ continue
+
+ if event and event.is_set(): # pragma: no cov
+ error_msg: str = (
+ "Case-Stage was canceled from event that had set before "
+ "stage case execution."
+ )
+ return result.catch(
+ status=CANCEL,
+ context={
+ "case": case,
+ "stages": filter_func(output.pop("stages", {})),
+ "errors": StageException(error_msg).to_dict(),
+ },
+ )
+
+ try:
+ rs: Result = stage.handler_execute(
+ params=context,
+ run_id=result.run_id,
+ parent_run_id=result.parent_run_id,
+ raise_error=True,
+ event=event,
+ )
+ stage.set_outputs(rs.context, to=output)
+ stage.set_outputs(stage.get_outputs(output), to=context)
+ except (StageException, UtilException) as e: # pragma: no cov
+ result.trace.error(f"[STAGE]: {e.__class__.__name__}: {e}")
+ return result.catch(
+ status=FAILED,
+ context={
+ "case": case,
+ "stages": filter_func(output.pop("stages", {})),
+ "errors": e.to_dict(),
+ },
+ )
+
+ if rs.status == FAILED:
+ error_msg: str = (
+ f"Case-Stage was break because it has a sub stage, "
+ f"{stage.iden}, failed without raise error."
+ )
+ return result.catch(
+ status=FAILED,
+ context={
+ "case": case,
+ "stages": filter_func(output.pop("stages", {})),
+ "errors": StageException(error_msg).to_dict(),
+ },
+ )
+ return result.catch(
+ status=SUCCESS,
+ context={
+ "case": case,
+ "stages": filter_func(output.pop("stages", {})),
+ },
+ )
+
  def execute(
  self,
  params: DictData,
@@ -1748,17 +1850,17 @@ class CaseStage(BaseStage):

  result.trace.info(f"[STAGE]: Case-Execute: {_case!r}.")
  _else: Optional[Match] = None
- stage: Optional[Stage] = None
+ stages: Optional[list[Stage]] = None
  for match in self.match:
  if (c := match.case) == "_":
  _else: Match = match
  continue

  _condition: str = param2template(c, params, extras=self.extras)
- if stage is None and _case == _condition:
- stage: Stage = match.stage
+ if stages is None and _case == _condition:
+ stages: list[Stage] = match.stages

- if stage is None:
+ if stages is None:
  if _else is None:
  if not self.skip_not_match:
  raise StageException(
@@ -1776,10 +1878,8 @@ class CaseStage(BaseStage):
  status=CANCEL,
  context={"errors": StageException(error_msg).to_dict()},
  )
- stage: Stage = _else.stage
-
- if self.extras:
- stage.extras = self.extras
+ _case: str = "_"
+ stages: list[Stage] = _else.stages

  if event and event.is_set(): # pragma: no cov
  return result.catch(
@@ -1792,19 +1892,9 @@ class CaseStage(BaseStage):
  },
  )

- try:
- return result.catch(
- status=SUCCESS,
- context=stage.handler_execute(
- params=params,
- run_id=result.run_id,
- parent_run_id=result.parent_run_id,
- event=event,
- ).context,
- )
- except StageException as e: # pragma: no cov
- result.trace.error(f"[STAGE]: {e.__class__.__name__}:" f"\n\t{e}")
- return result.catch(status=FAILED, context={"errors": e.to_dict()})
+ return self.execute_case(
+ case=_case, stages=stages, params=params, result=result, event=event
+ )


  class RaiseStage(BaseStage): # pragma: no cov
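A standalone sketch of the rewritten case selection that now feeds execute_case: the first exact match wins, otherwise the "_" entry is used and the case label becomes "_" (the match data here is invented, and the real method also handles a missing "_" entry via skip-not-match):

    matches = [
        {"case": "1", "stages": ["stage-a"]},
        {"case": "_", "stages": ["stage-else"]},
    ]

    def select(case: str):
        chosen, fallback = None, None
        for m in matches:
            if m["case"] == "_":
                fallback = m
                continue
            if chosen is None and case == m["case"]:
                chosen = m
        if chosen is None:
            return "_", fallback["stages"]  # fall back to the "_" branch
        return case, chosen["stages"]

    assert select("1") == ("1", ["stage-a"])
    assert select("9") == ("_", ["stage-else"])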
ddeutil/workflow/workflow.py CHANGED
@@ -371,7 +371,9 @@ class Workflow(BaseModel):
  def from_conf(
  cls,
  name: str,
+ *,
  extras: DictData | None = None,
+ loader: type[Loader] = None,
  ) -> Self:
  """Create Workflow instance from the Loader object that only receive
  an input workflow name. The loader object will use this workflow name to
@@ -380,12 +382,13 @@ class Workflow(BaseModel):
  :param name: A workflow name that want to pass to Loader object.
  :param extras: An extra parameters that want to pass to Loader
  object.
+ :param loader: A loader class for override default loader object.

  :raise ValueError: If the type does not match with current object.

  :rtype: Self
  """
- loader: Loader = Loader(name, externals=(extras or {}))
+ loader: Loader = (loader or Loader)(name, externals=(extras or {}))

  # NOTE: Validate the config type match with current connection model
  if loader.type != cls.__name__:
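A hedged sketch of the new keyword-only loader argument; the workflow name is made up, the Loader import path is an assumption, and the subclass is hypothetical:

    from ddeutil.workflow import Workflow
    from ddeutil.workflow.conf import Loader  # import path assumed

    class TracingLoader(Loader):  # hypothetical override
        """Reuse the default loading behaviour; add custom hooks here."""

    # The override must be constructible as Loader(name, externals=...),
    # matching how the default loader is called in the diff above.
    wf = Workflow.from_conf("wf-example", extras={}, loader=TracingLoader)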
@@ -407,6 +410,7 @@ class Workflow(BaseModel):
  path: Path,
  *,
  extras: DictData | None = None,
+ loader: type[Loader] = None,
  ) -> Self:
  """Create Workflow instance from the specific path. The loader object
  will use this workflow name and path to searching configuration data of
@@ -416,12 +420,13 @@ class Workflow(BaseModel):
  :param path: (Path) A config path that want to search.
  :param extras: (DictData) An extra parameters that want to override core
  config values.
+ :param loader: A loader class for override default loader object.

  :raise ValueError: If the type does not match with current object.

  :rtype: Self
  """
- loader: SimLoad = SimLoad(
+ loader: SimLoad = (loader or SimLoad)(
  name, conf_path=path, externals=(extras or {})
  )
  # NOTE: Validate the config type match with current connection model
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.51
+ Version: 0.0.53
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -1,20 +1,20 @@
- ddeutil/workflow/__about__.py,sha256=lV_4ewekJpV5o1pQoqnMK3g475fbmyO855YKAFNFKF8,28
+ ddeutil/workflow/__about__.py,sha256=fOQi-49Q8-qLVO7us5t2StkrubZvI2LQkyYSQi-3P88,28
  ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
- ddeutil/workflow/__init__.py,sha256=AMmM7Cq_jrO_MzIZjzQyFvcFkVj9ad7V1f3PtLrI030,1736
+ ddeutil/workflow/__init__.py,sha256=noE8LNRcgq32m9OnIFcQqh0P7PXWdp-SGmvBCYIXgf4,1338
  ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
  ddeutil/workflow/conf.py,sha256=80rgmJKFU7BlH5xTLnghGzGhE8C6LFAQykd9mjHSjo8,12528
  ddeutil/workflow/cron.py,sha256=WS2MInn0Sp5DKlZDZH5VFZ5AA0Q3_AnBnYEU4lZSv4I,9779
  ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
- ddeutil/workflow/job.py,sha256=LydncguMUTTdcG_fImjoPFZBulmZT_PD3jYmnmcYskk,35041
+ ddeutil/workflow/job.py,sha256=Z1XP_9pj-RY64z3G4LYX-MppS99zQns9wtZy7zHuWbE,35262
  ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
  ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
  ddeutil/workflow/result.py,sha256=27nPQq9CETLCVczv4vvFEF9w2TllHZ_ROfyDoLFxRWM,5647
  ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
  ddeutil/workflow/scheduler.py,sha256=4G5AogkmnsTKe7jKYSfU35qjubR82WQ8CLtEe9kqPTE,28304
- ddeutil/workflow/stages.py,sha256=Ywwky6rJJ5cMvridG-1Y4rv-wJJ_WMbXysViR3WxQrY,69108
+ ddeutil/workflow/stages.py,sha256=ZsGh8Wd-NqdAZC5cyJ6wXuF-UHqoCcFFedXvyHssSqc,72473
  ddeutil/workflow/utils.py,sha256=zbVttaMFMRLuuBJdSJf7D9qtz8bOnQIBq-rHI3Eqy4M,7821
- ddeutil/workflow/workflow.py,sha256=v_O3Gm4J6SCN_1NfGd3_ClpJkatFCyoqXR3nAIMAoF4,50575
+ ddeutil/workflow/workflow.py,sha256=2ZBNW3-vcP8bpKrK184wSCukq3wpT6G0z25Su5bapR0,50832
  ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
  ddeutil/workflow/api/api.py,sha256=CWtPLgOv2Jus9E7nzG5mG2Z32ZEkUK3JWQ2htZyMRpA,5244
  ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
@@ -24,8 +24,8 @@ ddeutil/workflow/api/routes/job.py,sha256=oPwBVP0Mxwxv-bGPlfmxQQ9PcVl0ev9HoPzndp
  ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
  ddeutil/workflow/api/routes/schedules.py,sha256=EgUjyRGhsm6UNaMj5luh6TcY6l571sCHcla-BL1iOfY,4829
  ddeutil/workflow/api/routes/workflows.py,sha256=JcDOrn1deK8ztFRcMTNATQejG6KMA7JxZLVc4QeBsP4,4527
- ddeutil_workflow-0.0.51.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.51.dist-info/METADATA,sha256=xlAbUVZ9djYf1GImH9v0N-mR7XpxcQIkvwGuX5XmSrs,19425
- ddeutil_workflow-0.0.51.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- ddeutil_workflow-0.0.51.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.51.dist-info/RECORD,,
+ ddeutil_workflow-0.0.53.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.53.dist-info/METADATA,sha256=xNgYIlFQvS9VsF0agSPsYbJWin_s9c_llkRFnEUxyC0,19425
+ ddeutil_workflow-0.0.53.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ ddeutil_workflow-0.0.53.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.53.dist-info/RECORD,,