ddeutil-workflow 0.0.65__py3-none-any.whl → 0.0.67__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
- __version__: str = "0.0.65"
+ __version__: str = "0.0.67"
ddeutil/workflow/__main__.py CHANGED
@@ -1,30 +1,4 @@
- import typer
-
- app = typer.Typer()
-
-
- @app.callback()
- def callback():
-     """
-     Awesome Portal Gun
-     """
-
-
- @app.command()
- def provision():
-     """
-     Shoot the portal gun
-     """
-     typer.echo("Shooting portal gun")
-
-
- @app.command()
- def job():
-     """
-     Load the portal gun
-     """
-     typer.echo("Loading portal gun")
-
+ from .cli import app

  if __name__ == "__main__":
      app()
ddeutil/workflow/cli.py ADDED
@@ -0,0 +1,68 @@
+ import json
+ from typing import Annotated, Any
+
+ import typer
+ import uvicorn
+
+ from .__about__ import __version__
+ from .api import app as fastapp
+ from .api.logs import LOGGING_CONFIG
+
+ app = typer.Typer(
+     pretty_exceptions_enable=True,
+ )
+
+
+ @app.callback()
+ def callback():
+     """
+     Awesome Portal Gun
+     """
+     typer.echo("Start call from callback function")
+
+
+ @app.command()
+ def version():
+     """Get the ddeutil-workflow package version."""
+     typer.echo(__version__)
+
+
+ @app.command()
+ def job(
+     params: Annotated[str, typer.Option(help="A job execute parameters")],
+ ):
+     """Job execution on the local.
+
+     Example:
+         ... workflow-cli job --params "{\"test\": 1}"
+     """
+     try:
+         params_dict: dict[str, Any] = json.loads(params)
+     except json.JSONDecodeError as e:
+         raise ValueError(f"params does not support format: {params!r}.") from e
+     typer.echo(f"Job params: {params_dict}")
+
+
+ @app.command()
+ def api(
+     host: Annotated[str, typer.Option(help="A host url.")] = "0.0.0.0",
+     port: Annotated[int, typer.Option(help="A port url.")] = 80,
+     debug: Annotated[bool, typer.Option(help="A debug mode flag")] = True,
+     worker: Annotated[int, typer.Option(help="A worker number")] = None,
+ ):
+     """
+     Provision API application from the FastAPI.
+     """
+
+     uvicorn.run(
+         fastapp,
+         host=host,
+         port=port,
+         log_config=uvicorn.config.LOGGING_CONFIG | LOGGING_CONFIG,
+         log_level=("DEBUG" if debug else "INFO"),
+         workers=worker,
+     )
+
+
+ if __name__ == "__main__":
+     app()
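The new Typer app can be smoke-tested without installing the console script. The sketch below is a minimal check, assuming the added file is importable as `ddeutil.workflow.cli` (which the new RECORD entry further down suggests) and using Typer's bundled test runner:

```python
from typer.testing import CliRunner

# Assumption: the module shown above lands at ddeutil/workflow/cli.py.
from ddeutil.workflow.cli import app

runner = CliRunner()

# `version` echoes the value re-exported from __about__ (0.0.67 here).
rs = runner.invoke(app, ["version"])
assert "0.0.67" in rs.output

# `job` parses its --params option as JSON before echoing it back.
rs = runner.invoke(app, ["job", "--params", '{"test": 1}'])
assert "Job params" in rs.output
```

The `workflow-cli` commands referenced later in the README portion of the METADATA diff presumably route through this same `app` object, since the entry_points.txt hash is unchanged in the RECORD below.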
ddeutil/workflow/conf.py CHANGED
@@ -18,7 +18,7 @@ from zoneinfo import ZoneInfo
  from ddeutil.core import str2bool
  from ddeutil.io import YamlFlResolve, search_env_replace
  from ddeutil.io.paths import glob_files, is_ignored, read_ignore
- from pydantic import SecretStr, TypeAdapter
+ from pydantic import SecretStr

  from .__types import DictData

@@ -470,7 +470,3 @@ class CallerSecret(SecretStr): # pragma: no cov
          :rtype: str
          """
          return pass_env(super().get_secret_value())
-
-
- # NOTE: Define the caller secret type for use it directly in the caller func.
- CallerSecretType = TypeAdapter(CallerSecret)
ddeutil/workflow/errors.py CHANGED
@@ -90,9 +90,6 @@ class ResultError(UtilError): ...
  class StageError(BaseError): ...


- class StageRetryError(StageError): ...
-
-
  class StageCancelError(StageError): ...


@@ -114,9 +111,6 @@ class WorkflowError(BaseError): ...
  class WorkflowCancelError(WorkflowError): ...


- class WorkflowSkipError(WorkflowError): ...
-
-
  class WorkflowTimeoutError(WorkflowError): ...

ddeutil/workflow/job.py CHANGED
@@ -39,7 +39,6 @@ from pydantic import BaseModel, Discriminator, Field, SecretStr, Tag
  from pydantic.functional_validators import field_validator, model_validator
  from typing_extensions import Self

- from . import JobSkipError
  from .__types import DictData, DictStr, Matrix, StrOrNone
  from .errors import JobCancelError, JobError, to_dict
  from .result import (
@@ -774,7 +773,7 @@
      *,
      result: Optional[Result] = None,
      event: Optional[Event] = None,
- ) -> Result:
+ ) -> tuple[Status, Result]:
      """Local strategy execution with passing dynamic parameters from the
      job execution and strategy matrix.

@@ -799,7 +798,7 @@
      :raise JobError: If stage execution raise any error as `StageError`.
      :raise JobError: If the result from execution has `FAILED` status.

-     :rtype: Result
+     :rtype: tuple[Status, Result]
      """
      result: Result = result or Result(
          run_id=gen_id(job.id or "EMPTY", unique=True),
@@ -899,9 +898,7 @@
              },
          },
      )
-     if status == SKIP:
-         raise JobSkipError("All stage was skipped.")
-     return result
+     return status, result


  def local_execute(
@@ -1017,14 +1014,13 @@

      for i, future in enumerate(done, start=0):
          try:
-             statuses[i] = future.result().status
+             statuses[i], _ = future.result()
          except JobError as e:
              statuses[i] = get_status_from_error(e)
              result.trace.error(
-                 f"[JOB]: {ls} Error Handler:||{e.__class__.__name__}: {e}"
+                 f"[JOB]: {ls} Handler:||{e.__class__.__name__}: {e}"
              )
-             if not isinstance(e, JobSkipError):
-                 mark_errors(context, e)
+             mark_errors(context, e)
          except CancelledError:
              pass

ddeutil/workflow/logs.py CHANGED
@@ -848,7 +848,7 @@ class FileAudit(BaseAudit):
          "audit_path", extras=self.extras
      ) / self.filename_fmt.format(name=self.name, release=self.release)

-     def save(self, excluded: Optional[list[str]]) -> Self:
+     def save(self, excluded: Optional[list[str]] = None) -> Self:
          """Save logging data that receive a context data from a workflow
          execution result.

ddeutil/workflow/result.py CHANGED
@@ -11,7 +11,7 @@ from __future__ import annotations

  from dataclasses import field
  from datetime import datetime
- from enum import IntEnum, auto
+ from enum import Enum
  from typing import Optional, Union

  from pydantic import ConfigDict
@@ -28,7 +28,6 @@ from . import (
      StageSkipError,
      WorkflowCancelError,
      WorkflowError,
-     WorkflowSkipError,
  )
  from .__types import DictData
  from .conf import dynamic
@@ -37,16 +36,16 @@ from .logs import TraceModel, get_dt_tznow, get_trace
  from .utils import default_gen_id, gen_id, get_dt_now


- class Status(IntEnum):
+ class Status(str, Enum):
      """Status Int Enum object that use for tracking execution status to the
      Result dataclass object.
      """

-     SUCCESS = auto()
-     FAILED = auto()
-     WAIT = auto()
-     SKIP = auto()
-     CANCEL = auto()
+     SUCCESS = "SUCCESS"
+     FAILED = "FAILED"
+     WAIT = "WAIT"
+     SKIP = "SKIP"
+     CANCEL = "CANCEL"

      @property
      def emoji(self) -> str:  # pragma: no cov
@@ -68,6 +67,9 @@ class Status(IntEnum):
      def __str__(self) -> str:
          return self.name

+     def is_result(self) -> bool:
+         return self in ResultStatuses
+

  SUCCESS = Status.SUCCESS
  FAILED = Status.FAILED
@@ -75,6 +77,8 @@ WAIT = Status.WAIT
  SKIP = Status.SKIP
  CANCEL = Status.CANCEL

+ ResultStatuses: list[Status] = [SUCCESS, FAILED, CANCEL, SKIP]
+

  def validate_statuses(statuses: list[Status]) -> Status:
      """Validate the final status from list of Status object.
@@ -106,13 +110,12 @@ def get_status_from_error(
          JobSkipError,
          WorkflowError,
          WorkflowCancelError,
-         WorkflowSkipError,
          Exception,
          BaseException,
      ]
  ) -> Status:
      """Get the Status from the error object."""
-     if isinstance(error, (StageSkipError, JobSkipError, WorkflowSkipError)):
+     if isinstance(error, (StageSkipError, JobSkipError)):
          return SKIP
      elif isinstance(
          error, (StageCancelError, JobCancelError, WorkflowCancelError)
ddeutil/workflow/stages.py CHANGED
@@ -62,10 +62,9 @@ from pydantic import BaseModel, Field, ValidationError
  from pydantic.functional_validators import field_validator, model_validator
  from typing_extensions import Self

- from . import StageCancelError, StageRetryError
  from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr
  from .conf import dynamic, pass_env
- from .errors import StageError, StageSkipError, to_dict
+ from .errors import StageCancelError, StageError, StageSkipError, to_dict
  from .result import (
      CANCEL,
      FAILED,
@@ -252,16 +251,20 @@ class BaseStage(BaseModel, ABC):
              f"[STAGE]: Handler {to_train(self.__class__.__name__)}: "
              f"{self.name!r}."
          )
+
+         # NOTE: Show the description of this stage before execution.
          if self.desc:
              result.trace.debug(f"[STAGE]: Description:||{self.desc}||")

+         # VALIDATE: Checking stage condition before execution.
          if self.is_skipped(params):
              raise StageSkipError(
                  f"Skip because condition {self.condition} was valid."
              )
+
          # NOTE: Start call wrapped execution method that will use custom
          # execution before the real execution from inherit stage model.
-         result_caught: Result = self.__execute(
+         result_caught: Result = self._execute(
              params, result=result, event=event
          )
          if result_caught.status == WAIT:
@@ -296,7 +299,7 @@
              )
              return result.catch(status=FAILED, context={"errors": to_dict(e)})

-     def __execute(
+     def _execute(
          self, params: DictData, result: Result, event: Optional[Event]
      ) -> Result:
          """Wrapped the execute method before returning to handler execution.
@@ -514,11 +517,14 @@ class BaseAsyncStage(BaseStage, ABC):
              f"[STAGE]: Handler {to_train(self.__class__.__name__)}: "
              f"{self.name!r}."
          )
+
+         # NOTE: Show the description of this stage before execution.
          if self.desc:
              await result.trace.adebug(
                  f"[STAGE]: Description:||{self.desc}||"
              )

+         # VALIDATE: Checking stage condition before execution.
          if self.is_skipped(params=params):
              raise StageSkipError(
                  f"Skip because condition {self.condition} was valid."
@@ -526,7 +532,7 @@

          # NOTE: Start call wrapped execution method that will use custom
          # execution before the real execution from inherit stage model.
-         result_caught: Result = await self.__axecute(
+         result_caught: Result = await self._axecute(
              params, result=result, event=event
          )
          if result_caught.status == WAIT:
@@ -561,7 +567,7 @@
              )
              return result.catch(status=FAILED, context={"errors": to_dict(e)})

-     async def __axecute(
+     async def _axecute(
          self, params: DictData, result: Result, event: Optional[Event]
      ) -> Result:
          """Wrapped the axecute method before returning to handler axecute.
@@ -591,7 +597,7 @@ class BaseRetryStage(BaseAsyncStage, ABC): # pragma: no cov
          description="Retry number if stage execution get the error.",
      )

-     def __execute(
+     def _execute(
          self,
          params: DictData,
          result: Result,
@@ -610,15 +616,50 @@ class BaseRetryStage(BaseAsyncStage, ABC): # pragma: no cov
          :rtype: Result
          """
          current_retry: int = 0
-         with current_retry < (self.retry + 1):
+         exception: Exception
+
+         # NOTE: First execution for not pass to retry step if it passes.
+         try:
+             result.catch(status=WAIT)
+             return self.execute(
+                 params | {"retry": current_retry},
+                 result=result,
+                 event=event,
+             )
+         except Exception as e:
+             current_retry += 1
+             exception = e
+
+         if self.retry == 0:
+             raise exception
+
+         result.trace.warning(
+             f"[STAGE]: Retry count: {current_retry} ... "
+             f"( {exception.__class__.__name__} )"
+         )
+
+         while current_retry < (self.retry + 1):
              try:
                  result.catch(status=WAIT, context={"retry": current_retry})
-                 return self.execute(params, result=result, event=event)
-             except StageRetryError:
+                 return self.execute(
+                     params | {"retry": current_retry},
+                     result=result,
+                     event=event,
+                 )
+             except Exception as e:
                  current_retry += 1
-         raise StageError(f"Reach the maximum of retry number: {self.retry}.")
+                 result.trace.warning(
+                     f"[STAGE]: Retry count: {current_retry} ... "
+                     f"( {e.__class__.__name__} )"
+                 )
+                 exception = e
+
+         result.trace.error(
+             f"[STAGE]: Reach the maximum of retry number: {self.retry}."
+         )
+         raise exception

-     async def __axecute(
+     async def _axecute(
          self,
          params: DictData,
          result: Result,
@@ -637,13 +678,48 @@ class BaseRetryStage(BaseAsyncStage, ABC): # pragma: no cov
          :rtype: Result
          """
          current_retry: int = 0
-         with current_retry < (self.retry + 1):
+         exception: Exception
+
+         # NOTE: First execution for not pass to retry step if it passes.
+         try:
+             result.catch(status=WAIT)
+             return await self.axecute(
+                 params | {"retry": current_retry},
+                 result=result,
+                 event=event,
+             )
+         except Exception as e:
+             current_retry += 1
+             exception = e
+
+         if self.retry == 0:
+             raise exception
+
+         await result.trace.awarning(
+             f"[STAGE]: Retry count: {current_retry} ... "
+             f"( {exception.__class__.__name__} )"
+         )
+
+         while current_retry < (self.retry + 1):
              try:
                  result.catch(status=WAIT, context={"retry": current_retry})
-                 return await self.axecute(params, result=result, event=event)
-             except StageRetryError:
+                 return await self.axecute(
+                     params | {"retry": current_retry},
+                     result=result,
+                     event=event,
+                 )
+             except Exception as e:
                  current_retry += 1
-         raise StageError(f"Reach the maximum of retry number: {self.retry}.")
+                 await result.trace.awarning(
+                     f"[STAGE]: Retry count: {current_retry} ... "
+                     f"( {e.__class__.__name__} )"
+                 )
+                 exception = e
+
+         await result.trace.aerror(
+             f"[STAGE]: Reach the maximum of retry number: {self.retry}."
+         )
+         raise exception


  class EmptyStage(BaseAsyncStage):
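The rewrite above drops the `StageRetryError`-driven loop in favour of one plain first attempt followed by up to `retry` re-attempts on any exception, re-raising the last exception once the budget is exhausted. The standalone sketch below mirrors that control flow in isolation; it is a simplification that omits the `WAIT` status resets, the trace logging, and the attempt counter passed through `params`:

```python
from typing import Any, Callable


def run_with_retry(attempt: Callable[[int], Any], retry: int) -> Any:
    """Condensed model of the new BaseRetryStage flow: one plain attempt,
    then up to `retry` further attempts, re-raising the last exception."""
    current: int = 0
    try:
        return attempt(current)        # first attempt costs no retry budget
    except Exception as exc:
        current += 1
        last_exc = exc
        if retry == 0:
            raise                      # retry disabled: fail immediately

    while current < retry + 1:
        try:
            return attempt(current)    # the attempt index mirrors current_retry
        except Exception as exc:
            current += 1
            last_exc = exc

    raise last_exc                     # retry budget exhausted
```

With this flow, a stage configured with `retry: 3` runs at most four times before its last error propagates; the hunks that follow switch `BashStage`, `PyStage`, `CallStage`, and the nested stages over to `BaseRetryStage` so they all pick up this behaviour.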
@@ -765,7 +841,7 @@ class EmptyStage(BaseAsyncStage):
          return result.catch(status=SUCCESS)


- class BashStage(BaseAsyncStage):
+ class BashStage(BaseRetryStage):
      """Bash stage executor that execute bash script on the current OS.
      If your current OS is Windows, it will run on the bash from the current WSL.
      It will use `bash` for Windows OS and use `sh` for Linux OS.
@@ -911,9 +987,8 @@ class BashStage(BaseAsyncStage):
          )
          if rs.returncode > 0:
              e: str = rs.stderr.removesuffix("\n")
-             raise StageError(
-                 f"Subprocess: {e}\n---( statement )---\n```bash\n{bash}\n```"
-             )
+             e_bash: str = bash.replace("\n", "\n\t")
+             raise StageError(f"Subprocess: {e}\n\t```bash\n\t{e_bash}\n\t```")
          return result.catch(
              status=SUCCESS,
              context={
@@ -964,9 +1039,8 @@

          if rs.returncode > 0:
              e: str = rs.stderr.removesuffix("\n")
-             raise StageError(
-                 f"Subprocess: {e}\n---( statement )---\n```bash\n{bash}\n```"
-             )
+             e_bash: str = bash.replace("\n", "\n\t")
+             raise StageError(f"Subprocess: {e}\n\t```bash\n\t{e_bash}\n\t```")
          return result.catch(
              status=SUCCESS,
              context={
@@ -977,7 +1051,7 @@
          )


- class PyStage(BaseAsyncStage):
+ class PyStage(BaseRetryStage):
      """Python stage that running the Python statement with the current globals
      and passing an input additional variables via `exec` built-in function.

@@ -1164,7 +1238,7 @@
          )


- class CallStage(BaseAsyncStage):
+ class CallStage(BaseRetryStage):
      """Call stage executor that call the Python function from registry with tag
      decorator function in `reusables` module and run it with input arguments.

@@ -1433,7 +1507,7 @@
          return args


- class BaseNestedStage(BaseStage, ABC):
+ class BaseNestedStage(BaseRetryStage, ABC):
      """Base Nested Stage model. This model is use for checking the child stage
      is the nested stage or not.
      """
@@ -1467,6 +1541,17 @@ class BaseNestedStage(BaseStage, ABC):
          else:
              context["errors"] = error.to_dict(with_refs=True)

+     async def axecute(
+         self,
+         params: DictData,
+         *,
+         result: Optional[Result] = None,
+         event: Optional[Event] = None,
+     ) -> Result:
+         raise NotImplementedError(
+             "The nested-stage does not implement the `axecute` method yet."
+         )
+

  class TriggerStage(BaseNestedStage):
      """Trigger workflow executor stage that run an input trigger Workflow
@@ -1610,6 +1695,9 @@ class ParallelStage(BaseNestedStage):
              (Default is None)

          :raise StageCancelError: If event was set.
+         :raise StageCancelError: If result from a nested-stage return canceled
+             status.
+         :raise StageError: If result from a nested-stage return failed status.

          :rtype: tuple[Status, Result]
          """
@@ -1854,9 +1942,11 @@ class ForEachStage(BaseNestedStage):
          result.trace.debug(f"[STAGE]: Execute Item: {item!r}")
          key: StrOrInt = index if self.use_index_as_key else item

+         # NOTE: Create nested-context data from the passing context.
          context: DictData = copy.deepcopy(params)
          context.update({"item": item, "loop": index})
          nestet_context: DictData = {"item": item, "stages": {}}
+
          total_stage: int = len(self.stages)
          skips: list[bool] = [False] * total_stage
          for i, stage in enumerate(self.stages, start=0):
@@ -1959,6 +2049,10 @@
      ) -> Result:
          """Execute the stages that pass each item form the foreach field.

+         This stage will use fail-fast strategy if it was set concurrency
+         value more than 1. It will cancel all nested-stage execution when it has
+         any item loop raise failed or canceled error.
+
          :param params: (DictData) A parameter data.
          :param result: (Result) A Result instance for return context and status.
          :param event: (Event) An Event manager instance that use to cancel this
ddeutil/workflow/workflow.py CHANGED
@@ -379,6 +379,7 @@ class Workflow(BaseModel):
          override_log_name: Optional[str] = None,
          result: Optional[Result] = None,
          timeout: int = 600,
+         excluded: Optional[list[str]] = None,
      ) -> Result:
          """Release the workflow which is executes workflow with writing audit
          log tracking. The method is overriding parameter with the release
@@ -405,6 +406,8 @@
          :param result: (Result) A result object for keeping context and status
              data.
          :param timeout: (int) A workflow execution time out in second unit.
+         :param excluded: (list[str]) A list of key that want to exclude from
+             audit data.

          :rtype: Result
          """
@@ -453,7 +456,7 @@
                  run_id=result.run_id,
                  execution_time=result.alive_time(),
                  extras=self.extras,
-             ).save(excluded=None)
+             ).save(excluded=excluded)
          )
          return result.catch(
              status=rs.status,
@@ -586,6 +589,19 @@
                  |-name: ...
                  ╰-message: ...

+         --> Ok --> Result
+               |-status: FAILED
+               ╰-context:
+                   ╰-errors:
+                       |-name: ...
+                       ╰-message: ...
+
+         --> Ok --> Result
+               ╰-status: SKIP
+
+         --> Ok --> Result
+               ╰-status: SUCCESS
+
          :param params: A parameter data that will parameterize before execution.
          :param run_id: (Optional[str]) A workflow running ID.
          :param parent_run_id: (Optional[str]) A parent workflow running ID.
@@ -725,25 +741,23 @@

          if not_timeout_flag:
              job_queue.join()
-             total_future: int = 0
-             for i, future in enumerate(as_completed(futures), start=0):
+             for total, future in enumerate(as_completed(futures), start=0):
                  try:
-                     statuses[i], _ = future.result()
+                     statuses[total], _ = future.result()
                  except WorkflowError as e:
-                     statuses[i] = get_status_from_error(e)
-                     total_future += 1
+                     statuses[total] = get_status_from_error(e)

              # NOTE: Update skipped status from the job trigger.
              for i in range(skip_count):
-                 statuses[total_future + i] = SKIP
+                 statuses[total + 1 + i] = SKIP

              # NOTE: Update status from none-parallel job execution.
              for i, s in enumerate(sequence_statuses, start=0):
-                 statuses[total_future + skip_count + i] = s
+                 statuses[total + 1 + skip_count + i] = s

-             status: Status = validate_statuses(statuses)
-
-             return result.catch(status=status, context=context)
+             return result.catch(
+                 status=validate_statuses(statuses), context=context
+             )

          event.set()
          for future in futures:
ddeutil_workflow-0.0.65.dist-info/METADATA → ddeutil_workflow-0.0.67.dist-info/METADATA
@@ -1,7 +1,7 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.65
- Summary: Lightweight workflow orchestration
+ Version: 0.0.67
+ Summary: Lightweight workflow orchestration with YAML template
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
  Project-URL: Homepage, https://github.com/ddeutils/ddeutil-workflow/
@@ -27,7 +27,7 @@ Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
  Requires-Dist: pydantic==2.11.4
  Requires-Dist: pydantic-extra-types==2.10.4
  Requires-Dist: python-dotenv==1.1.0
- Requires-Dist: schedule<2.0.0,==1.2.2
+ Requires-Dist: typer==0.15.4
  Provides-Extra: all
  Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
  Requires-Dist: uvicorn; extra == "all"
@@ -35,18 +35,9 @@ Requires-Dist: httpx; extra == "all"
  Requires-Dist: ujson; extra == "all"
  Requires-Dist: aiofiles; extra == "all"
  Requires-Dist: aiohttp; extra == "all"
- Provides-Extra: api
- Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
- Requires-Dist: uvicorn; extra == "api"
- Requires-Dist: httpx; extra == "api"
- Requires-Dist: ujson; extra == "api"
- Provides-Extra: async
- Requires-Dist: aiofiles; extra == "async"
- Requires-Dist: aiohttp; extra == "async"
+ Requires-Dist: requests==2.32.3; extra == "all"
  Provides-Extra: docker
  Requires-Dist: docker==7.1.0; extra == "docker"
- Provides-Extra: self-hosted
- Requires-Dist: requests==2.32.3; extra == "self-hosted"
  Dynamic: license-file

  # Workflow Orchestration
@@ -142,10 +133,10 @@ the base deps.
  If you want to install this package with application add-ons, you should add
  `app` in installation;

- | Use-case       | Install Optional         |       Support       |
- |----------------|--------------------------|:-------------------:|
- | Python         | `ddeutil-workflow`       | :heavy_check_mark:  |
- | FastAPI Server | `ddeutil-workflow[api]`  | :heavy_check_mark:  |
+ | Use-case       | Install Optional        | Support |
+ |----------------|-------------------------|:-------:|
+ | Python         | `ddeutil-workflow`      |    ✅    |
+ | FastAPI Server | `ddeutil-workflow[all]` |    ✅    |

  ## 🎯 Usage

@@ -300,40 +291,27 @@ it will use default value and do not raise any error to you.
  ## :rocket: Deployment

  This package able to run as an application service for receive manual trigger
- from any node via RestAPI or use to be Scheduler background application
- like crontab job but via Python API or FastAPI app.
+ from any node via RestAPI with the FastAPI package.

  ### API Server

  This server use FastAPI package to be the base application.

  ```shell
- (.venv) $ uvicorn ddeutil.workflow.api:app \
-     --host 127.0.0.1 \
-     --port 80 \
-     --no-access-log
+ (.venv) $ workflow-cli api --host 127.0.0.1 --port 80
  ```

  > [!NOTE]
  > If this package already deploy, it is able to use multiprocess;
- > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
-
- ### Local Schedule
-
- > [!WARNING]
- > This CLI does not implement yet.
-
- ```shell
- (.venv) $ ddeutil-workflow schedule
- ```
+ > `$ workflow-cli api --host 127.0.0.1 --port 80 --workers 4`

  ### Docker Container

  Build a Docker container from this package.

  ```shell
- $ docker build -t ddeutil-workflow:latest -f .container/Dockerfile .
- $ docker run -i ddeutil-workflow:latest ddeutil-workflow
+ $ docker pull ghcr.io/ddeutils/ddeutil-workflow:latest
+ $ docker run --rm ghcr.io/ddeutils/ddeutil-workflow:latest ddeutil-worker
  ```

  ## :speech_balloon: Contribute
ddeutil_workflow-0.0.65.dist-info/RECORD → ddeutil_workflow-0.0.67.dist-info/RECORD
@@ -1,28 +1,29 @@
- ddeutil/workflow/__about__.py,sha256=HQ_82TqpLyt4QUTSUNDK94y4NkrRcrX9-AKR-fDwuWU,28
+ ddeutil/workflow/__about__.py,sha256=JZ9Er-4hkPGd0SSb_wI8VFJvPCjm8q09g7oG_MshBMo,28
  ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
  ddeutil/workflow/__init__.py,sha256=JfFZlPRDgR2J0rb0SRejt1OSrOrD3GGv9Um14z8MMfs,901
- ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
+ ddeutil/workflow/__main__.py,sha256=Qd-f8z2Q2vpiEP2x6PBFsJrpACWDVxFKQk820MhFmHo,59
  ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
- ddeutil/workflow/conf.py,sha256=VKtnD-Duuf_tPOCUfM6oa86-CrqeCj8kiQbzPLPUXx0,14807
- ddeutil/workflow/errors.py,sha256=evZYwNrvAvY1jpCqqZFkBCdaZ7GN-JbYJMYL6tJmm-0,2980
+ ddeutil/workflow/cli.py,sha256=YtfNfozYRvQyohhYVcZ2_8o_IBXOpmok531eYw0DScM,1555
+ ddeutil/workflow/conf.py,sha256=w1WDWZDCvRVDSz2HnJxeqySzpYWSubJZjTVjXO9imK0,14669
+ ddeutil/workflow/errors.py,sha256=4DaKnyUm8RrUyQA5qakgW0ycSQLO7j-owyoh79LWQ5c,2893
  ddeutil/workflow/event.py,sha256=S2eJAZZx_V5TuQ0l417hFVCtjWXnfNPZBgSCICzxQ48,11041
- ddeutil/workflow/job.py,sha256=9toh8L9MlQqyy3U3WYierdA03ohf0LOhffaPIOY2IYU,39126
- ddeutil/workflow/logs.py,sha256=iVtyl8i69y7t07tAuWkihc54WlkHCcBy_Ur0WtzJ_lM,31367
+ ddeutil/workflow/job.py,sha256=qcbKSOa39256nfJHL0vKJsHrelcRujX5KET2IEGS8dw,38995
+ ddeutil/workflow/logs.py,sha256=4rL8TsRJsYVqyPfLjFW5bSoWtRwUgwmaRONu7nnVxQ8,31374
  ddeutil/workflow/params.py,sha256=Pco3DyjptC5Jkx53dhLL9xlIQdJvNAZs4FLzMUfXpbQ,12402
- ddeutil/workflow/result.py,sha256=aUMIXw2nYbCDfFZqj9ABr_b7ZLo1GftTaaW8ATh618g,7855
+ ddeutil/workflow/result.py,sha256=GU84psZFiJ4LRf_HXgz-R98YN4lOUkER0VR7x9DDdOU,7922
  ddeutil/workflow/reusables.py,sha256=jPrOCbxagqRvRFGXJzIyDa1wKV5AZ4crZyJ10cldQP0,21620
- ddeutil/workflow/stages.py,sha256=rorKBjdyUAxALtelNJrvc5plJp1WCV35NMkii3XMw2A,102094
+ ddeutil/workflow/stages.py,sha256=xsJactN-Qk5Yg7ooXfoq-JVdlduIAdXXJUzCKFJuWGA,105093
  ddeutil/workflow/utils.py,sha256=slhBbsBNl0yaSk9EOiCK6UL-o7smgHVsLT7svRqAWXU,10436
- ddeutil/workflow/workflow.py,sha256=YP1st2y3YCUscsuFpjf3fQgYMOnQbBhiY0s6PW1Lpng,27637
+ ddeutil/workflow/workflow.py,sha256=AcSGqsH1N4LqWhYIcCPy9CoV_AGlXUrBgjpl-gniv6g,28267
  ddeutil/workflow/api/__init__.py,sha256=0UIilYwW29RL6HrCRHACSWvnATJVLSJzXiCMny0bHQk,2627
  ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
  ddeutil/workflow/api/routes/__init__.py,sha256=jC1pM7q4_eo45IyO3hQbbe6RnL9B8ibRq_K6aCMP6Ag,434
  ddeutil/workflow/api/routes/job.py,sha256=32TkNm7QY9gt6fxIqEPjDqPgc8XqDiMPjUb7disSrCw,2143
  ddeutil/workflow/api/routes/logs.py,sha256=QJH8IF102897WLfCJ29-1g15wl29M9Yq6omroZfbahs,5305
  ddeutil/workflow/api/routes/workflows.py,sha256=Gmg3e-K5rfi95pbRtWI_aIr5C089sIde_vefZVvh3U0,4420
- ddeutil_workflow-0.0.65.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.65.dist-info/METADATA,sha256=ujjqpMK3NkLSGMjPJdg30Cs3oGMJB8pBQd-eGWllYX4,16685
- ddeutil_workflow-0.0.65.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
- ddeutil_workflow-0.0.65.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
- ddeutil_workflow-0.0.65.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.65.dist-info/RECORD,,
+ ddeutil_workflow-0.0.67.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.67.dist-info/METADATA,sha256=w9iP1ofTfKIdirH9WSZf5rMOA4MrqMKM5jJk1hFO3oU,16072
+ ddeutil_workflow-0.0.67.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
+ ddeutil_workflow-0.0.67.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ ddeutil_workflow-0.0.67.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.67.dist-info/RECORD,,