ddeutil-workflow 0.0.38__py3-none-any.whl → 0.0.39__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1 +1 @@
- __version__: str = "0.0.38"
+ __version__: str = "0.0.39"
@@ -61,8 +61,10 @@ class Re:
  # Regular expression:
  # - Version 1:
  # \${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
+ #
  # - Version 2: (2024-09-30):
  # \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
+ #
  # - Version 3: (2024-10-05):
  # \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
  #
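For readers who want to see what the Version 3 pattern accepts, here is a small standalone sketch (not part of the package) that compiles the pattern from the comment above and inspects one template string; the sample string and printed values are illustrative only.

```python
import re

# Compiled verbatim from the "Version 3 (2024-10-05)" comment above; only the
# sample template string below is made up for illustration.
RE_CALLER_V3 = re.compile(
    r"\${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)"
    r"(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??))\s*"
    r"(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}"
    r"[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}"
)

found = RE_CALLER_V3.search("${{ params.name | coalesce('guest') }}")
print(found.group("caller"))                # params.name
print(found.group("post_filters").strip())  # | coalesce('guest')
```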
ddeutil/workflow/audit.py CHANGED
@@ -20,7 +20,7 @@ from typing_extensions import Self
 
  from .__types import DictData, TupleStr
  from .conf import config
- from .logs import TraceLog, get_trace
+ from .logs import TraceLog, get_dt_tznow, get_trace
 
  __all__: TupleStr = (
  "get_audit",
@@ -43,10 +43,12 @@ class BaseAudit(BaseModel, ABC):
  default_factory=dict,
  description="A context that receive from a workflow execution result.",
  )
- parent_run_id: Optional[str] = Field(default=None)
- run_id: str
- update: datetime = Field(default_factory=datetime.now)
- execution_time: float = Field(default=0)
+ parent_run_id: Optional[str] = Field(
+ default=None, description="A parent running ID."
+ )
+ run_id: str = Field(description="A running ID")
+ update: datetime = Field(default_factory=get_dt_tznow)
+ execution_time: float = Field(default=0, description="An execution time.")
 
  @model_validator(mode="after")
  def __model_action(self) -> Self:
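The new `update` default pulls in `get_dt_tznow` from `.logs`; that helper itself is not shown in this diff, so the sketch below is a hedged stand-in that only illustrates the intent (a timezone-aware "now" instead of the naive `datetime.now`).

```python
from datetime import datetime, timezone

from pydantic import BaseModel, Field


def get_dt_tznow() -> datetime:
    # Assumption: the real helper in ddeutil.workflow.logs resolves the zone
    # from the package configuration; UTC stands in for it here.
    return datetime.now(tz=timezone.utc)


class AuditLike(BaseModel):
    # Mirrors the changed BaseAudit field: a tz-aware default factory.
    update: datetime = Field(default_factory=get_dt_tznow)


print(AuditLike().update.tzinfo)  # UTC
```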
@@ -48,9 +48,10 @@ def tag(
  """Tag decorator function that set function attributes, ``tag`` and ``name``
  for making registries variable.
 
- :param: name: A tag name for make different use-case of a function.
- :param: alias: A alias function name that keeping in registries. If this
- value does not supply, it will use original function name from __name__.
+ :param: name: (str) A tag name for make different use-case of a function.
+ :param: alias: (str) A alias function name that keeping in registries.
+ If this value does not supply, it will use original function name
+ from `__name__` argument.
 
  :rtype: Callable[P, TagFunc]
  """
@@ -78,7 +79,7 @@ Registry = dict[str, Callable[[], TagFunc]]
  def make_registry(submodule: str) -> dict[str, Registry]:
  """Return registries of all functions that able to called with task.
 
- :param submodule: A module prefix that want to import registry.
+ :param submodule: (str) A module prefix that want to import registry.
 
  :rtype: dict[str, Registry]
  """
@@ -134,12 +135,7 @@ def extract_call(call: str) -> Callable[[], TagFunc]:
  """Extract Call function from string value to call partial function that
  does run it at runtime.
 
- :raise NotImplementedError: When the searching call's function result does
- not exist in the registry.
- :raise NotImplementedError: When the searching call's tag result does not
- exist in the registry with its function key.
-
- :param call: A call value that able to match with Task regex.
+ :param call: (str) A call value that able to match with Task regex.
 
  The format of call value should contain 3 regular expression groups
  which match with the below config format:
@@ -152,6 +148,11 @@ def extract_call(call: str) -> Callable[[], TagFunc]:
  >>> extract_call("tasks/return-type-not-valid@raise")
  ...
 
+ :raise NotImplementedError: When the searching call's function result does
+ not exist in the registry.
+ :raise NotImplementedError: When the searching call's tag result does not
+ exist in the registry with its function key.
+
  :rtype: Callable[[], TagFunc]
  """
  if not (found := Re.RE_TASK_FMT.search(call)):
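A hedged sketch of how these registry pieces fit together, using the decorator parameters documented above and the `tasks/<function-alias>@<tag>` string format from the doctest; the import path and registration details are assumptions, not confirmed by this diff.

```python
from ddeutil.workflow.caller import tag  # assumed module path for this wheel


@tag(name="demo", alias="say-hello")
def say_hello(name: str) -> dict:
    return {"message": f"hello {name}"}


# A call stage could then reference the function as "tasks/say-hello@demo";
# extract_call() resolves that string through the registry built by
# make_registry("tasks"), raising NotImplementedError when either the
# function alias or the tag is missing.
```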
@@ -18,6 +18,7 @@ class ErrorContext(BaseModel): # pragma: no cov
  class OutputContext(BaseModel): # pragma: no cov
  outputs: DictData = Field(default_factory=dict)
  errors: Optional[ErrorContext] = Field(default=None)
+ skipped: bool = Field(default=False)
 
  def is_exception(self) -> bool:
  return self.errors is not None
@@ -57,3 +58,4 @@ class JobContext(BaseModel): # pragma: no cov
  params: DictData = Field(description="A parameterize value")
  jobs: dict[str, StrategyMatrixContext]
  errors: Optional[ErrorContext] = Field(default=None)
+ skipped: bool = Field(default=False)
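A small usage sketch of the extended context model (the import path is assumed from the wheel layout); the point is only that `skipped` now defaults to `False` and travels beside `outputs`/`errors`.

```python
from ddeutil.workflow.context import OutputContext  # assumed import path

ctx = OutputContext(outputs={"records": 10})
print(ctx.skipped)         # False  (new field default)
print(ctx.is_exception())  # False  (no errors attached)
```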
ddeutil/workflow/job.py CHANGED
@@ -40,12 +40,8 @@ from .exceptions import (
  )
  from .result import Result, Status
  from .stages import Stage
- from .templates import has_template
- from .utils import (
- cross_product,
- filter_func,
- gen_id,
- )
+ from .templates import has_template, param2template
+ from .utils import cross_product, filter_func, gen_id
 
  MatrixFilter = list[dict[str, Union[str, int]]]
 
@@ -54,6 +50,7 @@ __all__: TupleStr = (
  "Strategy",
  "Job",
  "TriggerRules",
+ "TriggerState",
  "RunsOn",
  "RunsOnLocal",
  "RunsOnSelfHosted",
@@ -208,6 +205,16 @@ class TriggerRules(str, Enum):
  none_skipped: str = "none_skipped"
 
 
+ class TriggerState(str, Enum):
+ waiting: str = "waiting"
+ passed: str = "passed"
+ skipped: str = "skipped"
+ failed: str = "failed"
+
+ def is_waiting(self):
+ return self.value == "waiting"
+
+
 
  class RunsOnType(str, Enum):
  """Runs-On enum object."""
@@ -312,13 +319,21 @@ class Job(BaseModel):
  description="A target node for this job to use for execution.",
  alias="runs-on",
  )
+ condition: Optional[str] = Field(
+ default=None,
+ description="A job condition statement to allow job executable.",
+ alias="if",
+ )
  stages: list[Stage] = Field(
  default_factory=list,
  description="A list of Stage of this job.",
  )
  trigger_rule: TriggerRules = Field(
  default=TriggerRules.all_success,
- description="A trigger rule of tracking needed jobs.",
+ description=(
+ "A trigger rule of tracking needed jobs if feature will use when "
+ "the `raise_error` did not set from job and stage executions."
+ ),
  alias="trigger-rule",
  )
  needs: list[str] = Field(
@@ -382,12 +397,87 @@ class Job(BaseModel):
  return stage
  raise ValueError(f"Stage ID {stage_id} does not exists")
 
- def check_needs(self, jobs: dict[str, Any]) -> bool:
+ def check_needs(
+ self, jobs: dict[str, Any]
+ ) -> TriggerState: # pragma: no cov
  """Return True if job's need exists in an input list of job's ID.
 
+ :param jobs: A mapping of job model and its ID.
+
+ :rtype: TriggerState
+ """
+ if not self.needs:
+ return TriggerState.passed
+
+ def make_return(result: bool) -> TriggerState:
+ return TriggerState.passed if result else TriggerState.failed
+
+ need_exist: dict[str, Any] = {
+ need: jobs[need] for need in self.needs if need in jobs
+ }
+ if len(need_exist) != len(self.needs):
+ return TriggerState.waiting
+ elif all("skipped" in need_exist[job] for job in need_exist):
+ return TriggerState.skipped
+ elif self.trigger_rule == TriggerRules.all_done:
+ return TriggerState.passed
+ elif self.trigger_rule == TriggerRules.all_success:
+ rs = all(
+ k not in need_exist[job]
+ for k in ("errors", "skipped")
+ for job in need_exist
+ )
+ elif self.trigger_rule == TriggerRules.all_failed:
+ rs = all("errors" in need_exist[job] for job in need_exist)
+ elif self.trigger_rule == TriggerRules.one_success:
+ rs = sum(
+ k not in need_exist[job]
+ for k in ("errors", "skipped")
+ for job in need_exist
+ ) + 1 == len(self.needs)
+ elif self.trigger_rule == TriggerRules.one_failed:
+ rs = sum("errors" in need_exist[job] for job in need_exist) == 1
+ elif self.trigger_rule == TriggerRules.none_skipped:
+ rs = all("skipped" not in need_exist[job] for job in need_exist)
+ elif self.trigger_rule == TriggerRules.none_failed:
+ rs = all("errors" not in need_exist[job] for job in need_exist)
+ else: # pragma: no cov
+ raise NotImplementedError(
+ f"Trigger rule: {self.trigger_rule} does not support yet."
+ )
+ return make_return(rs)
+
+ def is_skipped(self, params: DictData | None = None) -> bool:
+ """Return true if condition of this job do not correct. This process
+ use build-in eval function to execute the if-condition.
+
+ :raise JobException: When it has any error raise from the eval
+ condition statement.
+ :raise JobException: When return type of the eval condition statement
+ does not return with boolean type.
+
+ :param params: (DictData) A parameters that want to pass to condition
+ template.
+
  :rtype: bool
  """
- return all(need in jobs for need in self.needs)
+ if self.condition is None:
+ return False
+
+ params: DictData = {} if params is None else params
+
+ try:
+ # WARNING: The eval build-in function is very dangerous. So, it
+ # should use the `re` module to validate eval-string before
+ # running.
+ rs: bool = eval(
+ param2template(self.condition, params), globals() | params, {}
+ )
+ if not isinstance(rs, bool):
+ raise TypeError("Return type of condition does not be boolean")
+ return not rs
+ except Exception as err:
+ raise JobException(f"{err.__class__.__name__}: {err}") from err
 
  def set_outputs(self, output: DictData, to: DictData) -> DictData:
  """Set an outputs from execution process to the received context. The
@@ -436,7 +526,9 @@ class Job(BaseModel):
  {"errors": output.pop("errors", {})} if "errors" in output else {}
  )
 
- if self.strategy.is_set():
+ if "SKIP" in output: # pragma: no cov
+ to["jobs"][_id] = output["SKIP"]
+ elif self.strategy.is_set():
  to["jobs"][_id] = {"strategies": output, **errors}
  else:
  _output = output.get(next(iter(output), "FIRST"), {})
@@ -458,8 +550,8 @@ class Job(BaseModel):
  multithread on this metrics to the `stages` field of this job.
 
  :param params: An input parameters that use on job execution.
- :param run_id: A job running ID for this execution.
- :param parent_run_id: A parent workflow running ID for this release.
+ :param run_id: (str) A job running ID.
+ :param parent_run_id: (str) A parent workflow running ID.
  :param result: (Result) A result object for keeping context and status
  data.
  :param event: (Event) An event manager that pass to the
@@ -559,6 +651,7 @@ def local_execute_strategy(
 
  if stage.is_skipped(params=context):
  result.trace.info(f"[STAGE]: Skip stage: {stage.iden!r}")
+ stage.set_outputs(output={"skipped": True}, to=context)
  continue
 
  if event and event.is_set():
@@ -623,9 +716,6 @@ def local_execute_strategy(
  },
  )
 
- # NOTE: Remove the current stage object for saving memory.
- del stage
-
  return result.catch(
  status=Status.SUCCESS,
  context={
@@ -680,7 +770,17 @@ def local_execute(
 
  for strategy in job.strategy.make():
 
- # TODO: stop and raise error if the event was set.
+ if event and event.is_set(): # pragma: no cov
+ return result.catch(
+ status=Status.FAILED,
+ context={
+ "errors": JobException(
+ "Job strategy was canceled from event that had set "
+ "before strategy execution."
+ ).to_dict()
+ },
+ )
+
  local_execute_strategy(
  job=job,
  strategy=strategy,
@@ -694,12 +794,22 @@ def local_execute(
 
  fail_fast_flag: bool = job.strategy.fail_fast
  ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
-
  result.trace.info(
  f"[JOB]: Start multithreading: {job.strategy.max_parallel} threads "
  f"with {ls} mode."
  )
 
+ if event and event.is_set(): # pragma: no cov
+ return result.catch(
+ status=Status.FAILED,
+ context={
+ "errors": JobException(
+ "Job strategy was canceled from event that had set "
+ "before strategy execution."
+ ).to_dict()
+ },
+ )
+
  # IMPORTANT: Start running strategy execution by multithreading because
  # it will run by strategy values without waiting previous execution.
  with ThreadPoolExecutor(
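Taken together, the job.py changes let a dependent job decide what to do from the contexts of its needed jobs. The sketch below is hedged (it assumes a `Job` validates from just a `needs` list and keeps the default `all_success` trigger rule); the need contexts imitate what `set_outputs` writes into `context["jobs"]`.

```python
from ddeutil.workflow.job import Job, TriggerState

# Assumption: a Job can be validated from only these keys.
job = Job.model_validate({"needs": ["extract", "transform"]})

# One needed job has not produced a context yet -> keep waiting in the queue.
print(job.check_needs({"extract": {"stages": {}}}) is TriggerState.waiting)  # True

# A skipped dependency breaks the default all_success rule -> failed.
ctx = {"extract": {"stages": {}}, "transform": {"skipped": True}}
print(job.check_needs(ctx) is TriggerState.failed)  # True
```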
@@ -41,7 +41,7 @@ from inspect import Parameter
  from pathlib import Path
  from subprocess import CompletedProcess
  from textwrap import dedent
- from typing import Optional, Union
+ from typing import Annotated, Optional, Union
 
  from pydantic import BaseModel, Field
  from pydantic.functional_validators import model_validator
@@ -230,7 +230,10 @@ class BaseStage(BaseModel, ABC):
 
  ... (iii) to: {
  'stages': {
- '<stage-id>': {'outputs': {'foo': 'bar'}}
+ '<stage-id>': {
+ 'outputs': {'foo': 'bar'},
+ 'skipped': False
+ }
  }
  }
 
@@ -255,8 +258,12 @@ class BaseStage(BaseModel, ABC):
  errors: DictData = (
  {"errors": output.pop("errors", {})} if "errors" in output else {}
  )
-
- to["stages"][_id] = {"outputs": output, **errors}
+ skipping: dict[str, bool] = (
+ {"skipped": output.pop("skipped", False)}
+ if "skipped" in output
+ else {}
+ )
+ to["stages"][_id] = {"outputs": output, **skipping, **errors}
  return to
 
  def is_skipped(self, params: DictData | None = None) -> bool:
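The same bookkeeping can be shown without any package imports; the helper below is a stand-alone mirror of the new `set_outputs` branch above, so the reader can see exactly where the `skipped` marker ends up.

```python
def set_stage_output(_id: str, output: dict, to: dict) -> dict:
    # Mirrors BaseStage.set_outputs after this change: pop the special keys
    # out of the raw output and store them beside "outputs".
    errors = {"errors": output.pop("errors", {})} if "errors" in output else {}
    skipping = (
        {"skipped": output.pop("skipped", False)} if "skipped" in output else {}
    )
    to.setdefault("stages", {})[_id] = {"outputs": output, **skipping, **errors}
    return to


print(set_stage_output("hello-world", {"skipped": True}, {"stages": {}}))
# {'stages': {'hello-world': {'outputs': {}, 'skipped': True}}}
```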
@@ -539,19 +546,11 @@ class PyStage(BaseStage):
 
  :rtype: DictData
  """
- # NOTE: The output will fileter unnecessary keys from locals.
- lc: DictData = output.get("locals", {})
+ lc: DictData = output.pop("locals", {})
+ gb: DictData = output.pop("globals", {})
  super().set_outputs(
- (
- {k: lc[k] for k in self.filter_locals(lc)}
- | ({"errors": output["errors"]} if "errors" in output else {})
- ),
- to=to,
+ {k: lc[k] for k in self.filter_locals(lc)} | output, to=to
  )
-
- # NOTE: Override value that changing from the globals that pass via the
- # exec function.
- gb: DictData = output.get("globals", {})
  to.update({k: gb[k] for k in to if k in gb})
  return to
 
@@ -572,17 +571,13 @@ class PyStage(BaseStage):
  run_id=gen_id(self.name + (self.id or ""), unique=True)
  )
 
- # NOTE: Replace the run statement that has templating value.
- run: str = param2template(dedent(self.run), params)
-
- # NOTE: create custom globals value that will pass to exec function.
- _globals: DictData = (
+ lc: DictData = {}
+ gb: DictData = (
  globals()
  | params
  | param2template(self.vars, params)
  | {"result": result}
  )
- lc: DictData = {}
 
  # NOTE: Start exec the run statement.
  result.trace.info(f"[STAGE]: Py-Execute: {self.name}")
@@ -591,14 +586,12 @@ class PyStage(BaseStage):
  "check your statement be safe before execute."
  )
 
- # TODO: Add Python systax wrapper for checking dangerous code before run
- # this statement.
  # WARNING: The exec build-in function is very dangerous. So, it
  # should use the re module to validate exec-string before running.
- exec(run, _globals, lc)
+ exec(param2template(dedent(self.run), params), gb, lc)
 
  return result.catch(
- status=Status.SUCCESS, context={"locals": lc, "globals": _globals}
+ status=Status.SUCCESS, context={"locals": lc, "globals": gb}
  )
 
 
@@ -795,7 +788,9 @@ class ParallelStage(BaseStage): # pragma: no cov
  ... }
  """
 
- parallel: dict[str, list[Stage]] = Field()
+ parallel: dict[str, list[Stage]] = Field(
+ description="A mapping of parallel branch ID.",
+ )
  max_parallel_core: int = Field(default=2)
 
  @staticmethod
@@ -807,9 +802,10 @@ class ParallelStage(BaseStage): # pragma: no cov
  ) -> DictData:
  """Task execution method for passing a branch to each thread.
 
- :param branch:
- :param params:
- :param result:
+ :param branch: A branch ID.
+ :param params: A parameter data that want to use in this execution.
+ :param result: (Result) A result object for keeping context and status
+ data.
  :param stages:
 
  :rtype: DictData
@@ -1008,7 +1004,7 @@ class IfStage(BaseStage): # pragma: no cov
 
  """
 
- case: str
+ case: str = Field(description="A case condition for routing.")
  match: list[dict[str, Union[str, Stage]]]
 
  def execute(
@@ -1016,6 +1012,18 @@ class IfStage(BaseStage): # pragma: no cov
  ) -> Result: ...
 
 
+ class RaiseStage(BaseStage): # pragma: no cov
+ message: str = Field(
+ description="An error message that want to raise",
+ alias="raise",
+ )
+
+ def execute(
+ self, params: DictData, *, result: Result | None = None
+ ) -> Result:
+ raise StageException(self.message)
+
+
  # TODO: Not implement this stages yet
  class HookStage(BaseStage): # pragma: no cov
  hook: str
@@ -1050,6 +1058,11 @@ class VirtualPyStage(PyStage): # pragma: no cov
 
  def create_py_file(self, py: str, run_id: str | None): ...
 
+ def execute(
+ self, params: DictData, *, result: Result | None = None
+ ) -> Result:
+ return super().execute(params, result=result)
+
 
  # TODO: Not implement this stages yet
  class SensorStage(BaseStage): # pragma: no cov
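A hedged usage sketch of the new `RaiseStage`: it is built through the `raise` alias shown above and simply fails the stage with its message. The import paths and the `name` field on the base stage are assumptions based on the rest of this diff.

```python
from ddeutil.workflow.exceptions import StageException  # assumed location
from ddeutil.workflow.stages import RaiseStage

stage = RaiseStage.model_validate(
    {"name": "boom", "raise": "force this job to fail"}
)
try:
    stage.execute(params={})
except StageException as err:
    print(err)  # force this job to fail
```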
@@ -1064,12 +1077,16 @@ class SensorStage(BaseStage): # pragma: no cov
  # From the current build-in stages, they do not have stage that have the same
  # fields that because of parsing on the Job's stages key.
  #
- Stage = Union[
- EmptyStage,
- BashStage,
- CallStage,
- TriggerStage,
- ForEachStage,
- ParallelStage,
- PyStage,
+ Stage = Annotated[
+ Union[
+ EmptyStage,
+ BashStage,
+ CallStage,
+ TriggerStage,
+ ForEachStage,
+ ParallelStage,
+ PyStage,
+ RaiseStage,
+ ],
+ Field(union_mode="smart"),
  ]
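The switch from a bare `Union` to `Annotated[Union[...], Field(union_mode="smart")]` leans on pydantic v2 union handling. The self-contained sketch below (stand-in models, not the package's real stages) shows the behaviour being relied on: the member whose fields fit the input mapping wins.

```python
from typing import Annotated, Union

from pydantic import BaseModel, Field


class EchoLike(BaseModel):
    name: str
    echo: str


class PyLike(BaseModel):
    name: str
    run: str


# Same shape as the new Stage alias above, with stand-in members.
AnyStage = Annotated[Union[EchoLike, PyLike], Field(union_mode="smart")]


class JobLike(BaseModel):
    stages: list[AnyStage]


job = JobLike.model_validate({"stages": [{"name": "st", "run": "x = 1"}]})
print(type(job.stages[0]).__name__)  # PyLike
```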
@@ -18,7 +18,7 @@ try:
  except ImportError:
  from typing_extensions import ParamSpec
 
- from ddeutil.core import getdot, hasdot, import_string
+ from ddeutil.core import getdot, import_string
  from ddeutil.io import search_env_replace
 
  from .__types import DictData, Re
@@ -59,7 +59,8 @@ def custom_filter(name: str) -> Callable[P, FilterFunc]:
  """Custom filter decorator function that set function attributes, ``filter``
  for making filter registries variable.
 
- :param: name: A filter name for make different use-case of a function.
+ :param: name: (str) A filter name for make different use-case of a function.
+
  :rtype: Callable[P, FilterFunc]
  """
 
@@ -108,7 +109,7 @@ def get_args_const(
  ) -> tuple[str, list[Constant], dict[str, Constant]]:
  """Get arguments and keyword-arguments from function calling string.
 
- :param expr: An expr string value.
+ :param expr: (str) An expr string value.
 
  :rtype: tuple[str, list[Constant], dict[str, Constant]]
  """
@@ -154,7 +155,7 @@ def get_args_from_filter(
  and validate it with the filter functions mapping dict.
 
  :param ft:
- :param filters:
+ :param filters: A mapping of filter registry.
 
  :rtype: tuple[str, FilterRegistry, list[Any], dict[Any, Any]]
  """
@@ -185,7 +186,7 @@ def map_post_filter(
 
  :param value: A string value that want to map with filter function.
  :param post_filter: A list of post-filter function name.
- :param filters: A filter registry.
+ :param filters: A mapping of filter registry.
 
  :rtype: T
  """
@@ -203,8 +204,8 @@ def map_post_filter(
  except Exception as err:
  logger.warning(str(err))
  raise UtilException(
- f"The post-filter function: {func_name} does not fit with "
- f"{value} (type: {type(value).__name__})."
+ f"The post-filter: {func_name!r} does not fit with {value!r} "
+ f"(type: {type(value).__name__})."
  ) from None
  return value
 
@@ -258,10 +259,10 @@ def str2template(
  with the workflow parameter types that is `str`, `int`, `datetime`, and
  `list`.
 
- :param value: A string value that want to map with params
- :param params: A parameter value that getting with matched regular
- expression.
- :param filters:
+ :param value: (str) A string value that want to map with params.
+ :param params: (DictData) A parameter value that getting with matched
+ regular expression.
+ :param filters: A mapping of filter registry.
 
  :rtype: str
  """
@@ -281,11 +282,14 @@ def str2template(
  for i in (found.post_filters.strip().removeprefix("|").split("|"))
  if i != ""
  ]
- if not hasdot(caller, params):
- raise UtilException(f"The params does not set caller: {caller!r}.")
 
  # NOTE: from validate step, it guarantees that caller exists in params.
- getter: Any = getdot(caller, params)
+ try:
+ getter: Any = getdot(caller, params)
+ except ValueError as err:
+ raise UtilException(
+ f"Params does not set caller: {caller!r}."
+ ) from err
 
  # NOTE:
  # If type of getter caller is not string type, and it does not use to
@@ -301,25 +305,33 @@ def str2template(
 
  value: str = value.replace(found.full, getter, 1)
 
+ if value == "None":
+ return None
+
  return search_env_replace(value)
 
 
- def param2template(value: T, params: DictData) -> T:
+ def param2template(
+ value: T,
+ params: DictData,
+ filters: dict[str, FilterRegistry] | None = None,
+ ) -> T:
  """Pass param to template string that can search by ``RE_CALLER`` regular
  expression.
 
  :param value: A value that want to map with params
  :param params: A parameter value that getting with matched regular
  expression.
+ :param filters: A filter mapping for mapping with `map_post_filter` func.
 
  :rtype: T
  :returns: An any getter value from the params input.
  """
- filters: dict[str, FilterRegistry] = make_filter_registry()
+ filters: dict[str, FilterRegistry] = filters or make_filter_registry()
  if isinstance(value, dict):
- return {k: param2template(value[k], params) for k in value}
+ return {k: param2template(value[k], params, filters) for k in value}
  elif isinstance(value, (list, tuple, set)):
- return type(value)([param2template(i, params) for i in value])
+ return type(value)([param2template(i, params, filters) for i in value])
  elif not isinstance(value, str):
  return value
  return str2template(value, params, filters=filters)
@@ -329,8 +341,9 @@ def param2template(value: T, params: DictData) -> T:
  def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
  """Format datetime object to string with the format.
 
- :param value: A datetime value that want to format to string value.
- :param fmt: A format string pattern that passing to the `dt.strftime`
+ :param value: (datetime) A datetime value that want to format to string
+ value.
+ :param fmt: (str) A format string pattern that passing to the `dt.strftime`
  method.
 
  :rtype: str
@@ -340,3 +353,9 @@ def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
  raise UtilException(
  "This custom function should pass input value with datetime type."
  )
+
+
+ @custom_filter("coalesce") # pragma: no cov
+ def coalesce(value: T | None, default: Any) -> T:
+ """Coalesce with default value if the main value is None."""
+ return default if value is None else value
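A hedged usage sketch of the two template additions above: `param2template` can now reuse a prepared filter registry, and the new `coalesce` post-filter backfills a `None` parameter. The expected result follows from the filter's docstring; the exact import path is assumed from the wheel layout.

```python
from ddeutil.workflow.templates import make_filter_registry, param2template

filters = make_filter_registry()  # build once, reuse across nested calls
value = param2template(
    "${{ params.name | coalesce('guest') }}",
    params={"params": {"name": None}},
    filters=filters,
)
print(value)  # expected: "guest"
```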
@@ -34,7 +34,7 @@ from .audit import Audit, get_audit
  from .conf import Loader, config, get_logger
  from .cron import On
  from .exceptions import JobException, WorkflowException
- from .job import Job
+ from .job import Job, TriggerState
  from .params import Param
  from .result import Result, Status
  from .templates import has_template, param2template
@@ -882,8 +882,6 @@ class Workflow(BaseModel):
  f"workflow."
  )
 
- result.trace.info(f"[WORKFLOW]: Start execute job: {job_id!r}")
-
  if event and event.is_set(): # pragma: no cov
  raise WorkflowException(
  "Workflow job was canceled from event that had set before "
@@ -898,15 +896,20 @@ class Workflow(BaseModel):
  #
  try:
  job: Job = self.jobs[job_id]
- job.set_outputs(
- job.execute(
- params=params,
- run_id=result.run_id,
- parent_run_id=result.parent_run_id,
- event=event,
- ).context,
- to=params,
- )
+ if job.is_skipped(params=params):
+ result.trace.info(f"[JOB]: Skip job: {job_id!r}")
+ job.set_outputs(output={"SKIP": {"skipped": True}}, to=params)
+ else:
+ result.trace.info(f"[JOB]: Start execute job: {job_id!r}")
+ job.set_outputs(
+ job.execute(
+ params=params,
+ run_id=result.run_id,
+ parent_run_id=result.parent_run_id,
+ event=event,
+ ).context,
+ to=params,
+ )
  except JobException as err:
  result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
  if raise_error:
@@ -1054,11 +1057,20 @@ class Workflow(BaseModel):
  job_id: str = job_queue.get()
  job: Job = self.jobs[job_id]
 
- if not job.check_needs(context["jobs"]):
+ if (check := job.check_needs(context["jobs"])).is_waiting():
  job_queue.task_done()
  job_queue.put(job_id)
  time.sleep(0.15)
  continue
+ elif check == TriggerState.failed: # pragma: no cov
+ raise WorkflowException(
+ "Check job trigger rule was failed."
+ )
+ elif check == TriggerState.skipped: # pragma: no cov
+ result.trace.info(f"[JOB]: Skip job: {job_id!r}")
+ job.set_outputs({"SKIP": {"skipped": True}}, to=context)
+ job_queue.task_done()
+ continue
 
  # NOTE: Start workflow job execution with deep copy context data
  # before release.
@@ -1149,11 +1161,18 @@ class Workflow(BaseModel):
  job: Job = self.jobs[job_id]
 
  # NOTE: Waiting dependency job run successful before release.
- if not job.check_needs(context["jobs"]):
+ if (check := job.check_needs(context["jobs"])).is_waiting():
  job_queue.task_done()
  job_queue.put(job_id)
  time.sleep(0.075)
  continue
+ elif check == TriggerState.failed: # pragma: no cov
+ raise WorkflowException("Check job trigger rule was failed.")
+ elif check == TriggerState.skipped: # pragma: no cov
+ result.trace.info(f"[JOB]: Skip job: {job_id!r}")
+ job.set_outputs({"SKIP": {"skipped": True}}, to=context)
+ job_queue.task_done()
+ continue
 
  # NOTE: Start workflow job execution with deep copy context data
  # before release. This job execution process will run until
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.38
+ Version: 0.0.39
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -130,7 +130,7 @@ flowchart LR
  > - [Google **Workflows**](https://cloud.google.com/workflows)
  > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
 
- ## :round_pushpin: Installation
+ ## 📦 Installation
 
  This project need `ddeutil` and `ddeutil-io` extension namespace packages.
  If you want to install this package with application add-ons, you should add
@@ -1,22 +1,22 @@
- ddeutil/workflow/__about__.py,sha256=nUvuf0abbrBoZoHVyqcyvfYEilSx2oTLDwA7Az8m5UU,28
+ ddeutil/workflow/__about__.py,sha256=lWjP4jp8-3HcVlFUQBiNI9hwnZOyEL0S-RmG6USjiag,28
  ddeutil/workflow/__cron.py,sha256=3i-wmjTlh0ADCzN9pLKaWHzJkXzC72aIBmVEQSbyCCE,26895
  ddeutil/workflow/__init__.py,sha256=hIM2Ha-7F3YF3aLsu4IY3N-UvD_EE1ai6DO6WL2mILg,1908
- ddeutil/workflow/__types.py,sha256=CK1jfzyHP9P-MB0ElhpJZ59ZFGJC9MkQuAop5739_9k,4304
- ddeutil/workflow/audit.py,sha256=wx70RKRdHj1d2431ilpt9OPTInMByjqXkYff7l5pvF4,8230
- ddeutil/workflow/caller.py,sha256=oFjB9zpG93aeVd-T_4QMz5kk64lo-iyxaPmCo_1_AzU,5693
+ ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
+ ddeutil/workflow/audit.py,sha256=BpzLI6ZKXi6xQO8C2sAHxSi8RAfF31dMPdjn3DbYlw8,8364
+ ddeutil/workflow/caller.py,sha256=M7_nan1DbRUAHEIQoQc1qIX4AHgI5fKFNx0KDbzhDMk,5736
  ddeutil/workflow/conf.py,sha256=MHzBeLZukFeIQ-YhxOz5uKCnGYqbhYdpwAEh9A9h_OM,12216
- ddeutil/workflow/context.py,sha256=fdGUuQnsjDCDGibPHKMWc5PA3pArHpfv6Aff2nVvZ0U,1618
+ ddeutil/workflow/context.py,sha256=vsk4JQL7t3KsnKPfshw3O7YrPFo2h4rnnNd3B-G9Kj4,1700
  ddeutil/workflow/cron.py,sha256=j8EeoHst70toRfnD_frix41vrI-eLYVJkZ9yeJtpfnI,8871
  ddeutil/workflow/exceptions.py,sha256=fO37f9p7lOjIJgVOpKE_1X44yJTwBepyukZV9a7NNm4,1241
- ddeutil/workflow/job.py,sha256=3oDY-THXYMvmW0kKkHN2VMJyq0u0_4s9H0eX40Iiw6E,24267
+ ddeutil/workflow/job.py,sha256=LCQf_3gyTmnj6aAQ0ksz4lJxU10-F5MfVFHczkWt_VE,28669
  ddeutil/workflow/logs.py,sha256=mAKQjNri-oourd3dBLLG3Fqoo4QG27U9IO2h6IqCOU0,10196
  ddeutil/workflow/params.py,sha256=qw9XJyjh2ocf9pf6h_XiYHLOvQN4R5TMqPElmItKnRM,8019
  ddeutil/workflow/result.py,sha256=cDkItrhpzZfMS1Oj8IZX8O-KBD4KZYDi43XJZvvC3Gc,4318
  ddeutil/workflow/scheduler.py,sha256=YMebYpNjqg6RWaE17sicwM3uthupeBGSGCnDGy4aKd8,26286
- ddeutil/workflow/stages.py,sha256=aHMPOPxRiSo2jBFqQOlua01sQ18RMvjaKk2Dotnu7v4,36480
- ddeutil/workflow/templates.py,sha256=A0JgZFGkBv-AX-EskZj656nG5zFd3j1PpLpyXihf6Xg,10967
+ ddeutil/workflow/stages.py,sha256=ONBR7GjgLEbA21wNioBrhOucwqj6M1v2ht5JhipoWSg,36994
+ ddeutil/workflow/templates.py,sha256=yA2xgrSXcxfBxNT2hc6v06HkVY_0RKsc1UwdJRip9EE,11554
  ddeutil/workflow/utils.py,sha256=Fz5y-LK_JfikDfvMKcFaxad_VvCnr7UC2C9KFCbzPNA,7105
- ddeutil/workflow/workflow.py,sha256=v6LTbXZ34yWmddZquBxpahxCW1PFb3177q1T3Ssf5C0,46636
+ ddeutil/workflow/workflow.py,sha256=dbOnLx54KtPERuv5M_QU2nZiyiAnzv6pApX49rJi2eI,47833
  ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
  ddeutil/workflow/api/api.py,sha256=gGQtqkzyJNaJIfka_w2M1lrCS3Ep46re2Dznsk9RxYQ,5191
  ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
@@ -26,8 +26,8 @@ ddeutil/workflow/api/routes/job.py,sha256=YVta083i8vU8-o4WdKFwDpfdC9vN1dZ6goZSmN
  ddeutil/workflow/api/routes/logs.py,sha256=uEQ6k5PwRg-k0eSiSFArXfyeDq5xzepxILrRnwgAe1I,5373
  ddeutil/workflow/api/routes/schedules.py,sha256=uWYDOwlV8w56hKQmfkQFwdZ6t2gZSJeCdBIzMmJenAQ,4824
  ddeutil/workflow/api/routes/workflows.py,sha256=KVywA7vD9b4QrfmWBdSFF5chj34yJe1zNCzl6iBMeGI,4538
- ddeutil_workflow-0.0.38.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.38.dist-info/METADATA,sha256=haHYX6Ui8EXTi2qJA5x_SOWSmVqmXRKsHLdgyMp7Two,19511
- ddeutil_workflow-0.0.38.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
- ddeutil_workflow-0.0.38.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.38.dist-info/RECORD,,
+ ddeutil_workflow-0.0.39.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.39.dist-info/METADATA,sha256=r6e0p1K2dUtDOl5XJa-FqxtqgcRzJz7gZWcw9HuhWDc,19500
+ ddeutil_workflow-0.0.39.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
+ ddeutil_workflow-0.0.39.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.39.dist-info/RECORD,,