ddeutil-workflow 0.0.42__py3-none-any.whl → 0.0.43__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
- __version__: str = "0.0.42"
+ __version__: str = "0.0.43"
ddeutil/workflow/__init__.py CHANGED
@@ -4,7 +4,7 @@
  # license information.
  # ------------------------------------------------------------------------------
  from .__cron import CronJob, CronRunner
- from .__types import Re
+ from .__types import DictData, DictStr, Matrix, Re, TupleStr
  from .conf import (
      Config,
      Loader,
@@ -47,6 +47,10 @@ from .params import (
      StrParam,
  )
  from .result import (
+     FAILED,
+     SKIP,
+     SUCCESS,
+     WAIT,
      Result,
      Status,
  )
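
With these re-exports, the status singletons can be imported straight from the package root next to `Result` and `Status`. A minimal sketch of the new spelling, assuming a 0.0.43 install:

```python
from ddeutil.workflow import FAILED, SUCCESS, Result

# A fresh Result starts in the WAIT state and moves on via `catch`.
result: Result = Result()
result.catch(status=SUCCESS, context={"outputs": {"rows": 10}})

# SUCCESS/FAILED are the Status members themselves, so plain comparison works.
if result.status == SUCCESS:
    print("run finished cleanly")
elif result.status == FAILED:
    print("run failed")
```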
ddeutil/workflow/exceptions.py CHANGED
@@ -9,10 +9,20 @@ annotate for handle error only.
  """
  from __future__ import annotations

- from typing import Any
+ from typing import TypedDict

+ ErrorData = TypedDict(
+     "ErrorData",
+     {
+         "class": Exception,
+         "name": str,
+         "message": str,
+     },
+ )

- def to_dict(exception: Exception) -> dict[str, Any]:  # pragma: no cov
+
+ def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
+     """Create dict data from exception instance."""
      return {
          "class": exception,
          "name": exception.__class__.__name__,
@@ -22,7 +32,7 @@ def to_dict(exception: Exception) -> dict[str, Any]:  # pragma: no cov

  class BaseWorkflowException(Exception):

-     def to_dict(self) -> dict[str, Any]:
+     def to_dict(self) -> ErrorData:
          return to_dict(self)

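`to_dict` now documents its payload with the functional `ErrorData` TypedDict (the functional form is needed because `class` is not a valid attribute name). A short sketch of what a caller gets back; the `ValueError` is only an illustration:

```python
from ddeutil.workflow.exceptions import ErrorData, to_dict

try:
    raise ValueError("missing 'asat-dt' parameter")  # hypothetical failure
except ValueError as exc:
    data: ErrorData = to_dict(exc)

# The payload keeps the live exception object plus readable metadata:
assert data["class"] is exc              # the exception instance itself
assert data["name"] == "ValueError"      # its class name
assert isinstance(data["message"], str)  # a human-readable message
```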
ddeutil/workflow/job.py CHANGED
@@ -39,7 +39,7 @@ from .exceptions import (
      StageException,
      UtilException,
  )
- from .result import Result, Status
+ from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
  from .reusables import has_template, param2template
  from .stages import Stage
  from .utils import cross_product, filter_func, gen_id
@@ -51,7 +51,6 @@ __all__: TupleStr = (
      "Strategy",
      "Job",
      "TriggerRules",
-     "TriggerState",
      "RunsOn",
      "RunsOnLocal",
      "RunsOnSelfHosted",
@@ -206,16 +205,6 @@ class TriggerRules(str, Enum):
      none_skipped: str = "none_skipped"


- class TriggerState(str, Enum):
-     waiting: str = "waiting"
-     passed: str = "passed"
-     skipped: str = "skipped"
-     failed: str = "failed"
-
-     def is_waiting(self):
-         return self.value == "waiting"
-
-
  class RunsOnType(str, Enum):
      """Runs-On enum object."""

@@ -407,30 +396,31 @@ class Job(BaseModel):
      def check_needs(
          self,
          jobs: dict[str, Any],
-     ) -> TriggerState:  # pragma: no cov
-         """Return True if job's need exists in an input list of job's ID.
+     ) -> Status:  # pragma: no cov
+         """Return Status enum for checking job's need trigger logic in an
+         input list of job's ID.

          :param jobs: A mapping of job ID and result context.

          :raise NotImplementedError: If the job trigger rule out of scope.

-         :rtype: TriggerState
+         :rtype: Status
          """
          if not self.needs:
-             return TriggerState.passed
+             return SUCCESS

-         def make_return(result: bool) -> TriggerState:
-             return TriggerState.passed if result else TriggerState.failed
+         def make_return(result: bool) -> Status:
+             return SUCCESS if result else FAILED

          need_exist: dict[str, Any] = {
              need: jobs[need] for need in self.needs if need in jobs
          }
          if len(need_exist) != len(self.needs):
-             return TriggerState.waiting
+             return WAIT
          elif all("skipped" in need_exist[job] for job in need_exist):
-             return TriggerState.skipped
+             return SKIP
          elif self.trigger_rule == TriggerRules.all_done:
-             return TriggerState.passed
+             return SUCCESS
          elif self.trigger_rule == TriggerRules.all_success:
              rs = all(
                  k not in need_exist[job]
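
With `TriggerState` gone, callers compare the return value of `check_needs` against the shared `Status` members instead of calling `is_waiting()`. A hedged before/after sketch, assuming `job` is a `Job` and `context["jobs"]` holds the per-job result mapping (the names used later in workflow.py):

```python
from ddeutil.workflow.result import FAILED, SKIP, SUCCESS, WAIT, Status

# 0.0.42: if (check := job.check_needs(context["jobs"])).is_waiting(): ...
# 0.0.43: the same call now answers with a Status member.
check: Status = job.check_needs(context["jobs"])
if check == WAIT:
    ...  # a dependency has not reported yet; requeue this job
elif check == FAILED:
    ...  # the trigger rule rejected this job
elif check == SKIP:
    ...  # every dependency was skipped
else:
    assert check == SUCCESS  # dependencies satisfied; safe to execute
```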
@@ -674,7 +664,7 @@ def local_execute_strategy(
              "strategy execution."
          )
          return result.catch(
-             status=Status.FAILED,
+             status=FAILED,
              context={
                  strategy_id: {
                      "matrix": strategy,
@@ -724,7 +714,7 @@ def local_execute_strategy(
          ) from None

      return result.catch(
-         status=Status.FAILED,
+         status=FAILED,
          context={
              strategy_id: {
                  "matrix": strategy,
@@ -735,7 +725,7 @@ def local_execute_strategy(
      )

      return result.catch(
-         status=Status.SUCCESS,
+         status=SUCCESS,
          context={
              strategy_id: {
                  "matrix": strategy,
@@ -790,7 +780,7 @@ def local_execute(

      if event and event.is_set():  # pragma: no cov
          return result.catch(
-             status=Status.FAILED,
+             status=FAILED,
              context={
                  "errors": JobException(
                      "Job strategy was canceled from event that had set "
@@ -808,7 +798,7 @@ def local_execute(
                  raise_error=raise_error,
              )

-         return result.catch(status=Status.SUCCESS)
+         return result.catch(status=SUCCESS)

      fail_fast_flag: bool = job.strategy.fail_fast
      ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
@@ -819,7 +809,7 @@ def local_execute(

      if event and event.is_set():  # pragma: no cov
          return result.catch(
-             status=Status.FAILED,
+             status=FAILED,
              context={
                  "errors": JobException(
                      "Job strategy was canceled from event that had set "
@@ -849,7 +839,7 @@ def local_execute(
          ]

      context: DictData = {}
-     status: Status = Status.SUCCESS
+     status: Status = SUCCESS

      if not fail_fast_flag:
          done = as_completed(futures, timeout=1800)
@@ -875,7 +865,7 @@ def local_execute(
          try:
              future.result()
          except JobException as err:
-             status = Status.FAILED
+             status = FAILED
              result.trace.error(
                  f"[JOB]: {ls} Catch:\n\t{err.__class__.__name__}:"
                  f"\n\t{err}"
@@ -903,7 +893,7 @@ def self_hosted_execute(
      )

      if event and event.is_set():
-         return result.catch(status=Status.FAILED)
+         return result.catch(status=FAILED)

      import requests

@@ -922,5 +912,5 @@ def self_hosted_execute(
          f"{job.runs_on.args.host!r}"
      )

-     return result.catch(status=Status.FAILED)
-     return result.catch(status=Status.SUCCESS)
+     return result.catch(status=FAILED)
+     return result.catch(status=SUCCESS)
ddeutil/workflow/params.py CHANGED
@@ -3,7 +3,6 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- # [ ] Use config
  """This module include all Param Pydantic Models that use for parsing an
  incoming parameters that was passed to the Workflow and Schedule objects before
  execution or release methods.
@@ -18,6 +17,7 @@ from abc import ABC, abstractmethod
  from datetime import date, datetime
  from typing import Annotated, Any, Literal, Optional, TypeVar, Union

+ from ddeutil.core import str2dict, str2list
  from pydantic import BaseModel, Field

  from .__types import TupleStr
@@ -31,6 +31,8 @@ __all__: TupleStr = (
      "IntParam",
      "Param",
      "StrParam",
+     "ArrayParam",
+     "MapParam",
  )

  T = TypeVar("T")
@@ -42,7 +44,10 @@ class BaseParam(BaseModel, ABC):
      """

      desc: Optional[str] = Field(
-         default=None, description="A description of parameter providing."
+         default=None,
+         description=(
+             "A description of this parameter provide to the workflow model."
+         ),
      )
      required: bool = Field(
          default=True,
@@ -52,6 +57,7 @@ class BaseParam(BaseModel, ABC):

      @abstractmethod
      def receive(self, value: Optional[T] = None) -> T:
+         """Abstract method receive value to this parameter model."""
          raise NotImplementedError(
              "Receive value and validate typing before return valid value."
          )
@@ -66,13 +72,14 @@ class DefaultParam(BaseParam):
          default=False,
          description="A require flag for the default-able parameter value.",
      )
-     default: Optional[str] = Field(
+     default: Optional[Any] = Field(
          default=None,
          description="A default value if parameter does not pass.",
      )

      @abstractmethod
      def receive(self, value: Optional[Any] = None) -> Any:
+         """Abstract method receive value to this parameter model."""
          raise NotImplementedError(
              "Receive value and validate typing before return valid value."
          )
@@ -82,7 +89,10 @@ class DateParam(DefaultParam):  # pragma: no cov
      """Date parameter model."""

      type: Literal["date"] = "date"
-     default: date = Field(default_factory=get_d_now)
+     default: date = Field(
+         default_factory=get_d_now,
+         description="A default date that make from the current date func.",
+     )

      def receive(self, value: Optional[str | datetime | date] = None) -> date:
          """Receive value that match with date. If an input value pass with
@@ -116,7 +126,12 @@ class DatetimeParam(DefaultParam):
      """Datetime parameter model."""

      type: Literal["datetime"] = "datetime"
-     default: datetime = Field(default_factory=get_dt_now)
+     default: datetime = Field(
+         default_factory=get_dt_now,
+         description=(
+             "A default datetime that make from the current datetime func."
+         ),
+     )

      def receive(self, value: str | datetime | date | None = None) -> datetime:
          """Receive value that match with datetime. If an input value pass with
@@ -167,10 +182,6 @@ class IntParam(DefaultParam):
      """Integer parameter."""

      type: Literal["int"] = "int"
-     default: Optional[int] = Field(
-         default=None,
-         description="A default value if parameter does not pass.",
-     )

      def receive(self, value: int | None = None) -> int | None:
          """Receive value that match with int.
@@ -222,36 +233,84 @@ class ChoiceParam(BaseParam):
          return value


- # TODO: Not implement this parameter yet
  class MapParam(DefaultParam):  # pragma: no cov
+     """Map parameter."""

      type: Literal["map"] = "map"
-     default: dict[Any, Any] = Field(default_factory=dict)
+     default: dict[Any, Any] = Field(
+         default_factory=dict,
+         description="A default dict that make from the dict built-in func.",
+     )
+
+     def receive(
+         self,
+         value: Optional[Union[dict[Any, Any], str]] = None,
+     ) -> dict[Any, Any]:
+         """Receive value that match with map type.

-     def receive(self, value: Optional[dict[Any, Any]] = None) -> dict[Any, Any]:
+         :param value: A value that want to validate with map parameter type.
+         :rtype: dict[Any, Any]
+         """
          if value is None:
              return self.default

+         if isinstance(value, str):
+             try:
+                 value: dict[Any, Any] = str2dict(value)
+             except ValueError as e:
+                 raise ParamValueException(
+                     f"Value that want to convert to map does not support for "
+                     f"type: {type(value)}"
+                 ) from e
+         elif not isinstance(value, dict):
+             raise ParamValueException(
+                 f"Value of map param support only string-dict or dict type, "
+                 f"not {type(value)}"
+             )
+         return value
+

- # TODO: Not implement this parameter yet
  class ArrayParam(DefaultParam):  # pragma: no cov
+     """Array parameter."""

      type: Literal["array"] = "array"
-     default: list[Any] = Field(default_factory=list)
+     default: list[Any] = Field(
+         default_factory=list,
+         description="A default list that make from the list built-in func.",
+     )

-     def receive(self, value: Optional[list[T]] = None) -> list[T]:
+     def receive(
+         self, value: Optional[Union[list[T], tuple[T, ...], str]] = None
+     ) -> list[T]:
+         """Receive value that match with array type.
+
+         :param value: A value that want to validate with array parameter type.
+         :rtype: list[Any]
+         """
          if value is None:
              return self.default
-         if not isinstance(value, list):
+         if isinstance(value, str):
+             try:
+                 value: list[T] = str2list(value)
+             except ValueError as e:
+                 raise ParamValueException(
+                     f"Value that want to convert to array does not support for "
+                     f"type: {type(value)}"
+                 ) from e
+         elif isinstance(value, (tuple, set)):
+             return list(value)
+         elif not isinstance(value, list):
              raise ParamValueException(
-                 f"Value that want to convert to array does not support for "
-                 f"type: {type(value)}"
+                 f"Value of map param support only string-list or list type, "
+                 f"not {type(value)}"
              )
          return value


  Param = Annotated[
      Union[
+         MapParam,
+         ArrayParam,
          ChoiceParam,
          DatetimeParam,
          DateParam,
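
The two parameter models drop their "not implement yet" TODOs and gain real coercion: strings go through `ddeutil.core.str2dict`/`str2list`, tuples and sets normalize to lists, and anything else raises `ParamValueException`. A minimal sketch, assuming `str2dict`/`str2list` accept dict- and list-shaped literals as their names suggest:

```python
from ddeutil.workflow.params import ArrayParam, MapParam

map_param = MapParam()
assert map_param.receive({"key": "value"}) == {"key": "value"}
assert map_param.receive('{"key": "value"}') == {"key": "value"}  # str -> dict
assert map_param.receive() == {}  # no value falls back to the default dict

array_param = ArrayParam()
assert array_param.receive([1, 2, 3]) == [1, 2, 3]
assert array_param.receive((1, 2, 3)) == [1, 2, 3]    # tuple normalized to list
assert array_param.receive("[1, 2, 3]") == [1, 2, 3]  # str -> list
```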
ddeutil/workflow/result.py CHANGED
@@ -3,8 +3,8 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- """This is the Result module. It is the data context transfer objects that use
- by all object in this package. This module provide Result dataclass.
+ """Result module. It is the data context transfer objects that use by all object
+ in this package. This module provide Status enum object and Result dataclass.
  """
  from __future__ import annotations

@@ -23,13 +23,19 @@ from .logs import TraceLog, get_dt_tznow, get_trace
  from .utils import default_gen_id, gen_id

  __all__: TupleStr = (
+     "SUCCESS",
+     "FAILED",
+     "WAIT",
+     "SKIP",
      "Result",
      "Status",
  )


  class Status(IntEnum):
-     """Status Int Enum object."""
+     """Status Int Enum object that use for tracking execution status to the
+     Result dataclass object.
+     """

      SUCCESS: int = 0
      FAILED: int = 1
@@ -37,8 +43,17 @@ class Status(IntEnum):
      SKIP: int = 3


+ SUCCESS = Status.SUCCESS
+ FAILED = Status.FAILED
+ WAIT = Status.WAIT
+ SKIP = Status.SKIP
+
+
  @dataclass(
-     config=ConfigDict(arbitrary_types_allowed=True, use_enum_values=True)
+     config=ConfigDict(
+         arbitrary_types_allowed=True,
+         use_enum_values=True,
+     ),
  )
  class Result:
      """Result Pydantic Model for passing and receiving data context from any
@@ -49,8 +64,9 @@ class Result:
      and ``_run_id`` fields to comparing with other result instance.
      """

-     status: Status = field(default=Status.WAIT)
+     status: Status = field(default=WAIT)
      context: DictData = field(default_factory=dict)
+     errors: DictData = field(default_factory=dict)
      run_id: Optional[str] = field(default_factory=default_gen_id)
      parent_run_id: Optional[str] = field(default=None, compare=False)
      ts: datetime = field(default_factory=get_dt_tznow, compare=False)
@@ -64,9 +80,16 @@ class Result:
          run_id: str | None = None,
          parent_run_id: str | None = None,
          id_logic: str | None = None,
-     ) -> Self:  # pragma: no cov
+     ) -> Self:
          """Create the Result object or set parent running id if passing Result
          object.
+
+         :param result:
+         :param run_id:
+         :param parent_run_id:
+         :param id_logic:
+
+         :rtype: Self
          """
          if result is None:
              result: Result = cls(
@@ -101,18 +124,23 @@ class Result:
          self,
          status: int | Status,
          context: DictData | None = None,
+         error: DictData | None = None,
      ) -> Self:
          """Catch the status and context to this Result object. This method will
          use between a child execution return a result, and it wants to pass
          status and context to this object.

-         :param status:
-         :param context:
+         :param status: A status enum object.
+         :param context: A context data that will update to the current context.
+         :param error: An error data that will update to the current errors.
+
+         :rtype: Self
          """
          self.__dict__["status"] = (
              Status(status) if isinstance(status, int) else status
          )
          self.__dict__["context"].update(context or {})
+         self.__dict__["errors"].update(error or {})
          return self

      def alive_time(self) -> float:  # pragma: no cov
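
`Result` now keeps failure details in a dedicated `errors` mapping beside `context`, and `catch` merges its new `error` argument into it. A short sketch that pairs the field with the module's own `to_dict` helper:

```python
from ddeutil.workflow.exceptions import to_dict
from ddeutil.workflow.result import FAILED, Result

result: Result = Result()  # status defaults to WAIT
try:
    raise RuntimeError("stage exploded")  # hypothetical failure
except RuntimeError as exc:
    result.catch(
        status=FAILED,
        context={"outputs": {}},
        error={"demo-stage": to_dict(exc)},  # "demo-stage" is an invented key
    )

assert result.status == FAILED
assert result.errors["demo-stage"]["name"] == "RuntimeError"
```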
ddeutil/workflow/scheduler.py CHANGED
@@ -56,14 +56,13 @@ from .conf import Loader, SimLoad, config, get_logger
  from .cron import On
  from .exceptions import ScheduleException, WorkflowException
  from .logs import Audit, get_audit
- from .result import Result, Status
+ from .result import SUCCESS, Result
  from .utils import batch, delay
  from .workflow import Release, ReleaseQueue, Workflow, WorkflowTask

  P = ParamSpec("P")
- logger = get_logger("ddeutil.workflow")

- # NOTE: Adjust logging level on the `schedule` package.
+ logger = get_logger("ddeutil.workflow")
  logging.getLogger("schedule").setLevel(logging.INFO)


@@ -393,7 +392,7 @@ class Schedule(BaseModel):
              audit=audit,
          )

-         return result.catch(status=Status.SUCCESS)
+         return result.catch(status=SUCCESS)


  ResultOrCancel = Union[type[CancelJob], Result]
@@ -572,9 +571,7 @@ def schedule_task(
          f"[SCHEDULE]: End schedule task that run since "
          f"{current_date:%Y-%m-%d %H:%M:%S} {'=' * 30}"
      )
-     return result.catch(
-         status=Status.SUCCESS, context={"task_date": current_date}
-     )
+     return result.catch(status=SUCCESS, context={"task_date": current_date})


  def monitor(
@@ -690,7 +687,7 @@ def scheduler_pending(
          f"[SCHEDULE]: Queue: {[list(queue[wf].queue) for wf in queue]}"
      )
      return result.catch(
-         status=Status.SUCCESS,
+         status=SUCCESS,
          context={
              "threads": [
                  {
@@ -759,7 +756,7 @@ def schedule_control(
          audit=audit,
      )

-     return result.catch(status=Status.SUCCESS, context={"schedules": schedules})
+     return result.catch(status=SUCCESS, context={"schedules": schedules})


  def schedule_runner(
ddeutil/workflow/stages.py CHANGED
@@ -4,18 +4,18 @@
  # license information.
  # ------------------------------------------------------------------------------
  # [x] Use dynamic config
- """Stage Model that use for getting stage data template from the Job Model.
- The stage handle the minimize task that run in some thread (same thread at
- its job owner) that mean it is the lowest executor of a workflow that can
- tracking logs.
+ """Stage model. It stores all stage model that use for getting stage data template
+ from the Job Model. The stage handle the minimize task that run in some thread
+ (same thread at its job owner) that mean it is the lowest executor of a workflow
+ that can tracking logs.

      The output of stage execution only return 0 status because I do not want to
  handle stage error on this stage model. I think stage model should have a lot of
  use-case, and it does not worry when I want to create a new one.

-     Execution   -->  Ok      --> Result with 0
+     Execution   -->  Ok      --> Result with SUCCESS

-                 -->  Error   ┬-> Result with 1 (if env var was set)
+                 -->  Error   ┬-> Result with FAILED (if env var was set)
                               ╰-> Raise StageException(...)

      On the context I/O that pass to a stage object at execute process. The
@@ -52,7 +52,7 @@ from typing_extensions import Self
  from .__types import DictData, DictStr, TupleStr
  from .conf import dynamic
  from .exceptions import StageException, to_dict
- from .result import Result, Status
+ from .result import FAILED, SUCCESS, Result, Status
  from .reusables import TagFunc, extract_call, not_in_template, param2template
  from .utils import (
      gen_id,
@@ -80,6 +80,10 @@ class BaseStage(BaseModel, ABC):
      This class is the abstraction class for any stage class.
      """

+     extras: DictData = Field(
+         default_factory=dict,
+         description="An extra override config values.",
+     )
      id: Optional[str] = Field(
          default=None,
          description=(
@@ -95,10 +99,6 @@ class BaseStage(BaseModel, ABC):
          description="A stage condition statement to allow stage executable.",
          alias="if",
      )
-     extras: DictData = Field(
-         default_factory=dict,
-         description="An extra override config values.",
-     )

      @property
      def iden(self) -> str:
@@ -170,12 +170,12 @@ class BaseStage(BaseModel, ABC):
          specific environment variable,`WORKFLOW_CORE_STAGE_RAISE_ERROR`.

          Execution   -->  Ok      --> Result
-                                       |-status: Status.SUCCESS
+                                       |-status: SUCCESS
                                        ╰-context:
                                            ╰-outputs: ...

                      -->  Error   --> Result (if env var was set)
-                                       |-status: Status.FAILED
+                                       |-status: FAILED
                                        ╰-errors:
                                            |-class: ...
                                            |-name: ...
@@ -209,26 +209,24 @@ class BaseStage(BaseModel, ABC):

          try:
              rs: Result = self.execute(params, result=result, event=event)
-             if to is not None:
-                 return self.set_outputs(rs.context, to=to)
-             return rs
-         except Exception as err:
-             result.trace.error(f"[STAGE]: {err.__class__.__name__}: {err}")
+             return self.set_outputs(rs.context, to=to) if to is not None else rs
+         except Exception as e:
+             result.trace.error(f"[STAGE]: {e.__class__.__name__}: {e}")

              if dynamic("stage_raise_error", f=raise_error, extras=self.extras):
-                 if isinstance(err, StageException):
+                 if isinstance(e, StageException):
                      raise

                  raise StageException(
                      f"{self.__class__.__name__}: \n\t"
-                     f"{err.__class__.__name__}: {err}"
-                 ) from None
+                     f"{e.__class__.__name__}: {e}"
+                 ) from e

-             errors: DictData = {"errors": to_dict(err)}
+             errors: DictData = {"errors": to_dict(e)}
              if to is not None:
                  return self.set_outputs(errors, to=to)

-             return result.catch(status=Status.FAILED, context=errors)
+             return result.catch(status=FAILED, context=errors)

      def set_outputs(self, output: DictData, to: DictData) -> DictData:
          """Set an outputs from execution process to the received context. The
@@ -315,8 +313,8 @@ class BaseStage(BaseModel, ABC):
              if not isinstance(rs, bool):
                  raise TypeError("Return type of condition does not be boolean")
              return not rs
-         except Exception as err:
-             raise StageException(f"{err.__class__.__name__}: {err}") from err
+         except Exception as e:
+             raise StageException(f"{e.__class__.__name__}: {e}") from e


  class BaseAsyncStage(BaseStage):
@@ -393,25 +391,23 @@ class BaseAsyncStage(BaseStage):
              if to is not None:
                  return self.set_outputs(rs.context, to=to)
              return rs
-         except Exception as err:
-             await result.trace.aerror(
-                 f"[STAGE]: {err.__class__.__name__}: {err}"
-             )
+         except Exception as e:
+             await result.trace.aerror(f"[STAGE]: {e.__class__.__name__}: {e}")

              if dynamic("stage_raise_error", f=raise_error, extras=self.extras):
-                 if isinstance(err, StageException):
+                 if isinstance(e, StageException):
                      raise

                  raise StageException(
                      f"{self.__class__.__name__}: \n\t"
-                     f"{err.__class__.__name__}: {err}"
+                     f"{e.__class__.__name__}: {e}"
                  ) from None

-             errors: DictData = {"errors": to_dict(err)}
+             errors: DictData = {"errors": to_dict(e)}
              if to is not None:
                  return self.set_outputs(errors, to=to)

-             return result.catch(status=Status.FAILED, context=errors)
+             return result.catch(status=FAILED, context=errors)


  class EmptyStage(BaseAsyncStage):
@@ -472,7 +468,7 @@ class EmptyStage(BaseAsyncStage):
              result.trace.info(f"[STAGE]: ... sleep ({self.sleep} seconds)")
              time.sleep(self.sleep)

-         return result.catch(status=Status.SUCCESS)
+         return result.catch(status=SUCCESS)

      async def axecute(
          self,
@@ -510,7 +506,7 @@ class EmptyStage(BaseAsyncStage):
              )
              await asyncio.sleep(self.sleep)

-         return result.catch(status=Status.SUCCESS)
+         return result.catch(status=SUCCESS)


  class BashStage(BaseStage):
@@ -619,17 +615,17 @@ class BashStage(BaseStage):

          if rs.returncode > 0:
              # NOTE: Prepare stderr message that returning from subprocess.
-             err: str = (
+             e: str = (
                  rs.stderr.encode("utf-8").decode("utf-16")
                  if "\\x00" in rs.stderr
                  else rs.stderr
              ).removesuffix("\n")
              raise StageException(
-                 f"Subprocess: {err}\nRunning Statement:\n---\n"
+                 f"Subprocess: {e}\nRunning Statement:\n---\n"
                  f"```bash\n{bash}\n```"
              )
          return result.catch(
-             status=Status.SUCCESS,
+             status=SUCCESS,
              context={
                  "return_code": rs.returncode,
                  "stdout": None if (out := rs.stdout.strip("\n")) == "" else out,
@@ -749,7 +745,7 @@ class PyStage(BaseStage):
          )

          return result.catch(
-             status=Status.SUCCESS, context={"locals": lc, "globals": gb}
+             status=SUCCESS, context={"locals": lc, "globals": gb}
          )


@@ -871,7 +867,7 @@ class CallStage(BaseStage):
                  f"Return type: '{t_func.name}@{t_func.tag}' does not serialize "
                  f"to result model, you change return type to `dict`."
              )
-         return result.catch(status=Status.SUCCESS, context=rs)
+         return result.catch(status=SUCCESS, context=rs)


  class TriggerStage(BaseStage):
@@ -999,19 +995,11 @@ class ParallelStage(BaseStage):  # pragma: no cov
                  ).context,
                  to=context,
              )
-         except StageException as err:  # pragma: no cov
+         except StageException as e:  # pragma: no cov
              result.trace.error(
-                 f"[STAGE]: Catch:\n\t{err.__class__.__name__}:" f"\n\t{err}"
-             )
-             context.update(
-                 {
-                     "errors": {
-                         "class": err,
-                         "name": err.__class__.__name__,
-                         "message": f"{err.__class__.__name__}: {err}",
-                     },
-                 },
+                 f"[STAGE]: Catch:\n\t{e.__class__.__name__}:" f"\n\t{e}"
              )
+             context.update({"errors": e.to_dict()})
          return context

      def execute(
@@ -1041,7 +1029,7 @@ class ParallelStage(BaseStage):  # pragma: no cov
              f"[STAGE]: Parallel-Execute with {self.max_parallel_core} cores."
          )
          rs: DictData = {"parallel": {}}
-         status = Status.SUCCESS
+         status = SUCCESS
          with ThreadPoolExecutor(
              max_workers=self.max_parallel_core,
              thread_name_prefix="parallel_stage_exec_",
@@ -1065,7 +1053,7 @@ class ParallelStage(BaseStage):  # pragma: no cov
                  rs["parallel"][context.pop("branch")] = context

                  if "errors" in context:
-                     status = Status.FAILED
+                     status = FAILED

          return result.catch(status=status, context=rs)

@@ -1144,7 +1132,7 @@ class ForEachStage(BaseStage):

          result.trace.info(f"[STAGE]: Foreach-Execute: {foreach!r}.")
          rs: DictData = {"items": foreach, "foreach": {}}
-         status = Status.SUCCESS
+         status: Status = SUCCESS
          # TODO: Implement concurrent more than 1.
          for item in foreach:
              result.trace.debug(f"[STAGE]: Execute foreach item: {item!r}")
@@ -1161,21 +1149,12 @@ class ForEachStage(BaseStage):
                      ).context,
                      to=context,
                  )
-             except StageException as err:  # pragma: no cov
-                 status = Status.FAILED
+             except StageException as e:  # pragma: no cov
+                 status = FAILED
                  result.trace.error(
-                     f"[STAGE]: Catch:\n\t{err.__class__.__name__}:"
-                     f"\n\t{err}"
-                 )
-                 context.update(
-                     {
-                         "errors": {
-                             "class": err,
-                             "name": err.__class__.__name__,
-                             "message": f"{err.__class__.__name__}: {err}",
-                         },
-                     },
+                     f"[STAGE]: Catch:\n\t{e.__class__.__name__}:" f"\n\t{e}"
                  )
+                 context.update({"errors": e.to_dict()})

              rs["foreach"][item] = context

@@ -1288,7 +1267,7 @@ class CaseStage(BaseStage):  # pragma: no cov
          result: Result = Result(
              run_id=gen_id(self.name + (self.id or ""), unique=True)
          )
-         status = Status.SUCCESS
+         status = SUCCESS
          _case = param2template(self.case, params, extras=self.extras)
          _else = None
          context = {}
@@ -1310,21 +1289,12 @@ class CaseStage(BaseStage):  # pragma: no cov
                      ).context,
                      to=context,
                  )
-             except StageException as err:  # pragma: no cov
-                 status = Status.FAILED
+             except StageException as e:  # pragma: no cov
+                 status = FAILED
                  result.trace.error(
-                     f"[STAGE]: Catch:\n\t{err.__class__.__name__}:"
-                     f"\n\t{err}"
-                 )
-                 context.update(
-                     {
-                         "errors": {
-                             "class": err,
-                             "name": err.__class__.__name__,
-                             "message": f"{err.__class__.__name__}: {err}",
-                         },
-                     },
+                     f"[STAGE]: Catch:\n\t{e.__class__.__name__}:" f"\n\t{e}"
                  )
+                 context.update({"errors": e.to_dict()})

          return result.catch(status=status, context=context)

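The parallel, foreach, and case branches all replace their hand-built error dicts with `e.to_dict()`, which `StageException` inherits from `BaseWorkflowException`, so an `errors` entry in a branch context now always carries the same `ErrorData` shape. A hedged sketch of the shared pattern:

```python
from ddeutil.workflow.exceptions import StageException

context: dict = {}
try:
    raise StageException("PyStage: \n\tZeroDivisionError: division by zero")
except StageException as e:
    # One call replaces the inline {"class": ..., "name": ..., "message": ...}.
    context.update({"errors": e.to_dict()})

assert context["errors"]["name"] == "StageException"
```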
ddeutil/workflow/workflow.py CHANGED
@@ -35,10 +35,10 @@ from .__types import DictData, TupleStr
  from .conf import Loader, SimLoad, dynamic, get_logger
  from .cron import On
  from .exceptions import JobException, WorkflowException
- from .job import Job, TriggerState
+ from .job import Job
  from .logs import Audit, get_audit
  from .params import Param
- from .result import Result, Status
+ from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
  from .reusables import has_template, param2template
  from .utils import (
      gen_id,
@@ -673,7 +673,7 @@ class Workflow(BaseModel):
          )

          return result.catch(
-             status=Status.SUCCESS,
+             status=SUCCESS,
              context={
                  "params": params,
                  "release": {
@@ -804,7 +804,7 @@ class Workflow(BaseModel):
              result.trace.info(
                  f"[POKING]: {self.name!r} does not have any schedule to run."
              )
-             return result.catch(status=Status.SUCCESS, context={"outputs": []})
+             return result.catch(status=SUCCESS, context={"outputs": []})

          # NOTE: Create the current date that change microsecond to 0
          current_date: datetime = datetime.now(
@@ -850,7 +850,7 @@ class Workflow(BaseModel):
              result.trace.info(
                  f"[POKING]: {self.name!r} does not have any queue."
              )
-             return result.catch(status=Status.SUCCESS, context={"outputs": []})
+             return result.catch(status=SUCCESS, context={"outputs": []})

          # NOTE: Start create the thread pool executor for running this poke
          # process.
@@ -912,7 +912,7 @@ class Workflow(BaseModel):
                  context.append(future.result())

          return result.catch(
-             status=Status.SUCCESS,
+             status=SUCCESS,
              context={"outputs": context},
          )

@@ -991,7 +991,7 @@ class Workflow(BaseModel):
                  "Handle error from the job execution does not support yet."
              ) from None

-         return result.catch(status=Status.SUCCESS, context=params)
+         return result.catch(status=SUCCESS, context=params)

      def execute(
          self,
@@ -1048,7 +1048,7 @@ class Workflow(BaseModel):
              result.trace.warning(
                  f"[WORKFLOW]: {self.name!r} does not have any jobs"
              )
-             return result.catch(status=Status.SUCCESS, context=params)
+             return result.catch(status=SUCCESS, context=params)

          # NOTE: Create a job queue that keep the job that want to run after
          # its dependency condition.
@@ -1065,7 +1065,7 @@ class Workflow(BaseModel):
          # }
          #
          context: DictData = self.parameterize(params)
-         status: Status = Status.SUCCESS
+         status: Status = SUCCESS
          try:
              if (
                  dynamic(
@@ -1091,7 +1091,7 @@ class Workflow(BaseModel):
                      event=event,
                  )
          except WorkflowException as err:
-             status = Status.FAILED
+             status = FAILED
              context.update({"errors": err.to_dict()})

          return result.catch(status=status, context=context)
@@ -1142,16 +1142,16 @@ class Workflow(BaseModel):
              job_id: str = job_queue.get()
              job: Job = self.jobs[job_id]

-             if (check := job.check_needs(context["jobs"])).is_waiting():
+             if (check := job.check_needs(context["jobs"])) == WAIT:
                  job_queue.task_done()
                  job_queue.put(job_id)
                  time.sleep(0.15)
                  continue
-             elif check == TriggerState.failed:  # pragma: no cov
+             elif check == FAILED:  # pragma: no cov
                  raise WorkflowException(
                      "Check job trigger rule was failed."
                  )
-             elif check == TriggerState.skipped:  # pragma: no cov
+             elif check == SKIP:  # pragma: no cov
                  result.trace.info(f"[JOB]: Skip job: {job_id!r}")
                  job.set_outputs({"SKIP": {"skipped": True}}, to=context)
                  job_queue.task_done()
@@ -1249,14 +1249,14 @@ class Workflow(BaseModel):
              job_id: str = job_queue.get()
              job: Job = self.jobs[job_id]

-             if (check := job.check_needs(context["jobs"])).is_waiting():
+             if (check := job.check_needs(context["jobs"])) == WAIT:
                  job_queue.task_done()
                  job_queue.put(job_id)
                  time.sleep(0.075)
                  continue
-             elif check == TriggerState.failed:  # pragma: no cov
+             elif check == FAILED:  # pragma: no cov
                  raise WorkflowException("Check job trigger rule was failed.")
-             elif check == TriggerState.skipped:  # pragma: no cov
+             elif check == SKIP:  # pragma: no cov
                  result.trace.info(f"[JOB]: Skip job: {job_id!r}")
                  job.set_outputs({"SKIP": {"skipped": True}}, to=context)
                  job_queue.task_done()
{ddeutil_workflow-0.0.42.dist-info → ddeutil_workflow-0.0.43.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.42
+ Version: 0.0.43
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -22,7 +22,7 @@ Classifier: Programming Language :: Python :: 3.13
  Requires-Python: >=3.9.13
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: ddeutil>=0.4.6
+ Requires-Dist: ddeutil[checksum]>=0.4.6
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.10
  Requires-Dist: pydantic==2.11.1
  Requires-Dist: python-dotenv==1.1.0
@@ -212,7 +212,7 @@ execution time such as `run-date` should change base on that workflow running da
  ```python
  from ddeutil.workflow import Workflow, Result

- workflow: Workflow = Workflow.from_loader('run-py-local')
+ workflow: Workflow = Workflow.from_conf('run-py-local')
  result: Result = workflow.execute(
      params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
  )
@@ -246,7 +246,7 @@ from ddeutil.workflow import Schedule

  (
      Schedule
-     .from_loader("schedule-run-local-wf")
+     .from_conf("schedule-run-local-wf")
      .pending(stop=None)
  )
  ```
@@ -261,31 +261,31 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

- | Name | Component | Default | Description |
- |:-----|:---------:|:--------|:------------|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **TRACE_ENABLE_WRITE** | Log | `false` | |
- | **AUDIT_PATH** | Log | `./audits` | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Override | Description |
+ |:-----|:---------:|:--------|:--------:|:------------|
+ | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
+ | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
+ | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
+ | **AUDIT_PATH** | Log | `./audits` | No | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |

  **API Application**:
{ddeutil_workflow-0.0.42.dist-info → ddeutil_workflow-0.0.43.dist-info}/RECORD RENAMED
@@ -1,19 +1,19 @@
- ddeutil/workflow/__about__.py,sha256=B3KiwXw9ATZmMG1vER6qdPImMLkQPPjYkRvYepuIhF4,28
+ ddeutil/workflow/__about__.py,sha256=feByUkEJfWx0d1Lb9oGvd2f3clHHuGXDT7T7Ki5zoEA,28
  ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
- ddeutil/workflow/__init__.py,sha256=cYWwG2utpsYvdwqvkFSRWi_Q6gylDgNQBcIWcF5NFs4,1861
+ ddeutil/workflow/__init__.py,sha256=m7ZTCuUOarcTKJuXOyuaXd5WTIO7NTkqCeCrNX3d5i8,1943
  ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
  ddeutil/workflow/conf.py,sha256=lDzWiVSNlNAhTzxbNIhIbQAIF1ggbmetAp0yn2fgnsc,12385
  ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
- ddeutil/workflow/exceptions.py,sha256=fO37f9p7lOjIJgVOpKE_1X44yJTwBepyukZV9a7NNm4,1241
- ddeutil/workflow/job.py,sha256=vsayKMzwKDpjchgYQnbshZHnp-vuM9CobpFWhUJETRU,30315
+ ddeutil/workflow/exceptions.py,sha256=uLNxzav3HRcr4vaZnvbUIF_eTR6UXXZNaxroMWFOUL4,1418
+ ddeutil/workflow/job.py,sha256=6CqLb1F_z3mHU_rOVRY7Z2V2B1tY3p7FZIJgb8DM1YE,29969
  ddeutil/workflow/logs.py,sha256=RkM5o_JPoWhFY7NrbYAARZQWjLC62YB_FYzTTcyDp8U,19816
- ddeutil/workflow/params.py,sha256=Mv-D2DY5inm1ug0lsgCPDkO5wT_AUhc5XEF5jxgDx6U,8036
- ddeutil/workflow/result.py,sha256=ynZB0g_vEEXn24034J-hatjNWDBmRAj38S8SqGRM-8I,4029
+ ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
+ ddeutil/workflow/result.py,sha256=iwkUzOubxhLCuO-ngWEWL6t-CpYBpINIIO_ubg4kz14,4701
  ddeutil/workflow/reusables.py,sha256=AtZO83HDFu1uK_azUinv5d8jsA36f2i3n_tqMrolbvc,17529
- ddeutil/workflow/scheduler.py,sha256=wFEgcnxtgF-8y5otv8RqT1MuBttZl7mu-bBu5ffwV_Y,27534
- ddeutil/workflow/stages.py,sha256=prw1-za1zwYehbrjeAnoJ79GxpfTqdKLsI2PY0OuSlY,48417
+ ddeutil/workflow/scheduler.py,sha256=_MDsEHbBVOeF-381U8DfIMDyca_nG3XNXmgX4229_EU,27437
+ ddeutil/workflow/stages.py,sha256=RMyOU9KqPhS-49kQduT6-iysGJZFwX0Cbgo9o8kzIAY,47304
  ddeutil/workflow/utils.py,sha256=sblje9qOtejCHVt8EVrbC0KY98vKqvxccaR5HIkRiTA,7363
- ddeutil/workflow/workflow.py,sha256=Y1D5arh2KSobkIZGJ1fWSTe15heURi9OhhdfIr0jHyo,50591
+ ddeutil/workflow/workflow.py,sha256=LPZzGNf55Tca0XXQZytTiyWbocC1T47cB_XsWpiXdyc,50482
  ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
  ddeutil/workflow/api/api.py,sha256=b-bMg0aRsEqt8Qb2hNUtamEt2Fq2CgNotF2oXSAdDu8,5226
  ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
@@ -23,8 +23,8 @@ ddeutil/workflow/api/routes/job.py,sha256=YVta083i8vU8-o4WdKFwDpfdC9vN1dZ6goZSmN
  ddeutil/workflow/api/routes/logs.py,sha256=TeRDrEelbKS2Hu_EovgLh0bOdmSv9mfnrIZsrE7uPD4,5353
  ddeutil/workflow/api/routes/schedules.py,sha256=rUWBm5RgLS1PNBHSWwWXJ0l-c5mYWfl9os0BA9_OTEw,4810
  ddeutil/workflow/api/routes/workflows.py,sha256=ctgQGxXfpIV6bHFDM9IQ1_qaQHT6n5-HjJ1-D4GKWpc,4527
- ddeutil_workflow-0.0.42.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.42.dist-info/METADATA,sha256=TJp1M40eLXYOInkTpl_XhFOWGHLd0hIHktXQXiFsmEw,18853
- ddeutil_workflow-0.0.42.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- ddeutil_workflow-0.0.42.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.42.dist-info/RECORD,,
+ ddeutil_workflow-0.0.43.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.43.dist-info/METADATA,sha256=eOwkhUh-w_bSW0LOlOCielBQTg0mvf2gEkkNMlbwIU4,19134
+ ddeutil_workflow-0.0.43.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ ddeutil_workflow-0.0.43.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.43.dist-info/RECORD,,