ddeutil-workflow 0.0.57__py3-none-any.whl → 0.0.59__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__version__: str = "0.0.57"
+__version__: str = "0.0.59"
@@ -699,9 +699,9 @@ class CronJob:
 
     def schedule(
         self,
-        date: datetime | None = None,
+        date: Optional[datetime] = None,
         *,
-        tz: str | None = None,
+        tz: Optional[str] = None,
     ) -> CronRunner:
         """Returns CronRunner instance that be datetime runner with this
         cronjob. It can use `next`, `prev`, or `reset` methods to generate
@@ -766,7 +766,7 @@ class CronRunner:
     def __init__(
         self,
         cron: CronJob | CronJobYear,
-        date: datetime | None = None,
+        date: Optional[datetime] = None,
         *,
         tz: str | ZoneInfo | None = None,
     ) -> None:
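Note: Optional[X] and the PEP 604 spelling X | None describe the same type, so these signature changes are behaviour-preserving; the Optional form simply also evaluates at runtime on Python versions before 3.10. A minimal sketch (the bare function below is illustrative, not part of the package):

    from datetime import datetime
    from typing import Optional, get_type_hints

    # Optional[datetime] is the same type as "datetime | None"; unlike the
    # union syntax, it can also be evaluated at runtime on Python < 3.10 when
    # a library (e.g. Pydantic) resolves annotations instead of reading strings.
    def schedule(date: Optional[datetime] = None) -> None:
        ...

    print(get_type_hints(schedule))
    # {'date': typing.Optional[datetime.datetime], 'return': <class 'NoneType'>}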
ddeutil/workflow/conf.py CHANGED
@@ -5,6 +5,7 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations
 
+import copy
 import json
 import os
 from abc import ABC, abstractmethod
@@ -26,13 +27,13 @@ T = TypeVar("T")
 PREFIX: Final[str] = "WORKFLOW"
 
 
-def env(var: str, default: str | None = None) -> str | None:
+def env(var: str, default: Optional[str] = None) -> Optional[str]:
     """Get environment variable with uppercase and adding prefix string.
 
     :param var: (str) A env variable name.
-    :param default: (str | None) A default value if an env var does not set.
+    :param default: (Optional[str]) A default value if an env var does not set.
 
-    :rtype: str | None
+    :rtype: Optional[str]
     """
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
 
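Note: the env helper keeps its lookup rule; only the annotations change. A self-contained sketch of the behaviour documented above (the WORKFLOW_LOG_LEVEL variable is just an example):

    import os
    from typing import Optional

    PREFIX = "WORKFLOW"

    def env(var: str, default: Optional[str] = None) -> Optional[str]:
        # Same rule as the helper above: add the prefix, upper-case the name,
        # and turn spaces into underscores before reading the environment.
        return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)

    os.environ["WORKFLOW_LOG_LEVEL"] = "DEBUG"
    assert env("log level") == "DEBUG"           # reads WORKFLOW_LOG_LEVEL
    assert env("missing", default="x") == "x"    # falls back to the default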
@@ -218,7 +219,7 @@ class BaseLoad(ABC): # pragma: no cov
 
 class FileLoad(BaseLoad):
     """Base Load object that use to search config data by given some identity
-    value like name of `Workflow` or `On` templates.
+    value like name of `Workflow` or `Crontab` templates.
 
     :param name: (str) A name of key of config data that read with YAML
         Environment object.
@@ -298,6 +299,7 @@ class FileLoad(BaseLoad):
                 f"Multi-config paths does not support for type: {type(paths)}"
             )
         else:
+            paths: list[Path] = copy.deepcopy(paths)
             paths.append(path)
 
         all_data: list[tuple[float, DictData]] = []
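Note: the added copy.deepcopy call means the caller's paths list is no longer mutated by the append that follows. A minimal sketch of the difference (the function and variable names here are illustrative, not the package's):

    import copy
    from pathlib import Path

    def with_extra_path(paths: list[Path], extra: Path) -> list[Path]:
        paths = copy.deepcopy(paths)  # work on a copy, not the caller's list
        paths.append(extra)
        return paths

    user_paths = [Path("/etc/workflow")]
    with_extra_path(user_paths, Path("./conf"))
    assert user_paths == [Path("/etc/workflow")]  # caller's list is unchanged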
@@ -398,7 +400,7 @@ class FileLoad(BaseLoad):
         return is_ignored(file, read_ignore(path / ignore_filename))
 
     @classmethod
-    def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
+    def filter_yaml(cls, file: Path, name: Optional[str] = None) -> DictData:
         """Read a YAML file context from an input file path and specific name.
 
         :param file: (Path) A file path that want to extract YAML context.
ddeutil/workflow/event.py CHANGED
@@ -3,14 +3,14 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-"""Event module that store all event object. Now, it has only `On` and `OnYear`
-model these are schedule with crontab event.
+"""Event module that store all event object. Now, it has only `Crontab` and
+`CrontabYear` model these are schedule with crontab event.
 """
 from __future__ import annotations
 
 from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Any, Literal, Union
+from typing import Annotated, Any, Literal, Optional, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
 
 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -28,7 +28,7 @@ Interval = Literal["daily", "weekly", "monthly"]
 def interval2crontab(
     interval: Interval,
     *,
-    day: str | None = None,
+    day: Optional[str] = None,
     time: str = "00:00",
 ) -> str:
     """Return the crontab string that was generated from specific values.
@@ -63,9 +63,9 @@ def interval2crontab(
     return f"{h} {m} {'1' if interval == 'monthly' else '*'} * {d}"
 
 
-class On(BaseModel):
-    """On model (Warped crontab object by Pydantic model) to keep crontab value
-    and generate CronRunner object from this crontab value.
+class Crontab(BaseModel):
+    """Cron event model (Warped the CronJob object by Pydantic model) to keep
+    crontab value and generate CronRunner object from this crontab value.
 
     Methods:
     - generate: is the main use-case of this schedule object.
@@ -128,7 +128,7 @@ class On(BaseModel):
         extras: DictData | None = None,
     ) -> Self:
         """Constructor from the name of config loader that will use loader
-        object for getting the `On` data.
+        object for getting the `Crontab` data.
 
         :param name: (str) A name of config that will get from loader.
         :param extras: (DictData) An extra parameter that use to override core
@@ -172,7 +172,7 @@ class On(BaseModel):
     def __prepare_values(cls, data: Any) -> Any:
         """Extract tz key from value and change name to timezone key.
 
-        :param data: (DictData) A data that want to pass for create an On
+        :param data: (DictData) A data that want to pass for create an Crontab
             model.
 
         :rtype: DictData
@@ -265,9 +265,9 @@ class On(BaseModel):
         return runner
 
 
-class YearOn(On):
-    """On with enhance Year Pydantic model for limit year matrix that use by
-    some data schedule tools like AWS Glue.
+class CrontabYear(Crontab):
+    """Cron event with enhance Year Pydantic model for limit year matrix that
+    use by some data schedule tools like AWS Glue.
     """
 
     model_config = ConfigDict(arbitrary_types_allowed=True)
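Note: the event models are renamed from On/YearOn to Crontab/CrontabYear. This diff does not show a backward-compatible alias, so downstream imports would presumably move to the new names, roughly:

    # 0.0.57 (old names, assuming they were imported from the event module):
    # from ddeutil.workflow.event import On, YearOn

    # 0.0.59 (new names):
    from ddeutil.workflow.event import Crontab, CrontabYear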
@@ -9,7 +9,7 @@ annotate for handle error only.
 """
 from __future__ import annotations
 
-from typing import Literal, TypedDict, overload
+from typing import Literal, Optional, TypedDict, overload
 
 
 class ErrorData(TypedDict):
@@ -39,9 +39,9 @@ class BaseWorkflowException(Exception):
     making an error context to the result context.
     """
 
-    def __init__(self, message: str, *, refs: str | None = None):
+    def __init__(self, message: str, *, refs: Optional[str] = None):
         super().__init__(message)
-        self.refs: str | None = refs
+        self.refs: Optional[str] = refs
 
     @overload
     def to_dict(
ddeutil/workflow/job.py CHANGED
@@ -19,6 +19,7 @@ from __future__ import annotations
 
 import copy
 import time
+from collections.abc import Iterator
 from concurrent.futures import (
     FIRST_EXCEPTION,
     CancelledError,
@@ -67,8 +68,8 @@ def make(
 
     :param matrix: (Matrix) A matrix values that want to cross product to
         possible parallelism values.
-    :param include: (A list of additional matrix that want to adds-in.
-    :param exclude: (A list of exclude matrix that want to filter-out.
+    :param include: A list of additional matrix that want to adds-in.
+    :param exclude: A list of exclude matrix that want to filter-out.
 
     :rtype: list[DictStr]
     """
@@ -191,22 +192,22 @@ class Strategy(BaseModel):
 class Rule(str, Enum):
     """Rule enum object for assign trigger option."""
 
-    ALL_SUCCESS: str = "all_success"
-    ALL_FAILED: str = "all_failed"
-    ALL_DONE: str = "all_done"
-    ONE_FAILED: str = "one_failed"
-    ONE_SUCCESS: str = "one_success"
-    NONE_FAILED: str = "none_failed"
-    NONE_SKIPPED: str = "none_skipped"
+    ALL_SUCCESS = "all_success"
+    ALL_FAILED = "all_failed"
+    ALL_DONE = "all_done"
+    ONE_FAILED = "one_failed"
+    ONE_SUCCESS = "one_success"
+    NONE_FAILED = "none_failed"
+    NONE_SKIPPED = "none_skipped"
 
 
 class RunsOn(str, Enum):
     """Runs-On enum object."""
 
-    LOCAL: str = "local"
-    SELF_HOSTED: str = "self_hosted"
-    AZ_BATCH: str = "azure_batch"
-    DOCKER: str = "docker"
+    LOCAL = "local"
+    SELF_HOSTED = "self_hosted"
+    AZ_BATCH = "azure_batch"
+    DOCKER = "docker"
 
 
 class BaseRunsOn(BaseModel): # pragma: no cov
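Note: dropping the `: str` annotations on the members does not change how these enums behave; with the str mixin the members still compare equal to their raw values and can be looked up by value. A quick sketch:

    from enum import Enum

    class Rule(str, Enum):
        ALL_SUCCESS = "all_success"
        ALL_FAILED = "all_failed"

    assert Rule.ALL_SUCCESS == "all_success"      # str mixin: equal to the raw value
    assert Rule("all_failed") is Rule.ALL_FAILED  # lookup by value still works
    assert Rule.ALL_SUCCESS.value == "all_success"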
@@ -566,7 +567,7 @@ class Job(BaseModel):
         :param output: (DictData) A result data context that want to extract
             and transfer to the `strategies` key in receive context.
         :param to: (DictData) A received context data.
-        :param job_id: (str | None) A job ID if the `id` field does not set.
+        :param job_id: (Optional[str]) A job ID if the `id` field does not set.
 
         :rtype: DictData
         """
@@ -606,9 +607,9 @@ class Job(BaseModel):
         self,
         params: DictData,
         *,
-        run_id: str | None = None,
-        parent_run_id: str | None = None,
-        event: Event | None = None,
+        run_id: Optional[str] = None,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
@@ -676,8 +677,8 @@ def local_execute_strategy(
     strategy: DictData,
     params: DictData,
     *,
-    result: Result | None = None,
-    event: Event | None = None,
+    result: Optional[Result] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local strategy execution with passing dynamic parameters from the
     job execution and strategy matrix.
@@ -799,9 +800,9 @@ def local_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str | None = None,
-    parent_run_id: str | None = None,
-    event: Event | None = None,
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution or directly. It will generate matrix values at the first
@@ -839,7 +840,7 @@ def local_execute(
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
     workers: int = job.strategy.max_parallel
     result.trace.info(
-        f"[JOB]: Execute {ls}: {job.id} with {workers} "
+        f"[JOB]: Execute {ls}: {job.id!r} with {workers} "
         f"worker{'s' if workers > 1 else ''}."
     )
 
@@ -874,10 +875,10 @@ def local_execute(
         status: Status = SUCCESS
 
         if not fail_fast_flag:
-            done: list[Future] = as_completed(futures)
+            done: Iterator[Future] = as_completed(futures)
         else:
             done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
-            if len(done) != len(futures):
+            if len(list(done)) != len(futures):
                 result.trace.warning(
                     "[JOB]: Handler Fail-Fast: Got exception and set event."
                 )
@@ -886,9 +887,16 @@ def local_execute(
                     future.cancel()
                 time.sleep(0.075)
 
-            nd: str = f", strategies not run: {not_done}" if not_done else ""
+            nd: str = (
+                (
+                    f", {len(not_done)} strateg"
+                    f"{'ies' if len(not_done) > 1 else 'y'} not run!!!"
+                )
+                if not_done
+                else ""
+            )
             result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
-            done: list[Future] = as_completed(futures)
+            done: Iterator[Future] = as_completed(futures)
 
         for future in done:
             try:
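Note: the re-typed `done` matches what the standard library returns: as_completed() yields a lazy iterator that has no len(), while wait() returns done/not_done sets that do support len(). A small standalone sketch:

    from concurrent.futures import (
        FIRST_EXCEPTION,
        ThreadPoolExecutor,
        as_completed,
        wait,
    )

    with ThreadPoolExecutor(max_workers=2) as pool:
        futures = [pool.submit(pow, 2, n) for n in range(4)]

        # wait() returns (done, not_done) as sets, so len() works on them.
        done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
        print(len(done), len(not_done))

        # as_completed() yields futures as they finish; it is consumed by
        # iteration, not measured with len().
        for future in as_completed(futures):
            print(future.result())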
@@ -911,9 +919,9 @@ def self_hosted_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str | None = None,
-    parent_run_id: str | None = None,
-    event: Event | None = None,
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result: # pragma: no cov
     """Self-Hosted job execution with passing dynamic parameters from the
     workflow execution or itself execution. It will make request to the
974
982
  job: Job,
975
983
  params: DictData,
976
984
  *,
977
- run_id: str | None = None,
978
- parent_run_id: str | None = None,
979
- event: Event | None = None,
985
+ run_id: Optional[str] = None,
986
+ parent_run_id: Optional[str] = None,
987
+ event: Optional[Event] = None,
980
988
  ) -> Result: # pragma: no cov
981
989
  """Azure Batch job execution that will run all job's stages on the Azure
982
990
  Batch Node and extract the result file to be returning context result.
@@ -1028,9 +1036,9 @@ def docker_execution(
     job: Job,
     params: DictData,
     *,
-    run_id: str | None = None,
-    parent_run_id: str | None = None,
-    event: Event | None = None,
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ): # pragma: no cov
     """Docker job execution.