ddeutil-workflow 0.0.18__tar.gz → 0.0.20__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. {ddeutil_workflow-0.0.18/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.20}/PKG-INFO +7 -7
  2. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/README.md +4 -4
  3. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/pyproject.toml +2 -2
  4. ddeutil_workflow-0.0.20/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/__cron.py +29 -2
  6. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/__init__.py +9 -4
  7. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/conf.py +49 -40
  8. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/exceptions.py +4 -0
  9. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/job.py +58 -45
  10. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/on.py +4 -2
  11. ddeutil_workflow-0.0.20/src/ddeutil/workflow/scheduler.py +574 -0
  12. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/stage.py +92 -66
  13. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/utils.py +61 -43
  14. ddeutil_workflow-0.0.20/src/ddeutil/workflow/workflow.py +1084 -0
  15. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20/src/ddeutil_workflow.egg-info}/PKG-INFO +7 -7
  16. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil_workflow.egg-info/SOURCES.txt +12 -14
  17. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil_workflow.egg-info/requires.txt +2 -2
  18. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test__cron.py +2 -1
  19. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_conf_log.py +8 -3
  20. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_job.py +3 -7
  21. ddeutil_workflow-0.0.18/tests/test_job_strategy_run.py → ddeutil_workflow-0.0.20/tests/test_job_exec_strategy.py +1 -1
  22. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_job_strategy.py +41 -0
  23. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_on.py +13 -4
  24. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_scheduler.py +71 -24
  25. ddeutil_workflow-0.0.20/tests/test_scheduler_tasks.py +79 -0
  26. ddeutil_workflow-0.0.20/tests/test_stage.py +75 -0
  27. ddeutil_workflow-0.0.18/tests/test_stage_bash.py → ddeutil_workflow-0.0.20/tests/test_stage_exec_bash.py +9 -9
  28. ddeutil_workflow-0.0.18/tests/test_stage_hook.py → ddeutil_workflow-0.0.20/tests/test_stage_exec_hook.py +5 -5
  29. ddeutil_workflow-0.0.18/tests/test_stage_py.py → ddeutil_workflow-0.0.20/tests/test_stage_exec_py.py +4 -4
  30. ddeutil_workflow-0.0.18/tests/test_stage_trigger.py → ddeutil_workflow-0.0.20/tests/test_stage_exec_trigger.py +2 -2
  31. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_utils_filter.py +11 -2
  32. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_utils_result.py +1 -1
  33. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_utils_template.py +44 -54
  34. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_workflow.py +66 -43
  35. ddeutil_workflow-0.0.18/tests/test_workflow_run.py → ddeutil_workflow-0.0.20/tests/test_workflow_exec.py +74 -20
  36. ddeutil_workflow-0.0.18/tests/test_workflow_task.py → ddeutil_workflow-0.0.20/tests/test_workflow_exec_hook.py +4 -4
  37. ddeutil_workflow-0.0.18/tests/test_workflow_depends.py → ddeutil_workflow-0.0.20/tests/test_workflow_exec_needs.py +2 -2
  38. ddeutil_workflow-0.0.20/tests/test_workflow_poke.py +50 -0
  39. ddeutil_workflow-0.0.20/tests/test_workflow_release.py +44 -0
  40. ddeutil_workflow-0.0.18/src/ddeutil/workflow/__about__.py +0 -1
  41. ddeutil_workflow-0.0.18/src/ddeutil/workflow/scheduler.py +0 -1404
  42. ddeutil_workflow-0.0.18/tests/test__conf_exist.py +0 -12
  43. ddeutil_workflow-0.0.18/tests/test_scheduler_tasks.py +0 -72
  44. ddeutil_workflow-0.0.18/tests/test_stage.py +0 -58
  45. ddeutil_workflow-0.0.18/tests/test_workflow_matrix.py +0 -156
  46. ddeutil_workflow-0.0.18/tests/test_workflow_on.py +0 -12
  47. ddeutil_workflow-0.0.18/tests/test_workflow_params.py +0 -23
  48. ddeutil_workflow-0.0.18/tests/test_workflow_poke.py +0 -22
  49. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/LICENSE +0 -0
  50. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/setup.cfg +0 -0
  51. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/__types.py +0 -0
  52. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/api.py +0 -0
  53. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/cli.py +0 -0
  54. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/repeat.py +0 -0
  55. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil/workflow/route.py +0 -0
  56. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  57. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
  58. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  59. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test__regex.py +0 -0
  60. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_conf.py +0 -0
  61. ddeutil_workflow-0.0.18/tests/test_job_py.py → ddeutil_workflow-0.0.20/tests/test_job_exec_py.py +0 -0
  62. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_params.py +0 -0
  63. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_utils.py +0 -0
  64. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_utils_params.py +0 -0
  65. {ddeutil_workflow-0.0.18 → ddeutil_workflow-0.0.20}/tests/test_utils_tag.py +0 -0
  66. ddeutil_workflow-0.0.18/tests/test_workflow_job_run.py → ddeutil_workflow-0.0.20/tests/test_workflow_job_exec.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.18
+Version: 0.0.20
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,9 +24,9 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil>=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
-Requires-Dist: pydantic==2.9.2
+Requires-Dist: pydantic==2.10.2
 Requires-Dist: python-dotenv==1.0.1
-Requires-Dist: typer<1.0.0,==0.12.5
+Requires-Dist: typer==0.15.1
 Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: api
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
@@ -46,7 +46,7 @@ for easy to make a simple metadata driven for data workflow orchestration.
 It can to use for data operator by a `.yaml` template.
 
 > [!WARNING]
-> This package provide only orchestration workload task. That mean you should not
+> This package provide only orchestration workload. That mean you should not
 > use the workflow stage to process any large volume data which use lot of compute
 > resource. :cold_sweat:
 
@@ -58,10 +58,10 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
 
-1. Minimum frequency unit of scheduling is **1 minute** :warning:
+1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Because Python 3.13 unlock GIL :unlock:)
+   (Python 3.13 unlock GIL :unlock:)
 
 > [!NOTE]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
@@ -183,7 +183,7 @@ application. If any configuration values do not set yet, it will use default val
 and do not raise any error to you.
 
 | Environment | Component | Default | Description | Remark |
-|:----------------------------------------|-----------|----------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
+|:----------------------------------------|:----------|:---------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
 | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
 | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
 | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
@@ -13,7 +13,7 @@ for easy to make a simple metadata driven for data workflow orchestration.
 It can to use for data operator by a `.yaml` template.
 
 > [!WARNING]
-> This package provide only orchestration workload task. That mean you should not
+> This package provide only orchestration workload. That mean you should not
 > use the workflow stage to process any large volume data which use lot of compute
 > resource. :cold_sweat:
 
@@ -25,10 +25,10 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
 
-1. Minimum frequency unit of scheduling is **1 minute** :warning:
+1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Because Python 3.13 unlock GIL :unlock:)
+   (Python 3.13 unlock GIL :unlock:)
 
 > [!NOTE]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
@@ -150,7 +150,7 @@ application. If any configuration values do not set yet, it will use default val
 and do not raise any error to you.
 
 | Environment | Component | Default | Description | Remark |
-|:----------------------------------------|-----------|----------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
+|:----------------------------------------|:----------|:---------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
 | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
 | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
 | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
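
The environment table above feeds a `Config` object whose attributes are read from `os.getenv` at class-definition time (see the conf.py hunks below). A minimal sketch of overriding two documented variables, assuming they are set before the first package import; the values here are illustrative:

    import os

    # Config reads these when its class body executes, so they must be
    # set (or placed in .env, which python-dotenv loads) before import.
    os.environ["WORKFLOW_ROOT_PATH"] = "/opt/data-app"
    os.environ["WORKFLOW_CORE_REGISTRY"] = "src.ddeutil.workflow,tests.utils"

    from ddeutil.workflow import Workflow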
@@ -28,9 +28,9 @@ requires-python = ">=3.9.13"
 dependencies = [
     "ddeutil>=0.4.3",
     "ddeutil-io[yaml,toml]>=0.2.3",
-    "pydantic==2.9.2",
+    "pydantic==2.10.2",
     "python-dotenv==1.0.1",
-    "typer==0.12.5,<1.0.0",
+    "typer==0.15.1",
     "schedule==1.2.2,<2.0.0",
 ]
 dynamic = ["version"]
@@ -0,0 +1 @@
+__version__: str = "0.0.20"
@@ -646,12 +646,27 @@ class CronJob:
 
         :param date: An initial date that want to mark as the start point.
         :param tz: A string timezone that want to change on runner.
+
         :rtype: CronRunner
         """
         return CronRunner(self, date, tz=tz)
 
 
 class CronJobYear(CronJob):
+    """The Cron Job Converter with Year extension object that generate datetime
+    dimension of cron job schedule format,
+
+        * * * * * * <command to execute>
+
+        (i)   minute (0 - 59)
+        (ii)  hour (0 - 23)
+        (iii) day of the month (1 - 31)
+        (iv)  month (1 - 12)
+        (v)   day of the week (0 - 6) (Sunday to Saturday; 7 is also Sunday
+              on some systems)
+        (vi)  year (1990 - 2100)
+    """
+
     cron_length = 6
     cron_units = CRON_UNITS_YEAR
 
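
For orientation, the docstring above is the whole of the year extension: a sixth field on top of the classic five. A rough usage sketch, assuming the cron string is passed to the constructor (only `schedule()` is visible in this diff):

    from datetime import datetime

    from ddeutil.workflow.__cron import CronJobYear

    # Six fields: minute, hour, day-of-month, month, day-of-week, year.
    # This expression matches 06:00 on 1 January in 2024 through 2026.
    cron = CronJobYear("0 6 1 1 * 2024-2026")

    # schedule() (shown for CronJob above) wraps the job in a CronRunner
    # that walks the timeline from the given start date.
    runner = cron.schedule(datetime(2024, 6, 1))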
@@ -704,9 +719,17 @@ class CronRunner:
         else:
             self.date: datetime = datetime.now(tz=self.tz)
 
+        # NOTE: Add one second if the microsecond value more than 0.
+        if self.date.microsecond > 0:
+            self.date: datetime = self.date.replace(microsecond=0) + timedelta(
+                seconds=1
+            )
+
         # NOTE: Add one minute if the second value more than 0.
         if self.date.second > 0:
-            self.date: datetime = self.date + timedelta(minutes=1)
+            self.date: datetime = self.date.replace(second=0) + timedelta(
+                minutes=1
+            )
 
         self.__start_date: datetime = self.date
         self.cron: CronJob | CronJobYear = cron
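
The runner's start date is now rounded up to a whole minute in two carries: microseconds into seconds, then seconds into minutes. A standalone sketch of the same arithmetic (`round_up_to_minute` is a hypothetical helper, not part of the package):

    from datetime import datetime, timedelta

    def round_up_to_minute(date: datetime) -> datetime:
        # Carry microseconds into seconds, then seconds into minutes,
        # mirroring the two NOTE blocks in the hunk above.
        if date.microsecond > 0:
            date = date.replace(microsecond=0) + timedelta(seconds=1)
        if date.second > 0:
            date = date.replace(second=0) + timedelta(minutes=1)
        return date

    print(round_up_to_minute(datetime(2024, 1, 1, 9, 30, 15, 250)))
    # -> 2024-01-01 09:31:00

The old code added a minute but kept the stray seconds (09:30:15 became 09:31:15), which is why the next hunk can return `self.date` as-is instead of stripping seconds and microseconds at the last moment.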
@@ -752,7 +775,7 @@ class CronRunner:
             not self.__shift_date(mode, reverse)
             for mode in ("year", "month", "day", "hour", "minute")
         ):
-            return copy.deepcopy(self.date.replace(second=0, microsecond=0))
+            return copy.deepcopy(self.date)
 
         raise RecursionError("Unable to find execution time for schedule")
 
@@ -801,6 +824,10 @@ class CronRunner:
         # NOTE: Replace date that less than it mode to zero.
         self.date: datetime = replace_date(self.date, mode, reverse=reverse)
 
+        # NOTE: Replace second and microsecond values that change from
+        # the replace_date func with reverse flag.
+        self.date: datetime = self.date.replace(second=0, microsecond=0)
+
         if current_value != getattr(self.date, switch[mode]):
             return mode != "month"
 
@@ -15,7 +15,10 @@ from .exceptions import (
     UtilException,
     WorkflowException,
 )
-from .job import Job, Strategy
+from .job import (
+    Job,
+    Strategy,
+)
 from .on import (
     On,
     YearOn,
@@ -24,8 +27,6 @@ from .on import (
 from .scheduler import (
     Schedule,
     ScheduleWorkflow,
-    Workflow,
-    WorkflowTaskData,
 )
 from .stage import (
     BashStage,
@@ -34,7 +35,7 @@ from .stage import (
     PyStage,
     Stage,
     TriggerStage,
-    handler_result,
+    extract_hook,
 )
 from .utils import (
     FILTERS,
@@ -70,3 +71,7 @@ from .utils import (
     str2template,
     tag,
 )
+from .workflow import (
+    Workflow,
+    WorkflowTaskData,
+)
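
`Workflow` and `WorkflowTaskData` have moved out of `scheduler.py` into the new `workflow.py` module (files 11, 14, and 41 in the list above), but the package root keeps re-exporting them, so top-level imports are unchanged:

    # Still works in 0.0.20 via the package-level re-export:
    from ddeutil.workflow import Workflow, WorkflowTaskData

    # The direct module path is new; the old scheduler location
    # presumably no longer exposes these names:
    from ddeutil.workflow.workflow import Workflow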
@@ -33,6 +33,29 @@ load_dotenv()
 env = os.getenv
 
 
+@lru_cache
+def get_logger(name: str):
+    """Return logger object with an input module name.
+
+    :param name: A module name that want to log.
+    """
+    lg = logging.getLogger(name)
+    formatter = logging.Formatter(
+        fmt=(
+            "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
+            "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
+            "(%(filename)s:%(lineno)s)"
+        ),
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+    stream = logging.StreamHandler()
+    stream.setFormatter(formatter)
+    lg.addHandler(stream)
+
+    lg.setLevel(logging.DEBUG if config.debug else logging.INFO)
+    return lg
+
+
 class Config:
     """Config object for keeping application configuration on current session
     without changing when if the application still running.
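
`get_logger` moved above `Config` so the module can build its own logger right after `config = Config()` (see the later hunk). The move is safe because `config.debug` inside the function body is only resolved at call time, after `config` exists. A small usage sketch; the rendered line is an illustration of the format string above:

    from ddeutil.workflow.conf import get_logger

    log = get_logger(__name__)
    log.info("Start the workflow release ...")
    # Renders roughly as:
    # 2024-12-01 09:30:00.123 (my.module , 4321 , 871  ) [INFO   ] Start the workflow release ... (app.py:12)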
@@ -98,12 +121,14 @@ class Config:
             os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
         )
 
-    def __init__(self):
+    def __init__(self) -> None:
+        # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
         if self.max_job_parallel < 0:
             raise ValueError(
                 f"``MAX_JOB_PARALLEL`` should more than 0 but got "
                 f"{self.max_job_parallel}."
             )
+
         try:
             self.stop_boundary_delta: timedelta = timedelta(
                 **json.loads(self.stop_boundary_delta_str)
@@ -195,6 +220,7 @@ class SimLoad:
         :param conf: A config object.
         :param include:
         :param exclude:
+
         :rtype: Iterator[tuple[str, DictData]]
         """
         exclude: list[str] = exclude or []
@@ -247,12 +273,14 @@ class Loader(SimLoad):
         include: list[str] | None = None,
         exclude: list[str] | None = None,
         **kwargs,
-    ) -> DictData:
+    ) -> Iterator[tuple[str, DictData]]:
         """Override the find class method from the Simple Loader object.
 
         :param obj: A object that want to validate matching before return.
         :param include:
         :param exclude:
+
+        :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
             obj=obj, conf=Config(), include=include, exclude=exclude
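
With the corrected annotation, `finds` yields `(name, data)` pairs instead of returning one `DictData`. A hedged sketch of iterating it, assuming the classmethod call shape implied by the signature above:

    from ddeutil.workflow import Workflow
    from ddeutil.workflow.conf import Loader

    # One (name, data) tuple per config entry that matches the model type.
    for name, data in Loader.finds(Workflow):
        print(name, sorted(data))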
@@ -268,6 +296,7 @@ def get_type(t: str, params: Config) -> AnyModelType:
     :param t: A importable type string.
     :param params: A config parameters that use registry to search this
         type.
+
     :rtype: AnyModelType
     """
     try:
@@ -283,29 +312,7 @@ def get_type(t: str, params: Config) -> AnyModelType:
 
 
 config = Config()
-
-
-@lru_cache
-def get_logger(name: str):
-    """Return logger object with an input module name.
-
-    :param name: A module name that want to log.
-    """
-    logger = logging.getLogger(name)
-    formatter = logging.Formatter(
-        fmt=(
-            "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
-            "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
-            "(%(filename)s:%(lineno)s)"
-        ),
-        datefmt="%Y-%m-%d %H:%M:%S",
-    )
-    stream = logging.StreamHandler()
-    stream.setFormatter(formatter)
-    logger.addHandler(stream)
-
-    logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
-    return logger
+logger = get_logger("ddeutil.workflow")
 
 
 class BaseLog(BaseModel, ABC):
@@ -315,8 +322,8 @@ class BaseLog(BaseModel, ABC):
     """
 
     name: str = Field(description="A workflow name.")
-    on: str = Field(description="A cronjob string of this piepline schedule.")
     release: datetime = Field(description="A release datetime.")
+    type: str = Field(description="A running type before logging.")
     context: DictData = Field(
         default_factory=dict,
         description=(
@@ -366,6 +373,8 @@ class FileLog(BaseLog):
         workflow name.
 
         :param name: A workflow name that want to search release logging data.
+
+        :rtype: Iterator[Self]
         """
         pointer: Path = config.root_path / f"./logs/workflow={name}"
         if not pointer.exists():
@@ -387,6 +396,9 @@ class FileLog(BaseLog):
         workflow name and release values. If a release does not pass to an input
         argument, it will return the latest release from the current log path.
 
+        :param name:
+        :param release:
+
         :raise FileNotFoundError:
         :raise NotImplementedError:
 
@@ -411,21 +423,17 @@ class FileLog(BaseLog):
             return cls.model_validate(obj=json.load(f))
 
     @classmethod
-    def is_pointed(
-        cls,
-        name: str,
-        release: datetime,
-        *,
-        queue: list[datetime] | None = None,
-    ) -> bool:
-        """Check this log already point in the destination.
+    def is_pointed(cls, name: str, release: datetime) -> bool:
+        """Check the release log already pointed or created at the destination
+        log path.
 
         :param name: A workflow name.
         :param release: A release datetime.
-        :param queue: A list of queue of datetime that already run in the
-            future.
+
+        :rtype: bool
+        :return: Return False if the release log was not pointed or created.
         """
-        # NOTE: Check environ variable was set for real writing.
+        # NOTE: Return False if enable writing log flag does not set.
         if not config.enable_write_log:
             return False
 
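
With the `queue` keyword gone, pointed-ness now depends only on the enable-write-log flag and what already exists on disk. The new call shape, following the signature above (the workflow name and release here are illustrative):

    from datetime import datetime

    from ddeutil.workflow.conf import FileLog

    already_logged: bool = FileLog.is_pointed(
        name="wf-scheduling",  # illustrative workflow name
        release=datetime(2024, 1, 1, 9, 30),
    )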
@@ -434,9 +442,7 @@ class FileLog(BaseLog):
             name=name, release=release
         )
 
-        if not queue:
-            return pointer.exists()
-        return pointer.exists() or (release in queue)
+        return pointer.exists()
 
     def pointer(self) -> Path:
         """Return release directory path that was generated from model data.
@@ -459,6 +465,9 @@ class FileLog(BaseLog):
         if not config.enable_write_log:
             return self
 
+        logger.debug(
+            f"({self.run_id}) [LOG]: Start writing log: {self.name!r}."
+        )
         log_file: Path = self.pointer() / f"{self.run_id}.log"
         log_file.write_text(
             json.dumps(
@@ -3,6 +3,10 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+"""Exception objects for this package do not do anything because I want to
+create the lightweight workflow package. So, this module do just a exception
+annotate for handle error only.
+"""
 from __future__ import annotations
 
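
Consistent with that new docstring, the exception classes carry no behavior of their own; they only let callers catch this package's failures distinctly. A minimal sketch using two names re-exported in the `__init__.py` hunk above:

    from ddeutil.workflow.exceptions import UtilException, WorkflowException

    try:
        raise WorkflowException("workflow execution was canceled")
    except (UtilException, WorkflowException) as err:
        # Annotation-style exceptions: nothing to inspect beyond the message.
        print(f"Caught: {err}")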