ddeutil-workflow 0.0.30__tar.gz → 0.0.31__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.30/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.31}/PKG-INFO +10 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/README.md +9 -0
- ddeutil_workflow-0.0.31/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/__cron.py +1 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/__init__.py +5 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/conf.py +4 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/cron.py +77 -21
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/exceptions.py +3 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/params.py +18 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/result.py +1 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/scheduler.py +90 -56
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/stage.py +13 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/utils.py +36 -10
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/workflow.py +118 -87
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31/src/ddeutil_workflow.egg-info}/PKG-INFO +10 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil_workflow.egg-info/SOURCES.txt +3 -2
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_cron_on.py +9 -0
- ddeutil_workflow-0.0.31/tests/test_release.py +62 -0
- ddeutil_workflow-0.0.31/tests/test_release_queue.py +66 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_schedule_control.py +6 -0
- ddeutil_workflow-0.0.30/tests/test_workflow_schedule.py → ddeutil_workflow-0.0.31/tests/test_schedule_workflow.py +25 -25
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_utils.py +15 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_workflow_exec_poke.py +14 -7
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_workflow_exec_release.py +19 -3
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_workflow_task.py +9 -0
- ddeutil_workflow-0.0.30/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.30/tests/test_release_and_queue.py +0 -75
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/LICENSE +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/pyproject.toml +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/api/api.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/api/repeat.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/api/route.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/hook.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/job.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil/workflow/templates.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_hook_tag.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_job_exec_py.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.31}/tests/test_workflow_job_exec.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.30
+Version: 0.0.31
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -192,6 +192,15 @@ The above workflow template is main executor pipeline that you want to do. If yo
 want to schedule this workflow, you want to dynamic its parameters change base on
 execution time such as `run-date` should change base on that workflow running date.

+```python
+from ddeutil.workflow import Workflow, Result
+
+workflow: Workflow = Workflow.from_loader('run-py-local')
+result: Result = workflow.execute(
+    params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
+)
+```
+
 So, this package provide the `Schedule` template for this action.

 ```yaml
README.md

@@ -160,6 +160,15 @@ The above workflow template is main executor pipeline that you want to do. If yo
 want to schedule this workflow, you want to dynamic its parameters change base on
 execution time such as `run-date` should change base on that workflow running date.

+```python
+from ddeutil.workflow import Workflow, Result
+
+workflow: Workflow = Workflow.from_loader('run-py-local')
+result: Result = workflow.execute(
+    params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
+)
+```
+
 So, this package provide the `Schedule` template for this action.

 ```yaml
src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.31"
src/ddeutil/workflow/__init__.py

@@ -47,8 +47,10 @@ from .params import (
 from .result import Result
 from .scheduler import (
     Schedule,
-
+    ScheduleWorkflow,
+    schedule_control,
     schedule_runner,
+    schedule_task,
 )
 from .stage import (
     BashStage,
@@ -83,6 +85,8 @@ from .utils import (
     make_exec,
 )
 from .workflow import (
+    Release,
+    ReleaseQueue,
     Workflow,
     WorkflowTask,
 )
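The `__init__.py` hunks above widen the package's public surface. A minimal smoke test of the names that 0.0.31 re-exports, taken directly from the import lists in the hunks (nothing beyond those names is assumed):

```python
# Newly re-exported names in ddeutil.workflow 0.0.31, per the hunks above.
from ddeutil.workflow import (
    Release,
    ReleaseQueue,
    ScheduleWorkflow,
    schedule_control,
    schedule_task,
)

# If these imports resolve, the release exposes the new scheduler and
# release-queue API at the top level.
print(Release, ReleaseQueue, ScheduleWorkflow)
```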
src/ddeutil/workflow/conf.py

@@ -582,6 +582,10 @@ Log = Union[


 def get_log() -> type[Log]:  # pragma: no cov
+    """Get logging class that dynamic base on the config log path value.
+
+    :rtype: type[Log]
+    """
     if config.log_path.is_file():
         return SQLiteLog
     return FileLog
src/ddeutil/workflow/cron.py

@@ -5,16 +5,17 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Literal
+from typing import Annotated, Literal, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
 from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

-from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner
+from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner, Options
 from .__types import DictData, DictStr, TupleStr
 from .conf import Loader

@@ -47,6 +48,8 @@ def interval2crontab(
     '0 0 1 * *'
     >>> interval2crontab(interval='monthly', day='tuesday', time='12:00')
     '12 0 1 * 2'
+
+    :rtype: str
     """
     d: str = "*"
     if interval == "weekly":
@@ -64,12 +67,19 @@ class On(BaseModel):
     """On Pydantic model (Warped crontab object by model).

     See Also:
-        * ``generate()`` is the main
+        * ``generate()`` is the main use-case of this schedule object.
     """

     model_config = ConfigDict(arbitrary_types_allowed=True)

     # NOTE: This is fields of the base schedule.
+    extras: Annotated[
+        DictData,
+        Field(
+            default_factory=dict,
+            description="An extras mapping parameters",
+        ),
+    ]
     cronjob: Annotated[
         CronJob,
         Field(description="Cron job of this schedule"),
@@ -81,13 +91,6 @@ class On(BaseModel):
             alias="timezone",
         ),
     ] = "Etc/UTC"
-    extras: Annotated[
-        DictData,
-        Field(
-            default_factory=dict,
-            description="An extras mapping parameters",
-        ),
-    ]

     @classmethod
     def from_value(cls, value: DictStr, externals: DictData) -> Self:
@@ -153,6 +156,7 @@ class On(BaseModel):

     @model_validator(mode="before")
     def __prepare_values(cls, values: DictData) -> DictData:
+        """Extract tz key from value and change name to timezone key."""
         if tz := values.pop("tz", None):
             values["timezone"] = tz
         return values
@@ -160,24 +164,55 @@ class On(BaseModel):
     @field_validator("tz")
     def __validate_tz(cls, value: str) -> str:
         """Validate timezone value that able to initialize with ZoneInfo after
-        it passing to this model in before mode.
+        it passing to this model in before mode.
+
+        :rtype: str
+        """
         try:
             _ = ZoneInfo(value)
             return value
         except ZoneInfoNotFoundError as err:
             raise ValueError(f"Invalid timezone: {value}") from err

-    @field_validator(
-
-
-
+    @field_validator(
+        "cronjob", mode="before", json_schema_input_type=Union[CronJob, str]
+    )
+    def __prepare_cronjob(
+        cls, value: str | CronJob, info: ValidationInfo
+    ) -> CronJob:
+        """Prepare crontab value that able to receive with string type.
+        This step will get options kwargs from extras and pass to the
+        CronJob object.
+
+        :rtype: CronJob
+        """
+        extras: DictData = info.data.get("extras", {})
+        return (
+            CronJob(
+                value,
+                option={
+                    name: extras[name]
+                    for name in (f.name for f in fields(Options))
+                    if name in extras
+                },
+            )
+            if isinstance(value, str)
+            else value
+        )

     @field_serializer("cronjob")
     def __serialize_cronjob(self, value: CronJob) -> str:
+        """Serialize the cronjob field that store with CronJob object.
+
+        :rtype: str
+        """
         return str(value)

     def generate(self, start: str | datetime) -> CronRunner:
-        """Return Cron runner object.
+        """Return Cron runner object.
+
+        :rtype: CronRunner
+        """
         if isinstance(start, str):
             start: datetime = datetime.fromisoformat(start)
         elif not isinstance(start, datetime):
@@ -187,6 +222,8 @@ class On(BaseModel):
     def next(self, start: str | datetime) -> CronRunner:
         """Return a next datetime from Cron runner object that start with any
         date that given from input.
+
+        :rtype: CronRunner
         """
         runner: CronRunner = self.generate(start=start)

@@ -209,7 +246,26 @@ class YearOn(On):
         Field(description="Cron job of this schedule"),
     ]

-    @field_validator(
-
-
-
+    @field_validator(
+        "cronjob", mode="before", json_schema_input_type=Union[CronJob, str]
+    )
+    def __prepare_cronjob(
+        cls, value: str | CronJobYear, info: ValidationInfo
+    ) -> CronJobYear:
+        """Prepare crontab value that able to receive with string type.
+
+        :rtype: CronJobYear
+        """
+        extras: DictData = info.data.get("extras", {})
+        return (
+            CronJobYear(
+                value,
+                option={
+                    name: extras[name]
+                    for name in (f.name for f in fields(Options))
+                    if name in extras
+                },
+            )
+            if isinstance(value, str)
+            else value
+        )
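With the `cron.py` hunks above, `On` and `YearOn` accept the `cronjob` field as a plain crontab string, and any `Options` field names found in `extras` are forwarded to the `CronJob`/`CronJobYear` constructor. A hedged sketch of that behaviour; the import path, timezone, and crontab value below are illustrative assumptions, not part of the diff:

```python
# Sketch of the new before-mode cronjob validator. If `On` is not re-exported
# at the package root, import it from ddeutil.workflow.cron instead.
from ddeutil.workflow import On

schedule = On.model_validate(
    {
        # __prepare_cronjob parses this string into a CronJob instance.
        "cronjob": "*/5 * * * *",
        # __prepare_values renames a `tz` key to the `timezone` alias.
        "tz": "Asia/Bangkok",
        # Options field names placed here would be passed through as CronJob options.
        "extras": {},
    }
)

runner = schedule.generate("2024-01-01 00:00:00")  # accepts str or datetime
```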
src/ddeutil/workflow/params.py

@@ -5,6 +5,7 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+import decimal
 import logging
 from abc import ABC, abstractmethod
 from datetime import date, datetime
@@ -49,7 +50,7 @@ class BaseParam(BaseModel, ABC):

 class DefaultParam(BaseParam):
     """Default Parameter that will check default if it required. This model do
-    not implement the receive method.
+    not implement the `receive` method.
     """

     required: bool = Field(
@@ -68,6 +69,15 @@ class DefaultParam(BaseParam):
     )


+# TODO: Not implement this parameter yet
+class DateParam(DefaultParam):
+    """Date parameter."""
+
+    type: Literal["date"] = "date"
+
+    def receive(self, value: Optional[str | date] = None) -> date: ...
+
+
 class DatetimeParam(DefaultParam):
     """Datetime parameter."""

@@ -145,6 +155,13 @@ class IntParam(DefaultParam):
         return value


+# TODO: Not implement this parameter yet
+class DecimalParam(DefaultParam):
+    type: Literal["decimal"] = "decimal"
+
+    def receive(self, value: float | None = None) -> decimal.Decimal: ...
+
+
 class ChoiceParam(BaseParam):
     """Choice parameter."""

src/ddeutil/workflow/result.py

@@ -94,6 +94,7 @@ class Result:
         # NOTE: Check the context has jobs key.
         if "jobs" not in self.__dict__["context"]:
             self.__dict__["context"]["jobs"] = {}
+
         self.__dict__["context"]["jobs"].update(result.context)

         # NOTE: Update running ID from an incoming result.
src/ddeutil/workflow/scheduler.py

@@ -33,7 +33,7 @@ from functools import wraps
 from heapq import heappop, heappush
 from textwrap import dedent
 from threading import Thread
-from typing import Callable, Optional, TypedDict
+from typing import Callable, Optional, TypedDict, Union

 from pydantic import BaseModel, Field
 from pydantic.functional_validators import field_validator, model_validator
@@ -41,7 +41,7 @@ from typing_extensions import Self

 try:
     from typing import ParamSpec
-except ImportError:
+except ImportError:  # pragma: no cov
     from typing_extensions import ParamSpec

 try:
@@ -53,11 +53,9 @@ from .__cron import CronRunner
 from .__types import DictData, TupleStr
 from .conf import Loader, Log, config, get_log, get_logger
 from .cron import On
-from .exceptions import WorkflowException
-from .
-
-    delay,
-)
+from .exceptions import ScheduleException, WorkflowException
+from .result import Result
+from .utils import batch, delay
 from .workflow import Release, ReleaseQueue, Workflow, WorkflowTask

 P = ParamSpec("P")
@@ -69,7 +67,7 @@ logging.getLogger("schedule").setLevel(logging.INFO)

 __all__: TupleStr = (
     "Schedule",
-    "
+    "ScheduleWorkflow",
     "schedule_task",
     "monitor",
     "schedule_control",
@@ -79,8 +77,8 @@ __all__: TupleStr = (
 )


-class
-    """Workflow
+class ScheduleWorkflow(BaseModel):
+    """Schedule Workflow Pydantic model that use to keep workflow model for
     the Schedule model. it should not use Workflow model directly because on the
     schedule config it can adjust crontab value that different from the Workflow
     model.
@@ -233,9 +231,9 @@ class Schedule(BaseModel):
             "A schedule description that can be string of markdown content."
         ),
     )
-    workflows: list[
+    workflows: list[ScheduleWorkflow] = Field(
         default_factory=list,
-        description="A list of
+        description="A list of ScheduleWorkflow models.",
     )

     @field_validator("desc", mode="after")
@@ -258,7 +256,7 @@ class Schedule(BaseModel):
         an input schedule name. The loader object will use this schedule name to
         searching configuration data of this schedule model in conf path.

-        :param name: A schedule name that want to pass to Loader object.
+        :param name: (str) A schedule name that want to pass to Loader object.
         :param externals: An external parameters that want to pass to Loader
             object.

@@ -277,6 +275,37 @@ class Schedule(BaseModel):

         return cls.model_validate(obj=loader_data)

+    @classmethod
+    def extract_tasks(
+        cls,
+        schedules: list[str],
+        start_date: datetime,
+        queue: dict[str, ReleaseQueue],
+        externals: DictData | None = None,
+    ) -> list[WorkflowTask]:
+        """Return the list of WorkflowTask object from all schedule object that
+        include in an input schedules argument.
+
+        :param schedules: A list of schedule name that will use `from_loader`
+            method.
+        :param start_date: A start date that get from the workflow schedule.
+        :param queue: A mapping of name and list of datetime for queue.
+        :param externals: An external parameters that pass to the Loader object.
+
+        :rtype: list[WorkflowTask]
+        """
+        tasks: list[WorkflowTask] = []
+        for name in schedules:
+            schedule: Schedule = Schedule.from_loader(name, externals=externals)
+            tasks.extend(
+                schedule.tasks(
+                    start_date,
+                    queue=queue,
+                    externals=externals,
+                ),
+            )
+        return tasks
+
     def tasks(
         self,
         start_date: datetime,
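The hunk above promotes the task-building loop that previously lived inline in `schedule_control` (removed further down) into a reusable classmethod. A hedged sketch of calling it directly; the schedule name is a hypothetical config entry and must exist in the config path for `Schedule.from_loader` to resolve it:

```python
# Illustrative call of the new Schedule.extract_tasks classmethod.
from datetime import datetime, timedelta

from ddeutil.workflow import ReleaseQueue, Schedule, WorkflowTask

queue: dict[str, ReleaseQueue] = {}
start_date_waiting: datetime = datetime.now().replace(
    second=0, microsecond=0
) + timedelta(minutes=1)

tasks: list[WorkflowTask] = Schedule.extract_tasks(
    ["schedule-wf"],      # hypothetical schedule config name
    start_date_waiting,
    queue,
    externals=None,
)
```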
@@ -311,7 +340,8 @@ class Schedule(BaseModel):
         return workflow_tasks


-
+ResultOrCancelJob = Union[type[CancelJob], Result]
+ReturnCancelJob = Callable[P, ResultOrCancelJob]
 DecoratorCancelJob = Callable[[ReturnCancelJob], ReturnCancelJob]


@@ -326,24 +356,25 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
     """

     def decorator(func: ReturnCancelJob) -> ReturnCancelJob:  # pragma: no cov
-        try:

-
-
+        @wraps(func)
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> ResultOrCancelJob:
+            try:
                 return func(*args, **kwargs)
+            except Exception as err:
+                logger.exception(err)
+                if cancel_on_failure:
+                    return CancelJob
+                raise err

-
-
-        except Exception as err:
-            logger.exception(err)
-            if cancel_on_failure:
-                return CancelJob
-            raise err
+        return wrapper

     return decorator


 class ReleaseThread(TypedDict):
+    """TypeDict for the release thread."""
+
     thread: Thread
     start_date: datetime

@@ -358,11 +389,13 @@ def schedule_task(
     queue: dict[str, ReleaseQueue],
     threads: ReleaseThreads,
     log: type[Log],
-) -> CancelJob | None:
-    """
-
+) -> type[CancelJob] | None:
+    """Schedule task function that generate thread of workflow task release
+    method in background. This function do the same logic as the workflow poke
+    method, but it runs with map of schedules and the on values.

-    This
+    This schedule task start runs every minute at ':02' second and it does
+    not allow you to run with offset time.

     :param tasks: A list of WorkflowTask object.
     :param stop: A stop datetime object that force stop running scheduler.
@@ -370,7 +403,7 @@ def schedule_task(
     :param threads: A mapping of alias name and Thread object.
     :param log: A log class that want to make log object.

-    :rtype: CancelJob | None
+    :rtype: type[CancelJob] | None
     """
     current_date: datetime = datetime.now(tz=config.tz)
     if current_date > stop.replace(tzinfo=config.tz):
@@ -410,12 +443,17 @@ def schedule_task(
         current_release: datetime = current_date.replace(
             second=0, microsecond=0
         )
-        if (first_date := q.first_queue.date)
+        if (first_date := q.first_queue.date) > current_release:
             logger.debug(
                 f"[WORKFLOW]: Skip schedule "
                 f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"
             )
             continue
+        elif first_date < current_release:  # pragma: no cov
+            raise ScheduleException(
+                "The first release date from queue should not less than current"
+                "release date."
+            )

         # NOTE: Pop the latest release and push it to running.
         release: Release = heappop(q.queue)
@@ -445,7 +483,7 @@ def schedule_task(

     delay()

-    logger.debug(f"[SCHEDULE]: End schedule
+    logger.debug(f"[SCHEDULE]: End schedule task {'=' * 80}")


 def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
@@ -455,9 +493,7 @@ def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
     :param threads: A mapping of Thread object and its name.
     :type threads: ReleaseThreads
     """
-    logger.debug(
-        "[MONITOR]: Start checking long running workflow release task."
-    )
+    logger.debug("[MONITOR]: Start checking long running schedule task.")

     snapshot_threads: list[str] = list(threads.keys())
     for t_name in snapshot_threads:
@@ -476,12 +512,15 @@ def schedule_control(
     *,
     log: type[Log] | None = None,
 ) -> list[str]:  # pragma: no cov
-    """Scheduler control function that
+    """Scheduler control function that run the chuck of schedules every minute
+    and this function release monitoring thread for tracking undead thread in
+    the background.

     :param schedules: A list of workflow names that want to schedule running.
     :param stop: A datetime value that use to stop running schedule.
     :param externals: An external parameters that pass to Loader.
-    :param log:
+    :param log: A log class that use on the workflow task release for writing
+        its release log context.

     :rtype: list[str]
     """
@@ -493,8 +532,11 @@ def schedule_control(
             "Should install schedule package before use this module."
         ) from None

+    # NOTE: Get default logging.
     log: type[Log] = log or get_log()
     scheduler: Scheduler = Scheduler()
+
+    # NOTE: Create the start and stop datetime.
     start_date: datetime = datetime.now(tz=config.tz)
     stop_date: datetime = stop or (start_date + config.stop_boundary_delta)

@@ -506,25 +548,15 @@ def schedule_control(
         second=0, microsecond=0
     ) + timedelta(minutes=1)

-    # NOTE: Start create workflow tasks from list of schedule name.
-    tasks: list[WorkflowTask] = []
-    for name in schedules:
-        schedule: Schedule = Schedule.from_loader(name, externals=externals)
-        tasks.extend(
-            schedule.tasks(
-                start_date_waiting,
-                queue=queue,
-                externals=externals,
-            ),
-        )
-
     # NOTE: This schedule job will start every minute at :02 seconds.
     (
         scheduler.every(1)
         .minutes.at(":02")
         .do(
             schedule_task,
-            tasks=
+            tasks=Schedule.extract_tasks(
+                schedules, start_date_waiting, queue, externals=externals
+            ),
             stop=stop_date,
             queue=queue,
             threads=threads,
@@ -533,7 +565,8 @@ def schedule_control(
         .tag("control")
     )

-    # NOTE: Checking zombie task with schedule job will start every 5 minute
+    # NOTE: Checking zombie task with schedule job will start every 5 minute at
+    # :10 seconds.
     (
         scheduler.every(5)
         .minutes.at(":10")
@@ -579,16 +612,15 @@ def schedule_runner(
     externals: DictData | None = None,
     excluded: list[str] | None = None,
 ) -> list[str]:  # pragma: no cov
-    """Schedule runner function
-
-
+    """Schedule runner function it the multiprocess controller function for
+    split the setting schedule to the `schedule_control` function on the
+    process pool. It chunks schedule configs that exists in config
+    path by `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` value.

     :param stop: A stop datetime object that force stop running scheduler.
     :param externals:
     :param excluded: A list of schedule name that want to exclude from finding.

-    :rtype: list[str]
-
     This function will get all workflows that include on value that was
     created in config path and chuck it with application config variable
     ``WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS`` env var to multiprocess executor
@@ -600,7 +632,9 @@ def schedule_runner(
         --> thread of release task 01 02
     ==> schedule --> thread of release task 02 01
         --> thread of release task 02 02
-    ==> process 02
+    ==> process 02 ==> ...
+
+    :rtype: list[str]
     """
     results: list[str] = []

src/ddeutil/workflow/stage.py

@@ -328,7 +328,7 @@ class BashStage(BaseStage):
     If your current OS is Windows, it will run on the bash in the WSL.

     I get some limitation when I run shell statement with the built-in
-
+    subprocess package. It does not good enough to use multiline statement.
     Thus, I add writing ``.sh`` file before execution process for fix this
     issue.

@@ -665,3 +665,15 @@ Stage = Union[
     TriggerStage,
     EmptyStage,
 ]
+
+
+# TODO: Not implement this stages yet
+class ParallelStage(BaseModel):
+    parallel: list[Stage]
+    max_parallel_core: int = Field(default=2)
+
+
+# TODO: Not implement this stages yet
+class ForEachStage(BaseModel):
+    foreach: list[str]
+    stages: list[Stage]