ddeutil-workflow 0.0.30__tar.gz → 0.0.32__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.30/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.32}/PKG-INFO +27 -3
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/README.md +26 -2
- ddeutil_workflow-0.0.32/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__cron.py +1 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__init__.py +5 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/api.py +4 -6
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/route.py +4 -4
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/conf.py +4 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/cron.py +77 -21
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/exceptions.py +3 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/job.py +2 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/params.py +21 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/result.py +1 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/scheduler.py +156 -50
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/stage.py +13 -1
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/templates.py +13 -4
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/utils.py +36 -10
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/workflow.py +118 -87
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32/src/ddeutil_workflow.egg-info}/PKG-INFO +27 -3
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/SOURCES.txt +4 -2
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_cron_on.py +9 -0
- ddeutil_workflow-0.0.32/tests/test_release.py +62 -0
- ddeutil_workflow-0.0.32/tests/test_release_queue.py +66 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_schedule_control.py +6 -0
- ddeutil_workflow-0.0.32/tests/test_schedule_pending.py +13 -0
- ddeutil_workflow-0.0.30/tests/test_workflow_schedule.py → ddeutil_workflow-0.0.32/tests/test_schedule_workflow.py +25 -25
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_utils.py +15 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_workflow_exec_poke.py +14 -7
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_workflow_exec_release.py +19 -3
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_workflow_task.py +9 -0
- ddeutil_workflow-0.0.30/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.30/tests/test_release_and_queue.py +0 -75
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/LICENSE +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/pyproject.toml +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/repeat.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/hook.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_hook_tag.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_job_exec_py.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/tests/test_workflow_job_exec.py +0 -0
{ddeutil_workflow-0.0.30/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.32}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.30
+Version: 0.0.32
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -110,7 +110,7 @@ flowchart LR
 E -.->|read| G
 ```

-> [!
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -192,7 +192,17 @@ The above workflow template is main executor pipeline that you want to do. If yo
 want to schedule this workflow, you want to dynamic its parameters change base on
 execution time such as `run-date` should change base on that workflow running date.

-
+```python
+from ddeutil.workflow import Workflow, Result
+
+workflow: Workflow = Workflow.from_loader('run-py-local')
+result: Result = workflow.execute(
+    params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
+)
+```
+
+So, this package provide the `Schedule` template for this action, and you can dynamic
+pass the parameters for changing align with that running time by the `release` prefix.

 ```yaml
 schedule-run-local-wf:
@@ -210,6 +220,20 @@ schedule-run-local-wf:
         asat-dt: "${{ release.logical_date }}"
 ```

+The main method of the `Schedule` model that use to running is `pending`. If you
+do not pass the `stop` date on this method, it will use config with `WORKFLOW_APP_STOP_BOUNDARY_DELTA`
+key for generate this stop date.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration

 The main configuration that use to dynamic changing this workflow engine for your
````
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/README.md

````diff
@@ -78,7 +78,7 @@ flowchart LR
 E -.->|read| G
 ```

-> [!
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -160,7 +160,17 @@ The above workflow template is main executor pipeline that you want to do. If yo
 want to schedule this workflow, you want to dynamic its parameters change base on
 execution time such as `run-date` should change base on that workflow running date.

-
+```python
+from ddeutil.workflow import Workflow, Result
+
+workflow: Workflow = Workflow.from_loader('run-py-local')
+result: Result = workflow.execute(
+    params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
+)
+```
+
+So, this package provide the `Schedule` template for this action, and you can dynamic
+pass the parameters for changing align with that running time by the `release` prefix.

 ```yaml
 schedule-run-local-wf:
@@ -178,6 +188,20 @@ schedule-run-local-wf:
         asat-dt: "${{ release.logical_date }}"
 ```

+The main method of the `Schedule` model that use to running is `pending`. If you
+do not pass the `stop` date on this method, it will use config with `WORKFLOW_APP_STOP_BOUNDARY_DELTA`
+key for generate this stop date.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration

 The main configuration that use to dynamic changing this workflow engine for your
````
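The README hunk above documents that `Schedule.pending` falls back to the `WORKFLOW_APP_STOP_BOUNDARY_DELTA` config key when no `stop` date is given. A minimal sketch of passing an explicit stop boundary instead; it assumes only the `stop` keyword shown in the README (any other `pending` arguments are not visible in this diff) and that `config` with its `tz` attribute is importable from `ddeutil.workflow.conf`, as the api module later in this diff uses it:

```python
from datetime import datetime, timedelta

from ddeutil.workflow import Schedule
from ddeutil.workflow.conf import config

# Load the same schedule template used in the README example above.
schedule: Schedule = Schedule.from_loader("schedule-run-local-wf")

# Stop polling five minutes from now instead of relying on the
# WORKFLOW_APP_STOP_BOUNDARY_DELTA default.
schedule.pending(stop=datetime.now(config.tz) + timedelta(minutes=5))
```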
ddeutil_workflow-0.0.32/src/ddeutil/workflow/__about__.py

```diff
@@ -0,0 +1 @@
+__version__: str = "0.0.32"
```
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__init__.py

```diff
@@ -47,8 +47,10 @@ from .params import (
 from .result import Result
 from .scheduler import (
     Schedule,
-
+    ScheduleWorkflow,
+    schedule_control,
     schedule_runner,
+    schedule_task,
 )
 from .stage import (
     BashStage,
@@ -83,6 +85,8 @@ from .utils import (
     make_exec,
 )
 from .workflow import (
+    Release,
+    ReleaseQueue,
     Workflow,
     WorkflowTask,
 )
```
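For orientation, these are the names that 0.0.32 now re-exports from the package root; the import below is purely illustrative and just mirrors the hunks above together with the 0.0.30 exports that remain unchanged:

```python
from ddeutil.workflow import (
    Release,
    ReleaseQueue,
    Schedule,
    ScheduleWorkflow,
    Workflow,
    WorkflowTask,
    schedule_control,
    schedule_runner,
    schedule_task,
)
```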
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/api.py

```diff
@@ -43,9 +43,7 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
     a.state.workflow_queue = {}

     yield {
-
-        "upper_result": a.state.upper_result,
-        # NOTE: Scheduler value should be contain a key of workflow workflow and
+        # NOTE: Scheduler value should be contained a key of workflow and
         # list of datetime of queue and running.
         #
         # ... {
@@ -88,7 +86,7 @@ if config.enable_route_workflow:

 # NOTE: Enable the schedule route.
 if config.enable_route_schedule:
-    from ..conf import
+    from ..conf import get_log
     from ..scheduler import schedule_task
     from .route import schedule_route

@@ -108,11 +106,11 @@
             stop=datetime.now(config.tz) + timedelta(minutes=1),
             queue=app.state.workflow_queue,
             threads=app.state.workflow_threads,
-            log=
+            log=get_log(),
         )

     @schedule_route.on_event("startup")
-    @repeat_at(cron="*/5 * * * *")
+    @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
         logger.debug("[MONITOR]: Start monitoring threading.")
         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
```
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/route.py

```diff
@@ -169,7 +169,7 @@ async def get_schedules(name: str):
     )


-@schedule_route.get(path="/deploy")
+@schedule_route.get(path="/deploy/")
 async def get_deploy_schedulers(request: Request):
     snapshot = copy.deepcopy(request.state.scheduler)
     return {"schedule": snapshot}
@@ -178,9 +178,9 @@ async def get_deploy_schedulers(request: Request):
 @schedule_route.get(path="/deploy/{name}")
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-
+        schedule = Schedule.from_loader(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
-        for workflow in
+        for workflow in schedule.workflows:
             getter.append(
                 {
                     workflow.name: {
@@ -219,7 +219,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         second=0, microsecond=0
     )

-    # NOTE: Create pair of workflow and on from schedule model.
+    # NOTE: Create a pair of workflow and on from schedule model.
     try:
         schedule: Schedule = Schedule.from_loader(name)
     except ValueError as err:
```
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/conf.py

```diff
@@ -582,6 +582,10 @@ Log = Union[


 def get_log() -> type[Log]:  # pragma: no cov
+    """Get logging class that dynamic base on the config log path value.
+
+    :rtype: type[Log]
+    """
     if config.log_path.is_file():
         return SQLiteLog
     return FileLog
```
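The docstring added here only documents behavior that already exists: `get_log` dispatches on the configured log path, returning `SQLiteLog` when `config.log_path` is an existing file and `FileLog` otherwise. A small illustrative sketch of resolving the backend the same way the api module earlier in this diff does with `log=get_log()`:

```python
from ddeutil.workflow.conf import get_log

# Resolve the log backend class from configuration; per the hunk above this
# is SQLiteLog when config.log_path points at a file, else FileLog.
log_cls = get_log()
print(log_cls.__name__)
```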
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/cron.py

```diff
@@ -5,16 +5,17 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Literal
+from typing import Annotated, Literal, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
 from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

-from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner
+from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner, Options
 from .__types import DictData, DictStr, TupleStr
 from .conf import Loader

@@ -47,6 +48,8 @@ def interval2crontab(
     '0 0 1 * *'
     >>> interval2crontab(interval='monthly', day='tuesday', time='12:00')
     '12 0 1 * 2'
+
+    :rtype: str
     """
     d: str = "*"
     if interval == "weekly":
@@ -64,12 +67,19 @@ class On(BaseModel):
     """On Pydantic model (Warped crontab object by model).

     See Also:
-        * ``generate()`` is the main
+        * ``generate()`` is the main use-case of this schedule object.
     """

     model_config = ConfigDict(arbitrary_types_allowed=True)

     # NOTE: This is fields of the base schedule.
+    extras: Annotated[
+        DictData,
+        Field(
+            default_factory=dict,
+            description="An extras mapping parameters",
+        ),
+    ]
     cronjob: Annotated[
         CronJob,
         Field(description="Cron job of this schedule"),
@@ -81,13 +91,6 @@ class On(BaseModel):
             alias="timezone",
         ),
     ] = "Etc/UTC"
-    extras: Annotated[
-        DictData,
-        Field(
-            default_factory=dict,
-            description="An extras mapping parameters",
-        ),
-    ]

     @classmethod
     def from_value(cls, value: DictStr, externals: DictData) -> Self:
@@ -153,6 +156,7 @@ class On(BaseModel):

     @model_validator(mode="before")
     def __prepare_values(cls, values: DictData) -> DictData:
+        """Extract tz key from value and change name to timezone key."""
         if tz := values.pop("tz", None):
             values["timezone"] = tz
         return values
@@ -160,24 +164,55 @@ class On(BaseModel):
     @field_validator("tz")
     def __validate_tz(cls, value: str) -> str:
         """Validate timezone value that able to initialize with ZoneInfo after
-        it passing to this model in before mode.
+        it passing to this model in before mode.
+
+        :rtype: str
+        """
         try:
             _ = ZoneInfo(value)
             return value
         except ZoneInfoNotFoundError as err:
             raise ValueError(f"Invalid timezone: {value}") from err

-    @field_validator(
-
-
-
+    @field_validator(
+        "cronjob", mode="before", json_schema_input_type=Union[CronJob, str]
+    )
+    def __prepare_cronjob(
+        cls, value: str | CronJob, info: ValidationInfo
+    ) -> CronJob:
+        """Prepare crontab value that able to receive with string type.
+        This step will get options kwargs from extras and pass to the
+        CronJob object.
+
+        :rtype: CronJob
+        """
+        extras: DictData = info.data.get("extras", {})
+        return (
+            CronJob(
+                value,
+                option={
+                    name: extras[name]
+                    for name in (f.name for f in fields(Options))
+                    if name in extras
+                },
+            )
+            if isinstance(value, str)
+            else value
+        )

     @field_serializer("cronjob")
     def __serialize_cronjob(self, value: CronJob) -> str:
+        """Serialize the cronjob field that store with CronJob object.
+
+        :rtype: str
+        """
         return str(value)

     def generate(self, start: str | datetime) -> CronRunner:
-        """Return Cron runner object.
+        """Return Cron runner object.
+
+        :rtype: CronRunner
+        """
         if isinstance(start, str):
             start: datetime = datetime.fromisoformat(start)
         elif not isinstance(start, datetime):
@@ -187,6 +222,8 @@ class On(BaseModel):
     def next(self, start: str | datetime) -> CronRunner:
         """Return a next datetime from Cron runner object that start with any
         date that given from input.
+
+        :rtype: CronRunner
         """
         runner: CronRunner = self.generate(start=start)

@@ -209,7 +246,26 @@ class YearOn(On):
         Field(description="Cron job of this schedule"),
     ]

-    @field_validator(
-
-
-
+    @field_validator(
+        "cronjob", mode="before", json_schema_input_type=Union[CronJob, str]
+    )
+    def __prepare_cronjob(
+        cls, value: str | CronJobYear, info: ValidationInfo
+    ) -> CronJobYear:
+        """Prepare crontab value that able to receive with string type.
+
+        :rtype: CronJobYear
+        """
+        extras: DictData = info.data.get("extras", {})
+        return (
+            CronJobYear(
+                value,
+                option={
+                    name: extras[name]
+                    for name in (f.name for f in fields(Options))
+                    if name in extras
+                },
+            )
+            if isinstance(value, str)
+            else value
+        )
```
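The new `__prepare_cronjob` validators let `On` and `YearOn` accept a plain crontab string and forward any `Options` field names found in `extras` into the `CronJob`/`CronJobYear` constructor. A hedged sketch of building an `On` model from raw values; the concrete `Options` field names live in `__cron.py` and are not shown in this diff, so no `extras` keys are passed here:

```python
from ddeutil.workflow.cron import On

# A crontab string is accepted because the before-mode validator wraps it in
# a CronJob; the "tz" key is renamed to "timezone" by __prepare_values.
on: On = On.model_validate({"cronjob": "*/5 * * * *", "tz": "Asia/Bangkok"})

# generate() parses an ISO datetime string and returns a CronRunner.
runner = on.generate("2024-01-01 00:00:00")
```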
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/job.py

```diff
@@ -7,6 +7,8 @@
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define ``runs-on`` key that allow you to run this
 job.
+
+This module include Strategy Model that use on the job strategy field.
 """
 from __future__ import annotations

```
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/params.py

```diff
@@ -3,8 +3,12 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+"""Param Model that use for parsing incoming parameters that pass to the
+Workflow and Schedule objects.
+"""
 from __future__ import annotations

+import decimal
 import logging
 from abc import ABC, abstractmethod
 from datetime import date, datetime
@@ -49,7 +53,7 @@ class BaseParam(BaseModel, ABC):

 class DefaultParam(BaseParam):
     """Default Parameter that will check default if it required. This model do
-    not implement the receive method.
+    not implement the `receive` method.
     """

     required: bool = Field(
@@ -68,6 +72,15 @@ class DefaultParam(BaseParam):
     )


+# TODO: Not implement this parameter yet
+class DateParam(DefaultParam):  # pragma: no cov
+    """Date parameter."""
+
+    type: Literal["date"] = "date"
+
+    def receive(self, value: Optional[str | date] = None) -> date: ...
+
+
 class DatetimeParam(DefaultParam):
     """Datetime parameter."""

@@ -145,6 +158,13 @@ class IntParam(DefaultParam):
         return value


+# TODO: Not implement this parameter yet
+class DecimalParam(DefaultParam):  # pragma: no cov
+    type: Literal["decimal"] = "decimal"
+
+    def receive(self, value: float | None = None) -> decimal.Decimal: ...
+
+
 class ChoiceParam(BaseParam):
     """Choice parameter."""

```
{ddeutil_workflow-0.0.30 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/result.py

```diff
@@ -94,6 +94,7 @@ class Result:
         # NOTE: Check the context has jobs key.
         if "jobs" not in self.__dict__["context"]:
             self.__dict__["context"]["jobs"] = {}
+
         self.__dict__["context"]["jobs"].update(result.context)

         # NOTE: Update running ID from an incoming result.
```