ddeutil-workflow 0.0.31__tar.gz → 0.0.32__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/PKG-INFO +18 -3
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/README.md +17 -2
- ddeutil_workflow-0.0.32/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/api.py +4 -6
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/route.py +4 -4
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/job.py +2 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/params.py +5 -2
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/scheduler.py +113 -41
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/stage.py +2 -2
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/templates.py +13 -4
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/PKG-INFO +18 -3
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -0
- ddeutil_workflow-0.0.32/tests/test_schedule_pending.py +13 -0
- ddeutil_workflow-0.0.31/src/ddeutil/workflow/__about__.py +0 -1
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/LICENSE +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/pyproject.toml +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/repeat.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/conf.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/cron.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/hook.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/result.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/utils.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/workflow.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_cron_on.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_hook_tag.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_job_exec_py.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_schedule_control.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_schedule_workflow.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_workflow_exec_poke.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_workflow_exec_release.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_workflow_job_exec.py +0 -0
- {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.31
+Version: 0.0.32
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -110,7 +110,7 @@ flowchart LR
 E -.->|read| G
 ```
 
-> [!
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -201,7 +201,8 @@ result: Result = workflow.execute(
 )
 ```
 
-So, this package provide the `Schedule` template for this action
+So, this package provide the `Schedule` template for this action, and you can dynamic
+pass the parameters for changing align with that running time by the `release` prefix.
 
 ```yaml
 schedule-run-local-wf:
@@ -219,6 +220,20 @@ schedule-run-local-wf:
 asat-dt: "${{ release.logical_date }}"
 ```
 
+The main method of the `Schedule` model that use to running is `pending`. If you
+do not pass the `stop` date on this method, it will use config with `WORKFLOW_APP_STOP_BOUNDARY_DELTA`
+key for generate this stop date.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration
 
 The main configuration that use to dynamic changing this workflow engine for your
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/README.md
@@ -78,7 +78,7 @@ flowchart LR
 E -.->|read| G
 ```
 
-> [!
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -169,7 +169,8 @@ result: Result = workflow.execute(
 )
 ```
 
-So, this package provide the `Schedule` template for this action
+So, this package provide the `Schedule` template for this action, and you can dynamic
+pass the parameters for changing align with that running time by the `release` prefix.
 
 ```yaml
 schedule-run-local-wf:
@@ -187,6 +188,20 @@ schedule-run-local-wf:
 asat-dt: "${{ release.logical_date }}"
 ```
 
+The main method of the `Schedule` model that use to running is `pending`. If you
+do not pass the `stop` date on this method, it will use config with `WORKFLOW_APP_STOP_BOUNDARY_DELTA`
+key for generate this stop date.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration
 
 The main configuration that use to dynamic changing this workflow engine for your
ddeutil_workflow-0.0.32/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.32"
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/api.py
@@ -43,9 +43,7 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
     a.state.workflow_queue = {}
 
     yield {
-
-        "upper_result": a.state.upper_result,
-        # NOTE: Scheduler value should be contain a key of workflow workflow and
+        # NOTE: Scheduler value should be contained a key of workflow and
         #   list of datetime of queue and running.
         #
         #   ... {
@@ -88,7 +86,7 @@ if config.enable_route_workflow:
 
 # NOTE: Enable the schedule route.
 if config.enable_route_schedule:
-    from ..conf import
+    from ..conf import get_log
     from ..scheduler import schedule_task
     from .route import schedule_route
 
@@ -108,11 +106,11 @@ if config.enable_route_schedule:
             stop=datetime.now(config.tz) + timedelta(minutes=1),
             queue=app.state.workflow_queue,
             threads=app.state.workflow_threads,
-            log=
+            log=get_log(),
         )
 
     @schedule_route.on_event("startup")
-    @repeat_at(cron="*/5 * * * *")
+    @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
         logger.debug("[MONITOR]: Start monitoring threading.")
         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/api/route.py
@@ -169,7 +169,7 @@ async def get_schedules(name: str):
     )
 
 
-@schedule_route.get(path="/deploy")
+@schedule_route.get(path="/deploy/")
 async def get_deploy_schedulers(request: Request):
     snapshot = copy.deepcopy(request.state.scheduler)
     return {"schedule": snapshot}
@@ -178,9 +178,9 @@ async def get_deploy_schedulers(request: Request):
 @schedule_route.get(path="/deploy/{name}")
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-
+        schedule = Schedule.from_loader(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
-        for workflow in
+        for workflow in schedule.workflows:
             getter.append(
                 {
                     workflow.name: {
@@ -219,7 +219,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         second=0, microsecond=0
     )
 
-    # NOTE: Create pair of workflow and on from schedule model.
+    # NOTE: Create a pair of workflow and on from schedule model.
     try:
         schedule: Schedule = Schedule.from_loader(name)
     except ValueError as err:
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/job.py
@@ -7,6 +7,8 @@
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define ``runs-on`` key that allow you to run this
 job.
+
+This module include Strategy Model that use on the job strategy field.
 """
 from __future__ import annotations
 
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/params.py
@@ -3,6 +3,9 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+"""Param Model that use for parsing incoming parameters that pass to the
+Workflow and Schedule objects.
+"""
 from __future__ import annotations
 
 import decimal
@@ -70,7 +73,7 @@ class DefaultParam(BaseParam):
 
 
 # TODO: Not implement this parameter yet
-class DateParam(DefaultParam):
+class DateParam(DefaultParam): # pragma: no cov
     """Date parameter."""
 
     type: Literal["date"] = "date"
@@ -156,7 +159,7 @@ class IntParam(DefaultParam):
 
 
 # TODO: Not implement this parameter yet
-class DecimalParam(DefaultParam):
+class DecimalParam(DefaultParam): # pragma: no cov
     type: Literal["decimal"] = "decimal"
 
     def receive(self, value: float | None = None) -> decimal.Decimal: ...
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/scheduler.py
@@ -275,37 +275,6 @@ class Schedule(BaseModel):
 
         return cls.model_validate(obj=loader_data)
 
-    @classmethod
-    def extract_tasks(
-        cls,
-        schedules: list[str],
-        start_date: datetime,
-        queue: dict[str, ReleaseQueue],
-        externals: DictData | None = None,
-    ) -> list[WorkflowTask]:
-        """Return the list of WorkflowTask object from all schedule object that
-        include in an input schedules argument.
-
-        :param schedules: A list of schedule name that will use `from_loader`
-            method.
-        :param start_date: A start date that get from the workflow schedule.
-        :param queue: A mapping of name and list of datetime for queue.
-        :param externals: An external parameters that pass to the Loader object.
-
-        :rtype: list[WorkflowTask]
-        """
-        tasks: list[WorkflowTask] = []
-        for name in schedules:
-            schedule: Schedule = Schedule.from_loader(name, externals=externals)
-            tasks.extend(
-                schedule.tasks(
-                    start_date,
-                    queue=queue,
-                    externals=externals,
-                ),
-            )
-        return tasks
-
     def tasks(
         self,
         start_date: datetime,
@@ -339,6 +308,99 @@ class Schedule(BaseModel):
 
         return workflow_tasks
 
+    def pending(
+        self,
+        *,
+        stop: datetime | None = None,
+        externals: DictData | None = None,
+        log: type[Log] | None = None,
+    ) -> None: # pragma: no cov
+        """Pending this schedule tasks with the schedule package.
+
+        :param stop: A datetime value that use to stop running schedule.
+        :param externals: An external parameters that pass to Loader.
+        :param log: A log class that use on the workflow task release for
+            writing its release log context.
+        """
+        try:
+            from schedule import Scheduler
+        except ImportError:
+            raise ImportError(
+                "Should install schedule package before use this method."
+            ) from None
+
+        # NOTE: Get default logging.
+        log: type[Log] = log or get_log()
+        scheduler: Scheduler = Scheduler()
+
+        # NOTE: Create the start and stop datetime.
+        start_date: datetime = datetime.now(tz=config.tz)
+        stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
+
+        # IMPORTANT: Create main mapping of queue and thread object.
+        queue: dict[str, ReleaseQueue] = {}
+        threads: ReleaseThreads = {}
+
+        start_date_waiting: datetime = start_date.replace(
+            second=0, microsecond=0
+        ) + timedelta(minutes=1)
+
+        # NOTE: This schedule job will start every minute at :02 seconds.
+        (
+            scheduler.every(1)
+            .minutes.at(":02")
+            .do(
+                schedule_task,
+                tasks=self.tasks(
+                    start_date_waiting, queue=queue, externals=externals
+                ),
+                stop=stop_date,
+                queue=queue,
+                threads=threads,
+                log=log,
+            )
+            .tag("control")
+        )
+
+        # NOTE: Checking zombie task with schedule job will start every 5 minute at
+        #   :10 seconds.
+        (
+            scheduler.every(5)
+            .minutes.at(":10")
+            .do(
+                monitor,
+                threads=threads,
+            )
+            .tag("monitor")
+        )
+
+        # NOTE: Start running schedule
+        logger.info(
+            f"[SCHEDULE]: Schedule with stopper: {stop_date:%Y-%m-%d %H:%M:%S}"
+        )
+
+        while True:
+            scheduler.run_pending()
+            time.sleep(1)
+
+            # NOTE: Break the scheduler when the control job does not exist.
+            if not scheduler.get_jobs("control"):
+                scheduler.clear("monitor")
+
+                while len(threads) > 0:
+                    logger.warning(
+                        "[SCHEDULE]: Waiting schedule release thread that still "
+                        "running in background."
+                    )
+                    delay(10)
+                    monitor(threads)
+
+                break
+
+        logger.warning(
+            f"[SCHEDULE]: Queue: {[list(queue[wf].queue) for wf in queue]}"
+        )
+
 
 ResultOrCancelJob = Union[type[CancelJob], Result]
 ReturnCancelJob = Callable[P, ResultOrCancelJob]
@@ -394,7 +456,7 @@ def schedule_task(
     method in background. This function do the same logic as the workflow poke
     method, but it runs with map of schedules and the on values.
 
-    This schedule task start runs every minute at ':02' second and it does
+    This schedule task start runs every minute at ':02' second, and it does
     not allow you to run with offset time.
 
     :param tasks: A list of WorkflowTask object.
@@ -414,15 +476,16 @@ def schedule_task(
     #   function. It will deplicate running with different schedule value
     #   because I use current time in this condition.
     #
-    # For example, if a
-    #
-    # This condition
+    #   For example, if a queue has a time release be '00:02:00' that should
+    #   to run and its schedule has '*/2 * * * *' and '*/35 * * * *'.
+    #   This condition make this function create 2 threading tasks.
     #
-    #
-    #
+    #       '00:02:00' --> '*/2 * * * *'  --> run
+    #                  --> '*/35 * * * *' --> skip
     #
     for task in tasks:
 
+        # NOTE: Get the ReleaseQueue with an alias of the WorkflowTask.
         q: ReleaseQueue = queue[task.alias]
 
         # NOTE: Start adding queue and move the runner date in the WorkflowTask.
@@ -548,15 +611,24 @@ def schedule_control(
         second=0, microsecond=0
     ) + timedelta(minutes=1)
 
+    tasks: list[WorkflowTask] = []
+    for name in schedules:
+        schedule: Schedule = Schedule.from_loader(name, externals=externals)
+        tasks.extend(
+            schedule.tasks(
+                start_date_waiting,
+                queue=queue,
+                externals=externals,
+            ),
+        )
+
     # NOTE: This schedule job will start every minute at :02 seconds.
     (
         scheduler.every(1)
         .minutes.at(":02")
         .do(
             schedule_task,
-            tasks=
-                schedules, start_date_waiting, queue, externals=externals
-            ),
+            tasks=tasks,
             stop=stop_date,
             queue=queue,
             threads=threads,
@@ -596,7 +668,7 @@ def schedule_control(
                     "[SCHEDULE]: Waiting schedule release thread that still "
                     "running in background."
                 )
-                delay(
+                delay(10)
                 monitor(threads)
 
             break
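The scheduler.py changes above remove the `Schedule.extract_tasks` classmethod (its loop now lives inside `schedule_control`) and add a `Schedule.pending` method that runs a blocking release loop through the optional `schedule` package until a stop boundary is reached. A minimal usage sketch, assuming that optional dependency is installed and that `ddeutil.workflow.conf` exposes the same `config` object the code above relies on:

```python
from datetime import datetime, timedelta

from ddeutil.workflow import Schedule
from ddeutil.workflow.conf import config  # assumed import path for the config object

# Load a schedule template from the YAML config (name taken from the README
# example in this diff) and run its blocking release loop for about one hour.
schedule = Schedule.from_loader("schedule-run-local-wf")
schedule.pending(stop=datetime.now(config.tz) + timedelta(hours=1))
```

If `stop` is omitted, the loop falls back to `config.stop_boundary_delta` (the `WORKFLOW_APP_STOP_BOUNDARY_DELTA` setting), which the new `tests/test_schedule_pending.py` later in this diff caps at one minute by patching `Config`.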
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/stage.py
@@ -668,12 +668,12 @@ Stage = Union[
 
 
 # TODO: Not implement this stages yet
-class ParallelStage(BaseModel):
+class ParallelStage(BaseModel): # pragma: no cov
     parallel: list[Stage]
     max_parallel_core: int = Field(default=2)
 
 
 # TODO: Not implement this stages yet
-class ForEachStage(BaseModel):
+class ForEachStage(BaseModel): # pragma: no cov
     foreach: list[str]
     stages: list[Stage]
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil/workflow/templates.py
@@ -79,7 +79,7 @@ def custom_filter(name: str) -> Callable[P, FilterFunc]:
 def make_filter_registry() -> dict[str, FilterRegistry]:
     """Return registries of all functions that able to called with task.
 
-    :rtype: dict[str,
+    :rtype: dict[str, FilterRegistry]
     """
     rs: dict[str, FilterRegistry] = {}
     for module in config.regis_filter:
@@ -108,6 +108,8 @@ def get_args_const(
 ) -> tuple[str, list[Constant], dict[str, Constant]]:
     """Get arguments and keyword-arguments from function calling string.
 
+    :param expr: An expr string value.
+
     :rtype: tuple[str, list[Constant], dict[str, Constant]]
     """
     try:
@@ -150,6 +152,11 @@ def get_args_from_filter(
 ) -> tuple[str, FilterRegistry, list[Any], dict[Any, Any]]: # pragma: no cov
     """Get arguments and keyword-arguments from filter function calling string.
     and validate it with the filter functions mapping dict.
+
+    :param ft:
+    :param filters:
+
+    :rtype: tuple[str, FilterRegistry, list[Any], dict[Any, Any]]
     """
     func_name, _args, _kwargs = get_args_const(ft)
     args: list[Any] = [arg.value for arg in _args]
@@ -243,7 +250,7 @@ def str2template(
     params: DictData,
     *,
     filters: dict[str, FilterRegistry] | None = None,
-) ->
+) -> str:
     """(Sub-function) Pass param to template string that can search by
     ``RE_CALLER`` regular expression.
 
@@ -255,6 +262,8 @@ def str2template(
     :param params: A parameter value that getting with matched regular
         expression.
     :param filters:
+
+    :rtype: str
     """
     filters: dict[str, FilterRegistry] = filters or make_filter_registry()
 
@@ -295,7 +304,7 @@ def str2template(
     return search_env_replace(value)
 
 
-def param2template(value:
+def param2template(value: T, params: DictData) -> T:
     """Pass param to template string that can search by ``RE_CALLER`` regular
     expression.
 
@@ -303,7 +312,7 @@ def param2template(value: Any, params: DictData) -> Any:
     :param params: A parameter value that getting with matched regular
         expression.
 
-    :rtype:
+    :rtype: T
     :returns: An any getter value from the params input.
     """
     filters: dict[str, FilterRegistry] = make_filter_registry()
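Beyond the docstring fixes, the templates.py hunks above change `param2template` from `Any -> Any` to a generic `T -> T`, signalling that the returned value keeps the container type of its input while every `${{ ... }}` caller inside it is resolved by `str2template`. A small sketch of that behaviour (the payload and parameter names here are made up for illustration):

```python
from ddeutil.workflow.templates import param2template

# Hypothetical payload whose string value contains a ${{ ... }} caller.
value = {"asat-dt": "${{ params.name }}", "retry": 1}

# The caller is resolved against the given params mapping and the result is
# still a dict, matching the new T -> T annotation.
filled = param2template(value, params={"params": {"name": "demo"}})
print(filled)  # expected: {'asat-dt': 'demo', 'retry': 1}
```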
{ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.32}/src/ddeutil_workflow.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.31
+Version: 0.0.32
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -110,7 +110,7 @@ flowchart LR
 E -.->|read| G
 ```
 
-> [!
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -201,7 +201,8 @@ result: Result = workflow.execute(
 )
 ```
 
-So, this package provide the `Schedule` template for this action
+So, this package provide the `Schedule` template for this action, and you can dynamic
+pass the parameters for changing align with that running time by the `release` prefix.
 
 ```yaml
 schedule-run-local-wf:
@@ -219,6 +220,20 @@ schedule-run-local-wf:
 asat-dt: "${{ release.logical_date }}"
 ```
 
+The main method of the `Schedule` model that use to running is `pending`. If you
+do not pass the `stop` date on this method, it will use config with `WORKFLOW_APP_STOP_BOUNDARY_DELTA`
+key for generate this stop date.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration
 
 The main configuration that use to dynamic changing this workflow engine for your
ddeutil_workflow-0.0.32/tests/test_schedule_pending.py
@@ -0,0 +1,13 @@
+from datetime import timedelta
+from unittest import mock
+
+import pytest
+from ddeutil.workflow.conf import Config
+from ddeutil.workflow.scheduler import Schedule
+
+
+@pytest.mark.schedule
+@mock.patch.object(Config, "stop_boundary_delta", timedelta(minutes=1))
+@mock.patch.object(Config, "enable_write_log", False)
+def test_schedule_pending():
+    Schedule.from_loader("schedule-every-minute-wf").pending()
ddeutil_workflow-0.0.31/src/ddeutil/workflow/__about__.py
@@ -1 +0,0 @@
-__version__: str = "0.0.31"