ddeutil-workflow 0.0.17__tar.gz → 0.0.18__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.17/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.18}/PKG-INFO +3 -2
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/README.md +1 -1
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/pyproject.toml +1 -0
- ddeutil_workflow-0.0.18/src/ddeutil/workflow/__about__.py +1 -0
- ddeutil_workflow-0.0.18/src/ddeutil/workflow/__init__.py +72 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/conf.py +2 -2
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/job.py +0 -2
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/repeat.py +1 -1
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/scheduler.py +30 -23
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/stage.py +0 -1
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/utils.py +1 -1
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18/src/ddeutil_workflow.egg-info}/PKG-INFO +3 -2
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil_workflow.egg-info/SOURCES.txt +2 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil_workflow.egg-info/requires.txt +1 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_conf_log.py +15 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_scheduler.py +54 -4
- ddeutil_workflow-0.0.18/tests/test_scheduler_tasks.py +72 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils.py +1 -1
- ddeutil_workflow-0.0.18/tests/test_workflow_job_run.py +28 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_poke.py +6 -2
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_run.py +37 -11
- ddeutil_workflow-0.0.17/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.17/src/ddeutil/workflow/__init__.py +0 -25
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/LICENSE +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/api.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/cli.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/on.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/route.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test__conf_exist.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_job_py.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_job_strategy_run.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_on.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_stage_bash.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_stage_hook.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_stage_py.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_stage_trigger.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils_filter.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils_params.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils_result.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils_tag.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils_template.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_depends.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_matrix.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_on.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_params.py +0 -0
- {ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.17/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.18}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.17
+Version: 0.0.18
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,6 +24,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil>=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
+Requires-Dist: pydantic==2.9.2
 Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: typer<1.0.0,==0.12.5
 Requires-Dist: schedule<2.0.0,==1.2.2
@@ -194,7 +195,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
-| `
+| `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
 | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/README.md
RENAMED
@@ -162,7 +162,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
-| `
+| `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
 | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
ddeutil_workflow-0.0.18/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.18"
ddeutil_workflow-0.0.18/src/ddeutil/workflow/__init__.py
@@ -0,0 +1,72 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from .conf import (
+    Config,
+    FileLog,
+    Loader,
+)
+from .exceptions import (
+    JobException,
+    ParamValueException,
+    StageException,
+    UtilException,
+    WorkflowException,
+)
+from .job import Job, Strategy
+from .on import (
+    On,
+    YearOn,
+    interval2crontab,
+)
+from .scheduler import (
+    Schedule,
+    ScheduleWorkflow,
+    Workflow,
+    WorkflowTaskData,
+)
+from .stage import (
+    BashStage,
+    EmptyStage,
+    HookStage,
+    PyStage,
+    Stage,
+    TriggerStage,
+    handler_result,
+)
+from .utils import (
+    FILTERS,
+    ChoiceParam,
+    DatetimeParam,
+    DefaultParam,
+    FilterFunc,
+    FilterRegistry,
+    IntParam,
+    Param,
+    Result,
+    ReturnTagFunc,
+    StrParam,
+    TagFunc,
+    batch,
+    cross_product,
+    custom_filter,
+    dash2underscore,
+    delay,
+    filter_func,
+    gen_id,
+    get_args_const,
+    get_diff_sec,
+    get_dt_now,
+    has_template,
+    make_exec,
+    make_filter_registry,
+    make_registry,
+    map_post_filter,
+    not_in_template,
+    param2template,
+    queue2str,
+    str2template,
+    tag,
+)
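All of these names are now importable from the package top level. A minimal usage sketch (assuming a `wf-run-python` workflow is defined under the configured config path, as in the tests further down in this diff):

```python
from ddeutil.workflow import Result, Workflow

# Load a workflow definition from the YAML config path and execute it.
workflow: Workflow = Workflow.from_loader(name="wf-run-python")
rs: Result = workflow.execute(
    params={"author-run": "Local Workflow", "run-date": "2024-01-01"},
)
print(rs.status, rs.context)
```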
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/conf.py
RENAMED
@@ -41,8 +41,8 @@ class Config:
     # NOTE: Core
     root_path: Path = Path(os.getenv("WORKFLOW_ROOT_PATH", "."))
     tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
-
-        os.getenv("
+    gen_id_simple_mode: bool = str2bool(
+        os.getenv("WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE", "true")
     )

     # NOTE: Register
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/job.py
RENAMED
@@ -553,8 +553,6 @@ class Job(BaseModel):
         # NOTE: Create event for cancel executor by trigger stop running event.
         event: Event = Event()

-        print("Job Run Fail-Fast:", self.strategy.fail_fast)
-
         # IMPORTANT: Start running strategy execution by multithreading because
         #   it will running by strategy values without waiting previous
         #   execution.
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/scheduler.py
RENAMED
@@ -376,7 +376,7 @@ class Workflow(BaseModel):
                 status=0,
                 context={
                     "params": params,
-                    "
+                    "release": {"status": "skipped", "cron": [str(on.cronjob)]},
                 },
             )

@@ -388,7 +388,7 @@
         # NOTE: Release when the time is nearly to schedule time.
         while (duration := get_diff_sec(next_time, tz=cron_tz)) > (
             sleep_interval + 5
-        ):
+        ):  # pragma: no cov
             logger.debug(
                 f"({self.run_id}) [CORE]: {self.name!r} : {on.cronjob} : "
                 f"Sleep until: {duration}"
@@ -439,7 +439,7 @@
                 status=0,
                 context={
                     "params": params,
-                    "
+                    "release": {"status": "run", "cron": [str(on.cronjob)]},
                 },
             )

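The two hunks above add a `release` entry to the release result context. A short sketch of reading it, mirroring the updated tests/test_workflow_poke.py further down (the workflow name comes from that test's demo config):

```python
from ddeutil.workflow import Workflow

wf = Workflow.from_loader(name="wf-run-matrix-fail-fast", externals={})
for rs in wf.poke(params={"name": "FOO"}):
    # Each released run now reports whether it ran or was skipped, plus its cron.
    release = rs.context["release"]
    print(release["status"], release["cron"])
```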
@@ -492,7 +492,7 @@
         for future in as_completed(futures):
             results.append(future.result(timeout=60))

-        if len(queue) > 0:
+        if len(queue) > 0:  # pragma: no cov
             logger.error(
                 f"({self.run_id}) [POKING]: Log Queue does empty when poking "
                 f"process was finishing."
@@ -717,11 +717,11 @@
             return context

         # NOTE: Raise timeout error.
-        logger.warning(
+        logger.warning(  # pragma: no cov
             f"({self.run_id}) [WORKFLOW]: Execution of workflow, {self.name!r} "
             f", was timeout"
         )
-        raise WorkflowException(
+        raise WorkflowException(  # pragma: no cov
             f"Execution of workflow: {self.name} was timeout"
         )

@@ -765,7 +765,8 @@
                 continue

             # NOTE: Start workflow job execution with deep copy context data
-            #   before release.
+            #   before release. This job execution process will running until
+            #   done before checking all execution timeout or not.
             #
             # {
             #   'params': <input-params>,
@@ -783,10 +784,10 @@
             return context

         # NOTE: Raise timeout error.
-        logger.warning(
+        logger.warning(  # pragma: no cov
             f"({self.run_id}) [WORKFLOW]: Execution of workflow was timeout"
         )
-        raise WorkflowException(
+        raise WorkflowException(  # pragma: no cov
             f"Execution of workflow: {self.name} was timeout"
         )

@@ -832,12 +833,13 @@ class ScheduleWorkflow(BaseModel):
         if on := data.pop("on", []):

             if isinstance(on, str):
-                on = [on]
+                on: list[str] = [on]

             if any(not isinstance(n, (dict, str)) for n in on):
                 raise TypeError("The ``on`` key should be list of str or dict")

-            # NOTE: Pass on value to Loader and keep on model object to on
+            # NOTE: Pass on value to Loader and keep on model object to on
+            #   field.
             data["on"] = [
                 (
                     Loader(n, externals=(externals or {})).data
@@ -902,12 +904,14 @@ class Schedule(BaseModel):
         *,
         externals: DictData | None = None,
     ) -> list[WorkflowTaskData]:
-        """
+        """Return the list of WorkflowTaskData object from the specific input
+        datetime that mapping with the on field.

         :param start_date: A start date that get from the workflow schedule.
         :param queue: A mapping of name and list of datetime for queue.
         :param running: A mapping of name and list of datetime for running.
         :param externals: An external parameters that pass to the Loader object.
+
         :rtype: list[WorkflowTaskData]
         """

@@ -922,12 +926,14 @@ class Schedule(BaseModel):
             queue[wfs.name]: list[datetime] = []
             running[wfs.name]: list[datetime] = []

-            # NOTE: Create default on if it does not passing on the
+            # NOTE: Create the default on value if it does not passing on the
+            #   Schedule object.
             _ons: list[On] = wf.on.copy() if len(wfs.on) == 0 else wfs.on

             for on in _ons:
-                on_gen = on.generate(start_date)
+                on_gen: CronRunner = on.generate(start_date)
                 next_running_date = on_gen.next
+
                 while next_running_date in queue[wfs.name]:
                     next_running_date = on_gen.next

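The `tasks` method described in the docstring above maps a start datetime onto `WorkflowTaskData` objects, one per workflow/on pair. A compact sketch (the schedule and workflow names come from the demo configs used by the new tests/test_scheduler_tasks.py below):

```python
from datetime import datetime

from ddeutil.workflow import Schedule

schedule = Schedule.from_loader("schedule-wf")
queue: dict[str, list[datetime]] = {"wf-scheduling": []}
running: dict[str, list[datetime]] = {"wf-scheduling": []}

# Each task carries the workflow it should release and its own queue/running maps.
for wf_task in schedule.tasks(datetime(2024, 1, 1, 1), queue=queue, running=running):
    print(wf_task.workflow.name)
```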
@@ -957,13 +963,14 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:

     :param cancel_on_failure: A flag that allow to return the CancelJob or not
         it will raise.
-
+
+    :rtype: DecoratorCancelJob
     """

     def decorator(func: ReturnCancelJob) -> ReturnCancelJob:
         try:
             # NOTE: Check the function that want to handle is method or not.
-            if inspect.ismethod(func):
+            if inspect.ismethod(func):  # pragma: no cov

                 @wraps(func)
                 def wrapper(self, *args, **kwargs):
@@ -977,7 +984,7 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:

             return wrapper

-        except Exception as err:
+        except Exception as err:  # pragma: no cov
             logger.exception(err)
             if cancel_on_failure:
                 return CancelJob
@@ -1005,7 +1012,7 @@ class WorkflowTaskData:
         *,
         waiting_sec: int = 60,
         sleep_interval: int = 15,
-    ) -> None:
+    ) -> None:  # pragma: no cov
         """Workflow release, it will use with the same logic of
         `workflow.release` method.

@@ -1119,7 +1126,7 @@
             future_running_time in self.running[wf.name]
             or future_running_time in self.queue[wf.name]
             or future_running_time < finish_time
-        ):
+        ):  # pragma: no cov
             future_running_time: datetime = gen.next

         heappush(self.queue[wf.name], future_running_time)
@@ -1134,7 +1141,7 @@
         return NotImplemented


-@catch_exceptions(cancel_on_failure=True)
+@catch_exceptions(cancel_on_failure=True)  # pragma: no cov
 def workflow_task(
     workflow_tasks: list[WorkflowTaskData],
     stop: datetime,
@@ -1233,7 +1240,7 @@ def workflow_task(
     logger.debug(f"[WORKFLOW]: {'=' * 100}")


-def workflow_monitor(threads: dict[str, Thread]) -> None:
+def workflow_monitor(threads: dict[str, Thread]) -> None:  # pragma: no cov
     """Workflow schedule for monitoring long running thread from the schedule
     control.

@@ -1255,7 +1262,7 @@ def workflow_control(
     schedules: list[str],
     stop: datetime | None = None,
     externals: DictData | None = None,
-) -> list[str]:
+) -> list[str]:  # pragma: no cov
     """Workflow scheduler control.

     :param schedules: A list of workflow names that want to schedule running.
@@ -1343,7 +1350,7 @@ def workflow_runner(
     stop: datetime | None = None,
     externals: DictData | None = None,
     excluded: list[str] | None = None,
-) -> list[str]:
+) -> list[str]:  # pragma: no cov
     """Workflow application that running multiprocessing schedule with chunk of
     workflows that exists in config path.

{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/stage.py
RENAMED
@@ -127,7 +127,6 @@ def handler_result(message: str | None = None) -> DecoratorResult:
                 logger.error(
                     f"({self.run_id}) [STAGE]: {err.__class__.__name__}: {err}"
                 )
-                print("Stage Raise error:", config.stage_raise_error)
                 if config.stage_raise_error:
                     # NOTE: If error that raise from stage execution course by
                     #   itself, it will return that error with previous
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil/workflow/utils.py
RENAMED
@@ -100,7 +100,7 @@ def gen_id(
     if not isinstance(value, str):
         value: str = str(value)

-    if config.
+    if config.gen_id_simple_mode:
         return hash_str(f"{(value if sensitive else value.lower())}", n=10) + (
             f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}" if unique else ""
         )
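The branch above is controlled by the new `gen_id_simple_mode` config flag (driven by `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE`). A small sketch of both modes, using the exact values asserted in the updated tests/test_utils.py below:

```python
from unittest.mock import patch

from ddeutil.workflow import utils

# Default simple mode produces a short numeric hash.
assert utils.gen_id("{}") == "1354680202"

# With the flag patched off, gen_id falls back to an md5 digest of the value.
with patch("ddeutil.workflow.utils.config.gen_id_simple_mode", False):
    assert utils.gen_id("{}") == "99914b932bd37a50b983c5e7c90ae93b"
```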
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18/src/ddeutil_workflow.egg-info}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.17
+Version: 0.0.18
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,6 +24,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil>=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
+Requires-Dist: pydantic==2.9.2
 Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: typer<1.0.0,==0.12.5
 Requires-Dist: schedule<2.0.0,==1.2.2
@@ -194,7 +195,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
-| `
+| `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
 | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/src/ddeutil_workflow.egg-info/SOURCES.txt
RENAMED
@@ -34,6 +34,7 @@ tests/test_job_strategy_run.py
 tests/test_on.py
 tests/test_params.py
 tests/test_scheduler.py
+tests/test_scheduler_tasks.py
 tests/test_stage.py
 tests/test_stage_bash.py
 tests/test_stage_hook.py
@@ -47,6 +48,7 @@ tests/test_utils_tag.py
 tests/test_utils_template.py
 tests/test_workflow.py
 tests/test_workflow_depends.py
+tests/test_workflow_job_run.py
 tests/test_workflow_matrix.py
 tests/test_workflow_on.py
 tests/test_workflow_params.py
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_conf_log.py
RENAMED
@@ -50,7 +50,22 @@ def test_conf_log_file_do_first():
     assert log.name == "wf-demo-logging"


+@mock.patch.object(Config, "enable_write_log", True)
 def test_conf_log_file_find_logs(root_path):
+    log = FileLog.model_validate(
+        obj={
+            "name": "wf-scheduling",
+            "on": "*/2 * * * *",
+            "release": datetime(2024, 1, 1, 1),
+            "context": {
+                "params": {"name": "foo"},
+            },
+            "parent_run_id": None,
+            "run_id": "558851633820240817184358131811",
+            "update": datetime.now(),
+        },
+    )
+    log.save(excluded=None)
     log = next(FileLog.find_logs(name="wf-scheduling"))
     assert isinstance(log, FileLog)

{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_scheduler.py
RENAMED
@@ -1,30 +1,80 @@
 from datetime import datetime

+import pytest
+import yaml
 from ddeutil.workflow import Workflow
 from ddeutil.workflow.conf import Loader
 from ddeutil.workflow.on import On
 from ddeutil.workflow.scheduler import Schedule, WorkflowTaskData


-def 
+def test_schedule():
     schedule = Schedule.from_loader("schedule-wf")
     print(schedule)

+    schedule = Schedule.from_loader("schedule-common-wf")
+    print(schedule)
+
+
+def test_schedule_from_loader_raise(test_path):
+    test_file = test_path / "conf/demo/03_schedule_raise.yml"
+
+    with test_file.open(mode="w") as f:
+        yaml.dump(
+            {
+                "schedule-raise-wf": {
+                    "type": "ddeutil.workflow.on.On",
+                    "workflows": [
+                        {"name": "wf-scheduling"},
+                    ],
+                }
+            },
+            f,
+        )
+
+    with pytest.raises(ValueError):
+        Schedule.from_loader("schedule-raise-wf")

-
+    with test_file.open(mode="w") as f:
+        yaml.dump(
+            {
+                "schedule-raise-wf": {
+                    "type": "scheduler.Schedule",
+                    "workflows": [
+                        {
+                            "name": "wf-scheduling",
+                            "on": [
+                                ["every_3_minute_bkk"],
+                                ["every_minute_bkk"],
+                            ],
+                        },
+                    ],
+                }
+            },
+            f,
+        )
+
+    with pytest.raises(TypeError):
+        Schedule.from_loader("schedule-raise-wf")
+
+    test_file.unlink(missing_ok=True)
+
+
+def test_schedule_model_default_on():
     schedule = Schedule.from_loader("schedule-default-wf")
     print(schedule)


-def 
+def test_schedule_loader_find_schedule():
     for finding in Loader.finds(Schedule, excluded=[]):
         print(finding)


-def 
+def test_schedule_remove_workflow_task():
     queue: dict[str, list[datetime]] = {"wf-scheduling": []}
     running: dict[str, list[datetime]] = {"wf-scheduling": []}
     pipeline_tasks: list[WorkflowTaskData] = []
+
     wf: Workflow = Workflow.from_loader("wf-scheduling", externals={})
     for on in wf.on:
         pipeline_tasks.append(
ddeutil_workflow-0.0.18/tests/test_scheduler_tasks.py
@@ -0,0 +1,72 @@
+from datetime import datetime
+from unittest import mock
+from zoneinfo import ZoneInfo
+
+from ddeutil.workflow.conf import Config
+from ddeutil.workflow.scheduler import Schedule, WorkflowTaskData
+
+
+def test_schedule_tasks():
+    schedule = Schedule.from_loader("schedule-wf")
+
+    queue: dict[str, list[datetime]] = {"wf-scheduling": []}
+    running: dict[str, list[datetime]] = {"wf-scheduling": []}
+    for wf_task in schedule.tasks(
+        datetime(2024, 1, 1, 1),
+        queue=queue,
+        running=running,
+    ):
+        assert wf_task.workflow.name == "wf-scheduling"
+
+    task = schedule.tasks(
+        datetime(2024, 1, 1, 1),
+        queue=queue,
+        running=running,
+    )[0]
+
+    assert task != datetime(2024, 1, 1, 1)
+    assert task == task
+    assert task == WorkflowTaskData(
+        workflow=task.workflow,
+        on=task.on,
+        params={},
+        queue={},
+        running={},
+    )
+
+
+@mock.patch.object(Config, "enable_write_log", False)
+def test_schedule_tasks_release():
+    schedule = Schedule.from_loader("schedule-common-wf")
+
+    queue: dict[str, list[datetime]] = {"wf-scheduling": []}
+    running: dict[str, list[datetime]] = {"wf-scheduling": []}
+    for wf_task in schedule.tasks(
+        datetime(2024, 1, 1, 1, 2, 30),
+        queue=queue,
+        running=running,
+    ):
+        assert wf_task.workflow.name == "wf-scheduling"
+        wf_task.release(waiting_sec=60)
+
+
+@mock.patch.object(Config, "enable_write_log", False)
+def test_schedule_tasks_release_skip():
+    schedule = Schedule.from_loader("schedule-common-wf")
+
+    queue: dict[str, list[datetime]] = {"wf-scheduling": []}
+    running: dict[str, list[datetime]] = {"wf-scheduling": []}
+    for wf_task in schedule.tasks(
+        datetime(2024, 1, 1, 1),
+        queue=queue,
+        running=running,
+    ):
+        assert wf_task.workflow.name == "wf-scheduling"
+        wf_task.release(waiting_sec=0)
+
+    assert queue == {
+        "wf-scheduling": [
+            datetime(2024, 1, 1, 1, tzinfo=ZoneInfo("Asia/Bangkok")),
+        ]
+    }
+    assert running == {"wf-scheduling": []}
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_utils.py
RENAMED
@@ -8,7 +8,7 @@ def test_gen_id():
     assert "1354680202" == utils.gen_id("{}")
     assert "1354680202" == utils.gen_id("{}", sensitive=False)

-    with patch("ddeutil.workflow.utils.config.
+    with patch("ddeutil.workflow.utils.config.gen_id_simple_mode", False):
         assert "99914b932bd37a50b983c5e7c90ae93b" == utils.gen_id("{}")


ddeutil_workflow-0.0.18/tests/test_workflow_job_run.py
@@ -0,0 +1,28 @@
+import pytest
+from ddeutil.workflow import Workflow
+from ddeutil.workflow.exceptions import WorkflowException
+from ddeutil.workflow.utils import Result
+
+
+def test_workflow_execute_job():
+    workflow = Workflow.from_loader(name="wf-run-python")
+    rs: Result = workflow.execute_job(
+        job_id="final-job",
+        params={
+            "author-run": "Local Workflow",
+            "run-date": "2024-01-01",
+        },
+    )
+    print(rs.context)
+
+
+def test_workflow_execute_job_raise():
+    workflow = Workflow.from_loader(name="wf-run-python")
+    with pytest.raises(WorkflowException):
+        workflow.execute_job(
+            job_id="not-found-job",
+            params={
+                "author-run": "Local Workflow",
+                "run-date": "2024-01-01",
+            },
+        )
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_poke.py
RENAMED
@@ -1,4 +1,5 @@
 from ddeutil.workflow import Workflow
+from ddeutil.workflow.utils import Result


 def test_workflow_poke_no_on():
@@ -8,9 +9,12 @@ def test_workflow_poke_no_on():

 def test_workflow_poke():
     wf = Workflow.from_loader(name="wf-run-matrix-fail-fast", externals={})
-    results = wf.poke(params={"name": "FOO"})
+    results: list[Result] = wf.poke(params={"name": "FOO"})
     for rs in results:
-
+        assert "status" in rs.context["release"]
+        assert "cron" in rs.context["release"]
+
+    wf.poke(params={"name": "FOO"})


 def test_workflow_poke_with_release_params():
{ddeutil_workflow-0.0.17 → ddeutil_workflow-0.0.18}/tests/test_workflow_run.py
RENAMED
@@ -1,16 +1,13 @@
 from datetime import datetime
 from unittest import mock

-
+from ddeutil.workflow import Workflow
 from ddeutil.workflow.conf import Config
 from ddeutil.workflow.utils import Result


 def test_workflow_run_py():
-    workflow = 
-        name="wf-run-python",
-        externals={},
-    )
+    workflow = Workflow.from_loader(name="wf-run-python")
     rs: Result = workflow.execute(
         params={
             "author-run": "Local Workflow",
@@ -68,7 +65,7 @@ def test_workflow_run_py():

 def test_workflow_run_py_with_parallel():
     with mock.patch.object(Config, "max_job_parallel", 3):
-        workflow = 
+        workflow = Workflow.from_loader(
             name="wf-run-python",
             externals={},
         )
@@ -128,11 +125,40 @@ def test_workflow_run_py_with_parallel():


 def test_workflow_run_py_raise():
-    workflow = 
+    workflow = Workflow.from_loader("wf-run-python-raise")
     rs = workflow.execute(params={})
-
-    assert 
+    assert rs.status == 1
+    assert rs.context == {
+        "params": {},
+        "jobs": {},
+        "error": rs.context["error"],
+        "error_message": (
+            "WorkflowException: Get job execution error first-job: "
+            "JobException: Get stage execution error: "
+            "StageException: PyStage: \n\t"
+            "ValueError: Testing raise error inside PyStage!!!"
+        ),
+    }

-    import json

-
+@mock.patch.object(Config, "max_job_parallel", 2)
+def test_workflow_run_py_raise_parallel():
+    workflow = Workflow.from_loader("wf-run-python-raise")
+    rs = workflow.execute(params={})
+    assert rs.status == 1
+    assert rs.context == {
+        "params": {},
+        "jobs": {
+            "second-job": {
+                "matrix": {},
+                "stages": {"1772094681": {"outputs": {}}},
+            }
+        },
+        "error": rs.context["error"],
+        "error_message": (
+            "WorkflowException: Get job execution error first-job: "
+            "JobException: Get stage execution error: "
+            "StageException: PyStage: \n\t"
+            "ValueError: Testing raise error inside PyStage!!!"
+        ),
+    }
ddeutil_workflow-0.0.17/src/ddeutil/workflow/__about__.py
@@ -1 +0,0 @@
-__version__: str = "0.0.17"
ddeutil_workflow-0.0.17/src/ddeutil/workflow/__init__.py
@@ -1,25 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from .conf import Config, FileLog, Loader
-from .exceptions import (
-    JobException,
-    ParamValueException,
-    StageException,
-    UtilException,
-    WorkflowException,
-)
-from .job import Job, Strategy
-from .on import On, interval2crontab
-from .scheduler import (
-    Schedule,
-    Workflow,
-)
-from .stage import Stage, handler_result
-from .utils import (
-    Param,
-    dash2underscore,
-    param2template,
-)