ddeutil-workflow 0.0.26.post0__py3-none-any.whl → 0.0.27__py3-none-any.whl
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +24 -16
- ddeutil/workflow/conf.py +169 -105
- ddeutil/workflow/exceptions.py +0 -3
- ddeutil/workflow/hook.py +153 -0
- ddeutil/workflow/job.py +1 -1
- ddeutil/workflow/scheduler.py +2 -2
- ddeutil/workflow/stage.py +3 -55
- ddeutil/workflow/templates.py +334 -0
- ddeutil/workflow/utils.py +3 -391
- ddeutil/workflow/workflow.py +6 -7
- {ddeutil_workflow-0.0.26.post0.dist-info → ddeutil_workflow-0.0.27.dist-info}/METADATA +24 -24
- ddeutil_workflow-0.0.27.dist-info/RECORD +25 -0
- ddeutil_workflow-0.0.26.post0.dist-info/RECORD +0 -23
- {ddeutil_workflow-0.0.26.post0.dist-info → ddeutil_workflow-0.0.27.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.26.post0.dist-info → ddeutil_workflow-0.0.27.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.26.post0.dist-info → ddeutil_workflow-0.0.27.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.26.post0"
+__version__: str = "0.0.27"
ddeutil/workflow/__init__.py
CHANGED
@@ -3,11 +3,15 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-from .__cron import CronRunner
+from .__cron import CronJob, CronRunner
 from .conf import (
     Config,
-    FileLog,
     Loader,
+    Log,
+    config,
+    env,
+    get_log,
+    get_logger,
 )
 from .cron import (
     On,
@@ -21,6 +25,13 @@ from .exceptions import (
     UtilException,
     WorkflowException,
 )
+from .hook import (
+    ReturnTagFunc,
+    TagFunc,
+    extract_hook,
+    make_registry,
+    tag,
+)
 from .job import (
     Job,
     Strategy,
@@ -45,33 +56,30 @@ from .stage import (
     PyStage,
     Stage,
     TriggerStage,
-    extract_hook,
 )
-from .utils import (
+from .templates import (
     FILTERS,
     FilterFunc,
     FilterRegistry,
-
-
+    custom_filter,
+    get_args_const,
+    has_template,
+    make_filter_registry,
+    map_post_filter,
+    not_in_template,
+    param2template,
+    str2template,
+)
+from .utils import (
     batch,
     cross_product,
-    custom_filter,
     dash2underscore,
     delay,
     filter_func,
     gen_id,
-    get_args_const,
     get_diff_sec,
     get_dt_now,
-    has_template,
     make_exec,
-    make_filter_registry,
-    make_registry,
-    map_post_filter,
-    not_in_template,
-    param2template,
-    str2template,
-    tag,
 )
 from .workflow import (
     Workflow,
ddeutil/workflow/conf.py
CHANGED
@@ -13,27 +13,30 @@ from collections.abc import Iterator
 from datetime import datetime, timedelta
 from functools import cached_property, lru_cache
 from pathlib import Path
-from typing import ClassVar, Optional, TypeVar, Union
+from typing import ClassVar, Optional, Union
 from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2bool
 from ddeutil.io import YamlFlResolve
-from dotenv import load_dotenv
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
 from .__types import DictData, TupleStr
 
-AnyModel = TypeVar("AnyModel", bound=BaseModel)
-AnyModelType = type[AnyModel]
 
-
+def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov
+    return os.getenv(f"WORKFLOW_{var}", default)
+
+
+def glob_files(path: Path) -> Iterator[Path]:  # pragma: no cov
+    yield from (file for file in path.rglob("*") if file.is_file())
 
-env = os.getenv
 
 __all__: TupleStr = (
+    "env",
     "get_logger",
+    "get_log",
     "Config",
     "SimLoad",
     "Loader",
@@ -52,6 +55,14 @@ def get_logger(name: str):
     :param name: A module name that want to log.
     """
     lg = logging.getLogger(name)
+
+    # NOTE: Developers using this package can then disable all logging just for
+    #   this package by;
+    #
+    #   `logging.getLogger('ddeutil.workflow').propagate = False`
+    #
+    lg.addHandler(logging.NullHandler())
+
     formatter = logging.Formatter(
         fmt=(
             "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
@@ -68,115 +79,139 @@ def get_logger(name: str):
     return lg
 
 
-class Config:
+class Config:  # pragma: no cov
     """Config object for keeping application configuration on current session
     without changing when if the application still running.
     """
 
     # NOTE: Core
-
-
-
-
-
+    @property
+    def root_path(self) -> Path:
+        return Path(env("ROOT_PATH", "."))
+
+    @property
+    def conf_path(self) -> Path:
+        """Config path that use root_path class argument for this construction.
+
+        :rtype: Path
+        """
+        return self.root_path / env("CORE_PATH_CONF", "conf")
+
+    @property
+    def tz(self) -> ZoneInfo:
+        return ZoneInfo(env("CORE_TIMEZONE", "UTC"))
+
+    @property
+    def gen_id_simple_mode(self) -> bool:
+        return str2bool(env("CORE_GENERATE_ID_SIMPLE_MODE", "true"))
 
     # NOTE: Register
-
-
-
-
-
-
+    @property
+    def regis_hook(self) -> list[str]:
+        regis_hook_str: str = env("CORE_REGISTRY", "src")
+        return [r.strip() for r in regis_hook_str.split(",")]
+
+    @property
+    def regis_filter(self) -> list[str]:
+        regis_filter_str: str = env(
+            "CORE_REGISTRY_FILTER", "ddeutil.workflow.templates"
+        )
+        return [r.strip() for r in regis_filter_str.split(",")]
 
     # NOTE: Logging
-
-
-
-
-
+    @property
+    def log_path(self) -> Path:
+        return Path(env("LOG_PATH", "./logs"))
+
+    @property
+    def debug(self) -> bool:
+        return str2bool(env("LOG_DEBUG_MODE", "true"))
+
+    @property
+    def enable_write_log(self) -> bool:
+        return str2bool(env("LOG_ENABLE_WRITE", "false"))
 
     # NOTE: Stage
-
-
-
-    stage_default_id: bool = str2bool(
-        env("WORKFLOW_CORE_STAGE_DEFAULT_ID", "false")
-    )
+    @property
+    def stage_raise_error(self) -> bool:
+        return str2bool(env("CORE_STAGE_RAISE_ERROR", "false"))
 
-
-
-        env("
-    )
-    job_default_id: bool = str2bool(
-        env("WORKFLOW_CORE_JOB_DEFAULT_ID", "false")
-    )
+    @property
+    def stage_default_id(self) -> bool:
+        return str2bool(env("CORE_STAGE_DEFAULT_ID", "false"))
 
-    # NOTE:
-
-
-        env("
-    )
-    max_poking_pool_worker: int = int(
-        os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
-    )
-    max_on_per_workflow: int = int(
-        env("WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW", "5")
-    )
-    max_queue_complete_hist: int = int(
-        os.getenv("WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST", "16")
-    )
+    # NOTE: Job
+    @property
+    def job_raise_error(self) -> bool:
+        return str2bool(env("CORE_JOB_RAISE_ERROR", "true"))
 
-
-
-
-        env("WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS", "100")
-    )
-    stop_boundary_delta_str: str = env(
-        "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
-    )
+    @property
+    def job_default_id(self) -> bool:
+        return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))
 
-    # NOTE:
-
-
-        env("
-    )
-    enable_route_schedule: bool = str2bool(
-        env("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
-    )
+    # NOTE: Workflow
+    @property
+    def max_job_parallel(self) -> int:
+        max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
 
-    def __init__(self) -> None:
         # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
-        if
+        if max_job_parallel < 0:
             raise ValueError(
-                f"``
-                f"{
+                f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
+                f"{max_job_parallel}."
             )
+        return max_job_parallel
+
+    @property
+    def max_job_exec_timeout(self) -> int:
+        return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
+
+    @property
+    def max_poking_pool_worker(self) -> int:
+        return int(env("CORE_MAX_NUM_POKING", "4"))
+
+    @property
+    def max_on_per_workflow(self) -> int:
+        return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))
+
+    @property
+    def max_queue_complete_hist(self) -> int:
+        return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))
+
+    # NOTE: Schedule App
+    @property
+    def max_schedule_process(self) -> int:
+        return int(env("APP_MAX_PROCESS", "2"))
 
+    @property
+    def max_schedule_per_process(self) -> int:
+        return int(env("APP_MAX_SCHEDULE_PER_PROCESS", "100"))
+
+    @property
+    def stop_boundary_delta(self) -> timedelta:
+        stop_boundary_delta_str: str = env(
+            "APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
+        )
         try:
-
-                **json.loads(self.stop_boundary_delta_str)
-            )
+            return timedelta(**json.loads(stop_boundary_delta_str))
         except Exception as err:
             raise ValueError(
                 "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
-                f"timedelta with {
+                f"timedelta with {stop_boundary_delta_str}."
             ) from err
 
+    # NOTE: API
     @property
-    def
-        """
-
-        :rtype: Path
-        """
-        return self.root_path / os.getenv("WORKFLOW_CORE_PATH_CONF", "conf")
+    def prefix_path(self) -> str:
+        return env("API_PREFIX_PATH", "/api/v1")
 
     @property
-    def
-        return
+    def enable_route_workflow(self) -> bool:
+        return str2bool(env("API_ENABLE_ROUTE_WORKFLOW", "true"))
 
     @property
-    def
-        return
+    def enable_route_schedule(self) -> bool:
+        return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
 
 
 class SimLoad:
@@ -206,14 +241,9 @@ class SimLoad:
         externals: DictData | None = None,
     ) -> None:
         self.data: DictData = {}
-        for file in conf.conf_path
-
-
-
-            if data := self.filter_suffix(
-                file,
-                name,
-            ):
+        for file in glob_files(conf.conf_path):
+
+            if data := self.filter_suffix(file, name):
                 self.data = data
 
         # VALIDATE: check the data that reading should not empty.
@@ -245,10 +275,7 @@ class SimLoad:
         :rtype: Iterator[tuple[str, DictData]]
        """
         exclude: list[str] = excluded or []
-        for file in conf.conf_path
-
-            if not file.is_file():
-                continue
+        for file in glob_files(conf.conf_path):
 
             for key, data in cls.filter_suffix(file).items():
 
@@ -274,7 +301,7 @@ class SimLoad:
        """Return object of string type which implement on any registry. The
        object type.
 
-        :rtype:
+        :rtype: str
        """
        if _typ := self.data.get("type"):
            return _typ
@@ -283,6 +310,10 @@ class SimLoad:
        )
 
 
+config = Config()
+logger = get_logger("ddeutil.workflow")
+
+
 class Loader(SimLoad):
     """Loader Object that get the config `yaml` file from current path.
 
@@ -308,15 +339,11 @@ class Loader(SimLoad):
         :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
-            obj=obj, conf=
+            obj=obj, conf=config, included=included, excluded=excluded
         )
 
     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name, conf=
-
-
-config = Config()
-logger = get_logger("ddeutil.workflow")
+        super().__init__(name, conf=config, externals=externals)
 
 
 class BaseLog(BaseModel, ABC):
@@ -398,8 +425,8 @@ class FileLog(BaseLog):
     workflow name and release values. If a release does not pass to an input
     argument, it will return the latest release from the current log path.
 
-    :param name:
-    :param release:
+    :param name: A workflow name that want to search log.
+    :param release: A release datetime that want to search log.
 
     :raise FileNotFoundError:
     :raise NotImplementedError:
@@ -463,8 +490,14 @@ class FileLog(BaseLog):
 
         :rtype: Self
         """
+        from .utils import cut_id
+
         # NOTE: Check environ variable was set for real writing.
         if not config.enable_write_log:
+            logger.debug(
+                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
+                f"config was set"
+            )
             return self
 
         log_file: Path = self.pointer() / f"{self.run_id}.log"
@@ -481,7 +514,32 @@ class FileLog(BaseLog):
 
 class SQLiteLog(BaseLog):  # pragma: no cov
 
+    table: str = "workflow_log"
+    ddl: str = """
+        workflow str,
+        release int,
+        type str,
+        context json,
+        parent_run_id int,
+        run_id int,
+        update datetime
+        primary key ( run_id )
+        """
+
     def save(self, excluded: list[str] | None) -> None:
+        """Save logging data that receive a context data from a workflow
+        execution result.
+        """
+        from .utils import cut_id
+
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            logger.debug(
+                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
+                f"config was set"
+            )
+            return self
+
         raise NotImplementedError("SQLiteLog does not implement yet.")
 
 
@@ -489,3 +547,9 @@ Log = Union[
     FileLog,
     SQLiteLog,
 ]
+
+
+def get_log() -> Log:  # pragma: no cov
+    if config.log_path.is_file():
+        return SQLiteLog
+    return FileLog
ddeutil/workflow/exceptions.py
CHANGED
ddeutil/workflow/hook.py
ADDED
@@ -0,0 +1,153 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import inspect
+import logging
+from dataclasses import dataclass
+from functools import wraps
+from importlib import import_module
+from typing import Any, Callable, Protocol, TypeVar
+
+try:
+    from typing import ParamSpec
+except ImportError:
+    from typing_extensions import ParamSpec
+
+from ddeutil.core import lazy
+
+from .__types import Re
+from .conf import config
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+logger = logging.getLogger("ddeutil.workflow")
+
+
+class TagFunc(Protocol):
+    """Tag Function Protocol"""
+
+    name: str
+    tag: str
+
+    def __call__(self, *args, **kwargs): ...  # pragma: no cov
+
+
+ReturnTagFunc = Callable[P, TagFunc]
+DecoratorTagFunc = Callable[[Callable[[...], Any]], ReturnTagFunc]
+
+
+def tag(
+    name: str, alias: str | None = None
+) -> DecoratorTagFunc:  # pragma: no cov
+    """Tag decorator function that set function attributes, ``tag`` and ``name``
+    for making registries variable.
+
+    :param: name: A tag name for make different use-case of a function.
+    :param: alias: A alias function name that keeping in registries. If this
+        value does not supply, it will use original function name from __name__.
+    :rtype: Callable[P, TagFunc]
+    """
+
+    def func_internal(func: Callable[[...], Any]) -> ReturnTagFunc:
+        func.tag = name
+        func.name = alias or func.__name__.replace("_", "-")
+
+        @wraps(func)
+        def wrapped(*args, **kwargs):
+            # NOTE: Able to do anything before calling hook function.
+            return func(*args, **kwargs)
+
+        return wrapped
+
+    return func_internal
+
+
+Registry = dict[str, Callable[[], TagFunc]]
+
+
+def make_registry(submodule: str) -> dict[str, Registry]:
+    """Return registries of all functions that able to called with task.
+
+    :param submodule: A module prefix that want to import registry.
+    :rtype: dict[str, Registry]
+    """
+    rs: dict[str, Registry] = {}
+    for module in config.regis_hook:
+        # NOTE: try to sequential import task functions
+        try:
+            importer = import_module(f"{module}.{submodule}")
+        except ModuleNotFoundError:
+            continue
+
+        for fstr, func in inspect.getmembers(importer, inspect.isfunction):
+            # NOTE: check function attribute that already set tag by
+            #   ``utils.tag`` decorator.
+            if not hasattr(func, "tag"):
+                continue
+
+            # NOTE: Create new register name if it not exists
+            if func.name not in rs:
+                rs[func.name] = {func.tag: lazy(f"{module}.{submodule}.{fstr}")}
+                continue
+
+            if func.tag in rs[func.name]:
+                raise ValueError(
+                    f"The tag {func.tag!r} already exists on "
+                    f"{module}.{submodule}, you should change this tag name or "
+                    f"change it func name."
+                )
+            rs[func.name][func.tag] = lazy(f"{module}.{submodule}.{fstr}")
+
+    return rs
+
+
+@dataclass(frozen=True)
+class HookSearchData:
+    """Hook Search dataclass that use for receive regular expression grouping
+    dict from searching hook string value.
+    """
+
+    path: str
+    func: str
+    tag: str
+
+
+def extract_hook(hook: str) -> Callable[[], TagFunc]:
+    """Extract Hook function from string value to hook partial function that
+    does run it at runtime.
+
+    :raise NotImplementedError: When the searching hook's function result does
+        not exist in the registry.
+    :raise NotImplementedError: When the searching hook's tag result does not
+        exists in the registry with its function key.
+
+    :param hook: A hook value that able to match with Task regex.
+    :rtype: Callable[[], TagFunc]
+    """
+    if not (found := Re.RE_TASK_FMT.search(hook)):
+        raise ValueError(
+            f"Hook {hook!r} does not match with hook format regex."
+        )
+
+    # NOTE: Pass the searching hook string to `path`, `func`, and `tag`.
+    hook: HookSearchData = HookSearchData(**found.groupdict())
+
+    # NOTE: Registry object should implement on this package only.
+    rgt: dict[str, Registry] = make_registry(f"{hook.path}")
+    if hook.func not in rgt:
+        raise NotImplementedError(
+            f"``REGISTER-MODULES.{hook.path}.registries`` does not "
+            f"implement registry: {hook.func!r}."
+        )
+
+    if hook.tag not in rgt[hook.func]:
+        raise NotImplementedError(
+            f"tag: {hook.tag!r} does not found on registry func: "
+            f"``REGISTER-MODULES.{hook.path}.registries.{hook.func}``"
+        )
+    return rgt[hook.func][hook.tag]
ddeutil/workflow/job.py
CHANGED
@@ -38,13 +38,13 @@ from .exceptions import (
 )
 from .result import Result
 from .stage import Stage
+from .templates import has_template
 from .utils import (
     cross_product,
     cut_id,
     dash2underscore,
     filter_func,
     gen_id,
-    has_template,
 )
 
 logger = get_logger("ddeutil.workflow")
ddeutil/workflow/scheduler.py
CHANGED
@@ -51,7 +51,7 @@ except ImportError:  # pragma: no cov
 
 from .__cron import CronRunner
 from .__types import DictData, TupleStr
-from .conf import
+from .conf import Loader, Log, config, get_log, get_logger
 from .cron import On
 from .exceptions import WorkflowException
 from .utils import (
@@ -493,7 +493,7 @@ def schedule_control(
             "Should install schedule package before use this module."
         ) from None
 
-    log: type[Log] = log or
+    log: type[Log] = log or get_log()
     scheduler: Scheduler = Scheduler()
     start_date: datetime = datetime.now(tz=config.tz)
     stop_date: datetime = stop or (start_date + config.stop_boundary_delta)