ddeutil-workflow 0.0.60__py3-none-any.whl → 0.0.62__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +24 -25
- ddeutil/workflow/event.py +23 -0
- ddeutil/workflow/logs.py +12 -11
- ddeutil/workflow/params.py +54 -21
- ddeutil/workflow/reusables.py +67 -13
- ddeutil/workflow/stages.py +26 -7
- ddeutil/workflow/utils.py +23 -1
- ddeutil/workflow/workflow.py +203 -207
- {ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/METADATA +1 -1
- {ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/RECORD +15 -15
- {ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/WHEEL +1 -1
- {ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.60"
+__version__: str = "0.0.62"
ddeutil/workflow/__cron.py
CHANGED
@@ -18,7 +18,7 @@ from ddeutil.core import (
     isinstance_check,
     must_split,
 )
-from ddeutil.core.dtutils import next_date, replace_date
+from ddeutil.core.dtutils import DatetimeMode, next_date, replace_date
 
 WEEKDAYS: dict[str, int] = {
     "Sun": 0,
@@ -31,7 +31,8 @@ WEEKDAYS: dict[str, int] = {
 }
 
 
-class
+class YearReachLimit(Exception):
+    """"""
 
 
 def str2cron(value: str) -> str:  # pragma: no cov
@@ -178,7 +179,7 @@ class CronPart:
     def __init__(
         self,
         unit: Unit,
-        values: str
+        values: Union[str, list[int]],
         options: Options,
     ) -> None:
         self.unit: Unit = unit
@@ -229,19 +230,21 @@ class CronPart:
             f"(unit={self.unit}, values={self.__str__()!r})"
         )
 
-    def __lt__(self, other) -> bool:
+    def __lt__(self, other: Union[CronPart, list]) -> bool:
         """Override __lt__ method."""
         if isinstance(other, CronPart):
             return self.values < other.values
         elif isinstance(other, list):
             return self.values < other
+        return NotImplemented
 
-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: Union[CronPart, list]) -> bool:
         """Override __eq__ method."""
         if isinstance(other, CronPart):
             return self.values == other.values
         elif isinstance(other, list):
             return self.values == other
+        return NotImplemented
 
     @property
     def min(self) -> int:
@@ -271,6 +274,7 @@ class CronPart:
             and (step := self.values[1] - self.values[0]) > 1
         ):
             return step
+        return None
 
     @property
     def is_full(self) -> bool:
@@ -355,6 +359,8 @@ class CronPart:
                 f"Invalid interval step value {value_step!r} for "
                 f"{self.unit.name!r}"
             )
+        elif value_step:
+            value_step: int = int(value_step)
 
         # NOTE: Generate interval that has step
         interval_list.append(self._interval(value_range_list, value_step))
@@ -375,7 +381,9 @@ class CronPart:
             value: str = value.replace(alt, str(self.unit.min + i))
         return value
 
-    def replace_weekday(
+    def replace_weekday(
+        self, values: Union[list[int], Iterator[int]]
+    ) -> list[int]:
         """Replaces all 7 with 0 as Sunday can be represented by both.
 
         :param values: list or iter of int that want to mode by 7
@@ -433,12 +441,12 @@ class CronPart:
     def _interval(
         self,
         values: list[int],
-        step: int
+        step: Optional[int] = None,
     ) -> list[int]:
         """Applies an interval step to a collection of values.
 
         :param values:
-        :param step:
+        :param step: (int) A step
 
         :rtype: list[int]
         """
@@ -515,7 +523,7 @@ class CronPart:
             start_number: Optional[int] = value
         return multi_dim_values
 
-    def filler(self, value: int) -> int
+    def filler(self, value: int) -> Union[int, str]:
         """Formats weekday and month names as string when the relevant options
         are set.
 
@@ -765,12 +773,12 @@ class CronRunner:
 
     def __init__(
         self,
-        cron: CronJob
+        cron: Union[CronJob, CronJobYear],
         date: Optional[datetime] = None,
         *,
-        tz: str
+        tz: Optional[Union[str, ZoneInfo]] = None,
     ) -> None:
-        self.tz: ZoneInfo
+        self.tz: Optional[ZoneInfo] = None
         if tz:
             if isinstance(tz, ZoneInfo):
                 self.tz = tz
@@ -810,7 +818,7 @@ class CronRunner:
         )
 
         self.__start_date: datetime = self.date
-        self.cron: CronJob
+        self.cron: Union[CronJob, CronJobYear] = cron
         self.is_year: bool = isinstance(cron, CronJobYear)
         self.reset_flag: bool = True
 
@@ -863,7 +871,7 @@ class CronRunner:
 
         raise RecursionError("Unable to find execution time for schedule")
 
-    def __shift_date(self, mode:
+    def __shift_date(self, mode: DatetimeMode, reverse: bool = False) -> bool:
         """Increments the mode of date value ("month", "day", "hour", "minute")
         until matches with the schedule.
 
@@ -897,8 +905,8 @@ class CronRunner:
                 getattr(self.date, mode)
                 > (max_year := max(self.cron.year.values))
             ):
-                raise
-                f"The year is
+                raise YearReachLimit(
+                    f"The year is reach the limit with this crontab setting: "
                     f"{max_year}."
                 )
 
@@ -917,12 +925,3 @@ class CronRunner:
 
         # NOTE: Return False if the date that match with condition.
         return False
-
-
-__all__ = (
-    "CronJob",
-    "CronJobYear",
-    "CronRunner",
-    "Options",
-    "WEEKDAYS",
-)
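With these changes `CronRunner` accepts either a `CronJob` or a `CronJobYear`, and an optional timezone given as a string or a `ZoneInfo`. A minimal sketch, assuming `CronJob` is built from a standard five-field crontab string (the expressions and zone names here are illustrative):

from zoneinfo import ZoneInfo

runner = CronRunner(CronJob("*/5 * * * *"), tz="Asia/Bangkok")
runner_utc = CronRunner(CronJob("0 0 * * *"), tz=ZoneInfo("UTC"))
# tz is now optional; omitting it leaves self.tz as None.
runner_naive = CronRunner(CronJob("0 0 * * *"))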
ddeutil/workflow/event.py
CHANGED
@@ -314,3 +314,26 @@ class CrontabYear(Crontab):
             if isinstance(value, str)
             else value
         )
+
+
+class ReleaseEvent(BaseModel):  # pragma: no cov
+    """Release trigger event."""
+
+    release: list[str] = Field(
+        description=(
+            "A list of workflow name that want to receive event from release"
+            "trigger."
+        )
+    )
+
+
+Event = Annotated[
+    Union[
+        CronJobYear,
+        CronJob,
+    ],
+    Field(
+        union_mode="smart",
+        description="An event models.",
+    ),
+]  # pragma: no cov
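The new `ReleaseEvent` model only declares a required `release` list of workflow names; a minimal construction sketch (the workflow names are hypothetical):

evt = ReleaseEvent(release=["wf-ingest-daily", "wf-transform-hourly"])
print(evt.release)  # ['wf-ingest-daily', 'wf-transform-hourly']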
ddeutil/workflow/logs.py
CHANGED
@@ -23,7 +23,7 @@ from inspect import Traceback, currentframe, getframeinfo
 from pathlib import Path
 from threading import get_ident
 from types import FrameType
-from typing import ClassVar, Literal, Optional, TypeVar, Union
+from typing import ClassVar, Final, Literal, Optional, TypeVar, Union
 
 from pydantic import BaseModel, ConfigDict, Field
 from pydantic.functional_validators import model_validator
@@ -74,7 +74,7 @@ def get_dt_tznow() -> datetime:  # pragma: no cov
     return get_dt_now(tz=config.tz)
 
 
-PREFIX_LOGS: dict[str, dict] = {
+PREFIX_LOGS: Final[dict[str, dict]] = {
     "CALLER": {
         "emoji": "📍",
         "desc": "logs from any usage from custom caller function.",
@@ -85,7 +85,7 @@ PREFIX_LOGS: dict[str, dict] = {
     "RELEASE": {"emoji": "📅", "desc": "logs from release workflow method."},
     "POKING": {"emoji": "⏰", "desc": "logs from poke workflow method."},
 }  # pragma: no cov
-PREFIX_DEFAULT: str = "CALLER"
+PREFIX_DEFAULT: Final[str] = "CALLER"
 PREFIX_LOGS_REGEX: re.Pattern[str] = re.compile(
     rf"(^\[(?P<name>{'|'.join(PREFIX_LOGS)})]:\s?)?(?P<message>.*)",
     re.MULTILINE | re.DOTALL | re.ASCII | re.VERBOSE,
@@ -103,6 +103,9 @@ class PrefixMsg(BaseModel):
     def prepare(self, extras: Optional[DictData] = None) -> str:
         """Prepare message with force add prefix before writing trace log.
 
+        :param extras: (DictData) An extra parameter that want to get the
+            `log_add_emoji` flag.
+
         :rtype: str
         """
         name: str = self.name or PREFIX_DEFAULT
@@ -332,9 +335,7 @@ class BaseTrace(BaseModel, ABC):  # pragma: no cov
 
         :param message: (str) A message that want to log.
         """
-        msg: str =
-            self.make_message(extract_msg_prefix(message).prepare(self.extras))
-        )
+        msg: str = self.make_message(message)
 
         if mode != "debug" or (
             mode == "debug" and dynamic("debug", extras=self.extras)
@@ -391,9 +392,7 @@ class BaseTrace(BaseModel, ABC):  # pragma: no cov
 
         :param message: (str) A message that want to log.
        """
-        msg: str =
-            self.make_message(extract_msg_prefix(message).prepare(self.extras))
-        )
+        msg: str = self.make_message(message)
 
         if mode != "debug" or (
             mode == "debug" and dynamic("debug", extras=self.extras)
@@ -514,13 +513,15 @@ class FileTrace(BaseTrace):  # pragma: no cov
         return f"{cut_parent_run_id} -> {cut_run_id}"
 
     def make_message(self, message: str) -> str:
-        """Prepare and Make a message before write and log
+        """Prepare and Make a message before write and log steps.
 
         :param message: (str) A message that want to prepare and make before.
 
         :rtype: str
         """
-        return
+        return prepare_newline(
+            f"({self.cut_id}) {extract_msg_prefix(message).prepare(self.extras)}"
+        )
 
     def writer(self, message: str, level: str, is_err: bool = False) -> None:
         """Write a trace message after making to target file and write metadata
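The `PREFIX_LOGS_REGEX` pattern above splits an optional `[PREFIX]:` marker off the message body; a minimal sketch of how it behaves (the message text is illustrative):

m = PREFIX_LOGS_REGEX.match("[CALLER]: start the release step")
print(m.group("name"), "->", m.group("message"))
# CALLER -> start the release step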
ddeutil/workflow/params.py
CHANGED
@@ -20,6 +20,7 @@ from typing import Annotated, Any, Literal, Optional, TypeVar, Union
 from ddeutil.core import str2dict, str2list
 from pydantic import BaseModel, Field
 
+from .__types import StrOrInt
 from .exceptions import ParamValueException
 from .utils import get_d_now, get_dt_now
 
@@ -159,10 +160,11 @@ class StrParam(DefaultParam):
 
     type: Literal["str"] = "str"
 
-    def receive(self, value: Optional[
+    def receive(self, value: Optional[Any] = None) -> Optional[str]:
         """Receive value that match with str.
 
-        :param value: A value that want to validate with string parameter
+        :param value: (Any) A value that want to validate with string parameter
+            type.
         :rtype: Optional[str]
         """
         if value is None:
@@ -175,7 +177,7 @@ class IntParam(DefaultParam):
 
     type: Literal["int"] = "int"
 
-    def receive(self, value: Optional[
+    def receive(self, value: Optional[StrOrInt] = None) -> Optional[int]:
         """Receive value that match with int.
 
         :param value: A value that want to validate with integer parameter type.
@@ -200,13 +202,24 @@ class FloatParam(DefaultParam):  # pragma: no cov
     precision: int = 6
 
     def rounding(self, value: float) -> float:
-        """Rounding float value with the specific precision field.
+        """Rounding float value with the specific precision field.
+
+        :param value: A float value that want to round with the precision value.
+
+        :rtype: float
+        """
         round_str: str = f"{{0:.{self.precision}f}}"
         return float(round_str.format(round(value, self.precision)))
 
-    def receive(
+    def receive(
+        self, value: Optional[Union[float, int, str]] = None
+    ) -> Optional[float]:
+        """Receive value that match with float.
 
-
+        :param value: A value that want to validate with float parameter type.
+        :rtype: float | None
+        """
+        if value is None:
             return self.default
 
         if isinstance(value, float):
@@ -217,11 +230,7 @@ class FloatParam(DefaultParam):  # pragma: no cov
             raise TypeError(
                 "Received value type does not math with str, float, or int."
             )
-
-        try:
-            return self.rounding(float(value))
-        except Exception:
-            raise
+        return self.rounding(float(value))
 
 
 class DecimalParam(DefaultParam):  # pragma: no cov
@@ -231,12 +240,28 @@ class DecimalParam(DefaultParam):  # pragma: no cov
     precision: int = 6
 
     def rounding(self, value: Decimal) -> Decimal:
-        """Rounding float value with the specific precision field.
+        """Rounding float value with the specific precision field.
+
+        :param value: (Decimal) A Decimal value that want to round with the
+            precision value.
+
+        :rtype: Decimal
+        """
         return value.quantize(Decimal(10) ** -self.precision)
 
-    def receive(
+    def receive(
+        self, value: Optional[Union[float, int, str, Decimal]] = None
+    ) -> Decimal:
+        """Receive value that match with decimal.
 
-
+        :param value: (float | Decimal) A value that want to validate with
+            decimal parameter type.
+        :rtype: Decimal | None
+        """
+        if value is None:
+            return self.default
+
+        if isinstance(value, (float, int)):
             return self.rounding(Decimal(value))
         elif isinstance(value, Decimal):
             return self.rounding(value)
@@ -261,11 +286,12 @@ class ChoiceParam(BaseParam):
         description="A list of choice parameters that able be str or int.",
     )
 
-    def receive(self, value:
+    def receive(self, value: Optional[StrOrInt] = None) -> StrOrInt:
         """Receive value that match with options.
 
-        :param value: A value that want to select from the options
-
+        :param value: (str | int) A value that want to select from the options
+            field.
+        :rtype: str | int
         """
         # NOTE:
         #   Return the first value in options if it does not pass any input
@@ -279,7 +305,7 @@ class ChoiceParam(BaseParam):
         return value
 
 
-class MapParam(DefaultParam):
+class MapParam(DefaultParam):
     """Map parameter."""
 
     type: Literal["map"] = "map"
@@ -295,6 +321,7 @@ class MapParam(DefaultParam):  # pragma: no cov
         """Receive value that match with map type.
 
         :param value: A value that want to validate with map parameter type.
+
         :rtype: dict[Any, Any]
         """
         if value is None:
@@ -316,7 +343,7 @@ class MapParam(DefaultParam):  # pragma: no cov
         return value
 
 
-class ArrayParam(DefaultParam):
+class ArrayParam(DefaultParam):
     """Array parameter."""
 
     type: Literal["array"] = "array"
@@ -326,7 +353,7 @@ class ArrayParam(DefaultParam):  # pragma: no cov
     )
 
     def receive(
-        self, value: Optional[Union[list[T], tuple[T, ...], str]] = None
+        self, value: Optional[Union[list[T], tuple[T, ...], set[T], str]] = None
     ) -> list[T]:
         """Receive value that match with array type.
 
@@ -365,5 +392,11 @@ Param = Annotated[
         IntParam,
         StrParam,
     ],
-    Field(
+    Field(
+        discriminator="type",
+        description=(
+            "A parameter models that use for validate and receive on the "
+            "workflow execution."
+        ),
+    ),
 ]
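Because the `Param` union now carries `discriminator="type"`, validation dispatches on the `type` key instead of trying every member. A minimal sketch, assuming pydantic v2's `TypeAdapter` and that each model validates from its `type` key alone (the other fields have defaults):

from pydantic import TypeAdapter

adapter = TypeAdapter(Param)
str_param = adapter.validate_python({"type": "str"})  # -> StrParam
int_param = adapter.validate_python({"type": "int"})  # -> IntParam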
ddeutil/workflow/reusables.py
CHANGED
@@ -4,7 +4,7 @@
 # license information.
 # ------------------------------------------------------------------------------
 # [x] Use dynamic config
-"""Reusables module that keep any
+"""Reusables module that keep any template and template filter functions."""
 from __future__ import annotations
 
 import copy
@@ -14,7 +14,7 @@ from ast import Call, Constant, Expr, Module, Name, parse
 from datetime import datetime
 from functools import wraps
 from importlib import import_module
-from typing import Any, Callable, Optional, Protocol, TypeVar, Union
+from typing import Any, Callable, Literal, Optional, Protocol, TypeVar, Union
 
 try:
     from typing import ParamSpec
@@ -32,7 +32,7 @@ from .exceptions import UtilException
 T = TypeVar("T")
 P = ParamSpec("P")
 
-
+# NOTE: Adjust logging level of the `asyncio` to INFO level.
 logging.getLogger("asyncio").setLevel(logging.INFO)
 
 
@@ -40,10 +40,14 @@ FILTERS: dict[str, Callable] = {  # pragma: no cov
     "abs": abs,
     "str": str,
     "int": int,
+    "list": list,
+    "dict": dict,
     "title": lambda x: x.title(),
     "upper": lambda x: x.upper(),
     "lower": lambda x: x.lower(),
     "rstr": [str, repr],
+    "keys": lambda x: x.keys(),
+    "values": lambda x: x.values(),
 }
 
 
@@ -53,6 +57,7 @@ class FilterFunc(Protocol):
     """
 
     filter: str
+    mark: Literal["filter"] = "filter"
 
     def __call__(self, *args, **kwargs): ...  # pragma: no cov
 
@@ -71,6 +76,7 @@ def custom_filter(name: str) -> Callable[P, FilterFunc]:
 
     def func_internal(func: Callable[[...], Any]) -> FilterFunc:
         func.filter = name
+        func.mark = "filter"
 
         @wraps(func)
         def wrapped(*args, **kwargs):
@@ -102,7 +108,10 @@ def make_filter_registry(
     for fstr, func in inspect.getmembers(importer, inspect.isfunction):
         # NOTE: check function attribute that already set tag by
         #   ``utils.tag`` decorator.
-        if not
+        if not (
+            hasattr(func, "filter")
+            and str(getattr(func, "mark", "NOT SET")) == "filter"
+        ):
             continue
 
         func: FilterFunc
@@ -207,11 +216,9 @@ def map_post_filter(
                 value: T = func(value)
             else:
                 value: T = f_func(value, *args, **kwargs)
-        except UtilException
-            logger.warning(str(err))
+        except UtilException:
             raise
-        except Exception
-            logger.warning(str(err))
+        except Exception:
             raise UtilException(
                 f"The post-filter: {func_name!r} does not fit with {value!r} "
                 f"(type: {type(value).__name__})."
@@ -301,7 +308,7 @@ def str2template(
         getter: Any = getdot(caller, params)
     except ValueError as err:
         raise UtilException(
-            f"
+            f"Parameters does not get dot with caller: {caller!r}."
         ) from err
 
     # NOTE:
@@ -330,7 +337,7 @@ def param2template(
     filters: Optional[dict[str, FilterRegistry]] = None,
     *,
     extras: Optional[DictData] = None,
-) ->
+) -> Any:
     """Pass param to template string that can search by ``RE_CALLER`` regular
     expression.
 
@@ -340,7 +347,7 @@ def param2template(
     :param filters: A filter mapping for mapping with `map_post_filter` func.
     :param extras: (Optional[list[str]]) An Override extras.
 
-    :rtype:
+    :rtype: Any
     :returns: An any getter value from the params input.
     """
     registers: Optional[list[str]] = (
@@ -367,6 +374,11 @@ def param2template(
 def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
     """Format datetime object to string with the format.
 
+    Examples:
+
+        > ${{ start-date | fmt('%Y%m%d') }}
+        > ${{ start-date | fmt }}
+
     :param value: (datetime) A datetime value that want to format to string
         value.
     :param fmt: (str) A format string pattern that passing to the `dt.strftime`
@@ -383,15 +395,54 @@ def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
 
 @custom_filter("coalesce")  # pragma: no cov
 def coalesce(value: Optional[T], default: Any) -> T:
-    """Coalesce with default value if the main value is None.
+    """Coalesce with default value if the main value is None.
+
+    Examples:
+
+        > ${{ value | coalesce("foo") }}
+
+    :param value: A value that want to check nullable.
+    :param default: A default value that use to returned value if an input
+        value was null.
+    """
     return default if value is None else value
 
 
+@custom_filter("getitem")  # pragma: no cov
+def get_item(
+    value: DictData, key: Union[str, int], default: Optional[Any] = None
+) -> Any:
+    """Get a value with an input specific key."""
+    if not isinstance(value, dict):
+        raise UtilException(
+            f"The value that pass to `getitem` filter should be `dict` not "
+            f"`{type(value)}`."
+        )
+    return value.get(key, default)
+
+
+@custom_filter("getindex")  # pragma: no cov
+def get_index(value: list[Any], index: int):
+    if not isinstance(value, list):
+        raise UtilException(
+            f"The value that pass to `getindex` filter should be `list` not "
+            f"`{type(value)}`."
+        )
+    try:
+        return value[index]
+    except IndexError as e:
+        raise UtilException(
+            f"Index: {index} is out of range of value (The maximum range is "
+            f"{len(value)})."
+        ) from e
+
+
 class TagFunc(Protocol):
     """Tag Function Protocol"""
 
     name: str
     tag: str
+    mark: Literal["tag"] = "tag"
 
     def __call__(self, *args, **kwargs): ...  # pragma: no cov
 
@@ -419,6 +470,7 @@ def tag(
     def func_internal(func: Callable[[...], Any]) -> ReturnTagFunc:
         func.tag = name or "latest"
         func.name = alias or func.__name__.replace("_", "-")
+        func.mark = "tag"
 
         @wraps(func)
         def wrapped(*args: P.args, **kwargs: P.kwargs) -> TagFunc:
@@ -466,7 +518,9 @@ def make_registry(
         # NOTE: check function attribute that already set tag by
         #   ``utils.tag`` decorator.
         if not (
-            hasattr(func, "tag")
+            hasattr(func, "tag")
+            and hasattr(func, "name")
+            and str(getattr(func, "mark", "NOT SET")) == "tag"
         ):  # pragma: no cov
             continue
 
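The new `getitem` and `getindex` filters follow the same template syntax as the `fmt` and `coalesce` examples in the docstrings above; the parameter names here are illustrative only:

    > ${{ params.data | getitem('name') }}
    > ${{ params.data | getitem('name', 'default') }}
    > ${{ params.records | getindex(0) }}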
ddeutil/workflow/stages.py
CHANGED
@@ -35,6 +35,7 @@ import json
 import subprocess
 import sys
 import time
+import traceback
 import uuid
 from abc import ABC, abstractmethod
 from collections.abc import AsyncIterator, Iterator
@@ -65,12 +66,14 @@ from .result import CANCEL, FAILED, SUCCESS, WAIT, Result, Status
 from .reusables import TagFunc, extract_call, not_in_template, param2template
 from .utils import (
     delay,
+    dump_all,
     filter_func,
     gen_id,
     make_exec,
 )
 
 T = TypeVar("T")
+DictOrModel = Union[DictData, BaseModel]
 
 
 class BaseStage(BaseModel, ABC):
@@ -221,7 +224,10 @@ class BaseStage(BaseModel, ABC):
             return self.execute(params, result=result, event=event)
         except Exception as e:
             e_name: str = e.__class__.__name__
-            result.trace.error(
+            result.trace.error(
+                f"[STAGE]: Error Handler:||{e_name}:||{e}||"
+                f"{traceback.format_exc()}"
+            )
             if dynamic("stage_raise_error", f=raise_error, extras=self.extras):
                 if isinstance(e, StageException):
                     raise
@@ -1170,13 +1176,12 @@ class CallStage(BaseAsyncStage):
             args.pop("result")
 
         args = self.parse_model_args(call_func, args, result)
-
         if inspect.iscoroutinefunction(call_func):
-            rs:
+            rs: DictOrModel = await call_func(
                 **param2template(args, params, extras=self.extras)
             )
         else:
-            rs:
+            rs: DictOrModel = call_func(
                 **param2template(args, params, extras=self.extras)
             )
 
@@ -1190,7 +1195,7 @@ class CallStage(BaseAsyncStage):
                 f"serialize, you must set return be `dict` or Pydantic "
                 f"model."
             )
-        return result.catch(status=SUCCESS, context=rs)
+        return result.catch(status=SUCCESS, context=dump_all(rs, by_alias=True))
 
     @staticmethod
     def parse_model_args(
@@ -2528,7 +2533,11 @@ class VirtualPyStage(PyStage):  # pragma: no cov
         deps: list[str],
         run_id: StrOrNone = None,
     ) -> Iterator[str]:
-        """Create the
+        """Create the `.py` file and write an input Python statement and its
+        Python dependency on the header of this file.
+
+        The format of Python dependency was followed by the `uv`
+        recommended.
 
         :param py: A Python string statement.
         :param values: A variable that want to set before running this
@@ -2544,7 +2553,7 @@ class VirtualPyStage(PyStage):  # pragma: no cov
             f"{var} = {value!r}" for var, value in values.items()
         )
 
-        # NOTE: uv supports PEP 723 — inline TOML metadata.
+        # NOTE: `uv` supports PEP 723 — inline TOML metadata.
         f.write(
             dedent(
                 f"""
@@ -2603,6 +2612,16 @@ class VirtualPyStage(PyStage):  # pragma: no cov
                 run_id=result.run_id,
             ) as py:
                 result.trace.debug(f"[STAGE]: ... Create `{py}` file.")
+                try:
+                    import uv
+
+                    _ = uv
+                except ImportError:
+                    raise ImportError(
+                        "The VirtualPyStage need you to install `uv` before"
+                        "execution."
+                    ) from None
+
                 rs: CompletedProcess = subprocess.run(
                     ["uv", "run", py, "--no-cache"],
                     # ["uv", "run", "--python", "3.9", py],
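For reference, the PEP 723 inline metadata that `uv run` reads has the following general shape; this is the standard format, not the exact header the stage writes (its dependency list comes from the `deps` field):

# /// script
# requires-python = ">=3.9"
# dependencies = [
#     "requests",
# ]
# ///
import requests  # resolved by `uv run` from the inline block above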
ddeutil/workflow/utils.py
CHANGED
@@ -15,10 +15,11 @@ from inspect import isfunction
 from itertools import chain, islice, product
 from pathlib import Path
 from random import randrange
-from typing import Any, Final, Optional, TypeVar, Union
+from typing import Any, Final, Optional, TypeVar, Union, overload
 from zoneinfo import ZoneInfo
 
 from ddeutil.core import hash_str
+from pydantic import BaseModel
 
 from .__types import DictData, Matrix
 
@@ -289,3 +290,24 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
         dt, simple = run_id.split("T", maxsplit=1)
         return dt[:12] + simple[-num:]
     return run_id[:12] + run_id[-num:]
+
+
+@overload
+def dump_all(value: BaseModel, by_alias: bool = False) -> DictData: ...
+
+
+@overload
+def dump_all(value: T, by_alias: bool = False) -> T: ...
+
+
+def dump_all(
+    value: Union[T, BaseModel], by_alias: bool = False
+) -> Union[T, DictData]:
+    """Dump all BaseModel object to dict."""
+    if isinstance(value, dict):
+        return {k: dump_all(value[k], by_alias=by_alias) for k in value}
+    elif isinstance(value, (list, tuple, set)):
+        return type(value)([dump_all(i, by_alias=by_alias) for i in value])
+    elif isinstance(value, BaseModel):
+        return value.model_dump(by_alias=by_alias)
+    return value
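A minimal usage sketch for the new `dump_all` helper (the `Point` model is hypothetical); it walks dicts, lists, tuples, and sets and converts any nested `BaseModel` with `model_dump`:

from pydantic import BaseModel

class Point(BaseModel):
    x: int
    y: int

data = {"points": [Point(x=1, y=2), Point(x=3, y=4)], "label": "grid"}
print(dump_all(data))
# {'points': [{'x': 1, 'y': 2}, {'x': 3, 'y': 4}], 'label': 'grid'}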
ddeutil/workflow/workflow.py
CHANGED
@@ -39,7 +39,7 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
 from .__cron import CronRunner
-from .__types import DictData
+from .__types import DictData
 from .conf import FileLoad, Loader, dynamic
 from .event import Crontab
 from .exceptions import WorkflowException
@@ -57,14 +57,6 @@ from .utils import (
     wait_until_next_minute,
 )
 
-__all__: TupleStr = (
-    "Release",
-    "ReleaseQueue",
-    "ReleaseType",
-    "Workflow",
-    "WorkflowTask",
-)
-
 
 class ReleaseType(str, Enum):
     """Release Type Enum support the type field on the Release dataclass."""
@@ -711,198 +703,6 @@ class Workflow(BaseModel):
         },
     )
 
-    def queue(
-        self,
-        offset: float,
-        end_date: datetime,
-        queue: ReleaseQueue,
-        audit: type[Audit],
-        *,
-        force_run: bool = False,
-    ) -> ReleaseQueue:
-        """Generate Release from all on values from the on field and store them
-        to the ReleaseQueue object.
-
-        :param offset: An offset in second unit for time travel.
-        :param end_date: An end datetime object.
-        :param queue: A workflow queue object.
-        :param audit: An audit class that want to make audit object.
-        :param force_run: A flag that allow to release workflow if the audit
-            with that release was pointed.
-
-        :rtype: ReleaseQueue
-        """
-        for on in self.on:
-
-            queue.gen(
-                end_date,
-                audit,
-                on.next(get_dt_now(offset=offset).replace(microsecond=0)),
-                self.name,
-                force_run=force_run,
-            )
-
-        return queue
-
-    def poke(
-        self,
-        params: Optional[DictData] = None,
-        start_date: Optional[datetime] = None,
-        *,
-        run_id: Optional[str] = None,
-        periods: int = 1,
-        audit: Optional[Audit] = None,
-        force_run: bool = False,
-        timeout: int = 1800,
-        max_poking_pool_worker: int = 2,
-    ) -> Result:
-        """Poke workflow with a start datetime value that will pass to its
-        `on` field on the threading executor pool for execute the `release`
-        method (It run all schedules that was set on the `on` values).
-
-        This method will observe its `on` field that nearing to run with the
-        `self.release()` method.
-
-        The limitation of this method is not allow run a date that gather
-        than the current date.
-
-        :param params: (DictData) A parameter data.
-        :param start_date: (datetime) A start datetime object.
-        :param run_id: (str) A workflow running ID for this poke.
-        :param periods: (int) A periods in minutes value that use to run this
-            poking. (Default is 1)
-        :param audit: (Audit) An audit object that want to use on this poking
-            process.
-        :param force_run: (bool) A flag that allow to release workflow if the
-            audit with that release was pointed. (Default is False)
-        :param timeout: (int) A second value for timeout while waiting all
-            futures run completely.
-        :param max_poking_pool_worker: (int) The maximum poking pool worker.
-            (Default is 2 workers)
-
-        :raise WorkflowException: If the periods parameter less or equal than 0.
-
-        :rtype: Result
-        :return: A list of all results that return from `self.release` method.
-        """
-        audit: type[Audit] = audit or get_audit(extras=self.extras)
-        result: Result = Result(
-            run_id=(run_id or gen_id(self.name, unique=True))
-        )
-
-        # VALIDATE: Check the periods value should gather than 0.
-        if periods <= 0:
-            raise WorkflowException(
-                "The period of poking should be `int` and grater or equal "
-                "than 1."
-            )
-
-        if len(self.on) == 0:
-            result.trace.warning(
-                f"[POKING]: {self.name!r} not have any schedule!!!"
-            )
-            return result.catch(status=SUCCESS, context={"outputs": []})
-
-        # NOTE: Create the current date that change microsecond to 0
-        current_date: datetime = datetime.now().replace(microsecond=0)
-
-        if start_date is None:
-            # NOTE: Force change start date if it gathers than the current date,
-            #   or it does not pass to this method.
-            start_date: datetime = current_date
-            offset: float = 0
-        elif start_date <= current_date:
-            start_date = start_date.replace(microsecond=0)
-            offset: float = (current_date - start_date).total_seconds()
-        else:
-            raise WorkflowException(
-                f"The start datetime should less than or equal the current "
-                f"datetime, {current_date:%Y-%m-%d %H:%M:%S}."
-            )
-
-        # NOTE: The end date is using to stop generate queue with an input
-        #   periods value. It will change to MM:59.
-        #   For example:
-        #       (input)  start_date = 12:04:12, offset = 2
-        #       (output) end_date = 12:06:59
-        end_date: datetime = start_date.replace(second=0) + timedelta(
-            minutes=periods + 1, seconds=-1
-        )
-
-        result.trace.info(
-            f"[POKING]: Execute Poking: {self.name!r} ("
-            f"{start_date:%Y-%m-%d %H:%M:%S} ==> {end_date:%Y-%m-%d %H:%M:%S})"
-        )
-
-        params: DictData = {} if params is None else params
-        context: list[Result] = []
-        q: ReleaseQueue = ReleaseQueue()
-
-        # NOTE: Create reusable partial function and add Release to the release
-        #   queue object.
-        partial_queue = partial(
-            self.queue, offset, end_date, audit=audit, force_run=force_run
-        )
-        partial_queue(q)
-        if not q.is_queued:
-            result.trace.warning(
-                f"[POKING]: Skip {self.name!r}, not have any queue!!!"
-            )
-            return result.catch(status=SUCCESS, context={"outputs": []})
-
-        with ThreadPoolExecutor(
-            max_workers=dynamic(
-                "max_poking_pool_worker",
-                f=max_poking_pool_worker,
-                extras=self.extras,
-            ),
-            thread_name_prefix="wf_poking_",
-        ) as executor:
-
-            futures: list[Future] = []
-
-            while q.is_queued:
-
-                # NOTE: Pop the latest Release object from the release queue.
-                release: Release = heappop(q.queue)
-
-                if reach_next_minute(release.date, offset=offset):
-                    result.trace.debug(
-                        f"[POKING]: Skip Release: "
-                        f"{release.date:%Y-%m-%d %H:%M:%S}"
-                    )
-                    heappush(q.queue, release)
-                    wait_until_next_minute(get_dt_now(offset=offset))
-
-                    # WARNING: I already call queue poking again because issue
-                    #   about the every minute crontab.
-                    partial_queue(q)
-                    continue
-
-                heappush(q.running, release)
-                futures.append(
-                    executor.submit(
-                        self.release,
-                        release=release,
-                        params=params,
-                        audit=audit,
-                        queue=q,
-                        parent_run_id=result.run_id,
-                    )
-                )
-
-                partial_queue(q)
-
-            # WARNING: This poking method does not allow to use fail-fast
-            #   logic to catching parallel execution result.
-            for future in as_completed(futures, timeout=timeout):
-                context.append(future.result())
-
-        return result.catch(
-            status=SUCCESS,
-            context={"outputs": context},
-        )
-
     def execute_job(
         self,
         job: Job,
@@ -974,7 +774,7 @@ class Workflow(BaseModel):
         parent_run_id: Optional[str] = None,
         result: Optional[Result] = None,
         event: Optional[Event] = None,
-        timeout:
+        timeout: float = 3600,
         max_job_parallel: int = 2,
     ) -> Result:
         """Execute workflow with passing a dynamic parameters to all jobs that
@@ -1005,10 +805,10 @@ class Workflow(BaseModel):
         :param result: (Result) A Result instance for return context and status.
         :param event: (Event) An Event manager instance that use to cancel this
             execution if it forces stopped by parent execution.
-        :param timeout: (
-            use for limit time of execution and waiting job dependency.
-            value does not force stop the task that still running more than
-            limit time. (Default: 60 * 60 seconds)
+        :param timeout: (float) A workflow execution time out in second unit
+            that use for limit time of execution and waiting job dependency.
+            This value does not force stop the task that still running more than
+            this limit time. (Default: 60 * 60 seconds)
         :param max_job_parallel: (int) The maximum workers that use for job
             execution in `PoolThreadExecutor` object. (Default: 2 workers)
 
@@ -1040,7 +840,7 @@ class Workflow(BaseModel):
             job_queue.put(job_id)
 
         not_timeout_flag: bool = True
-        timeout:
+        timeout: float = dynamic(
            "max_job_exec_timeout", f=timeout, extras=self.extras
        )
 
@@ -1140,6 +940,202 @@ class Workflow(BaseModel):
             )
 
 
+class WorkflowPoke(Workflow):
+    """Workflow Poke model that was implemented the poke method."""
+
+    def queue(
+        self,
+        offset: float,
+        end_date: datetime,
+        queue: ReleaseQueue,
+        audit: type[Audit],
+        *,
+        force_run: bool = False,
+    ) -> ReleaseQueue:
+        """Generate Release from all on values from the on field and store them
+        to the ReleaseQueue object.
+
+        :param offset: An offset in second unit for time travel.
+        :param end_date: An end datetime object.
+        :param queue: A workflow queue object.
+        :param audit: An audit class that want to make audit object.
+        :param force_run: A flag that allow to release workflow if the audit
+            with that release was pointed.
+
+        :rtype: ReleaseQueue
+        """
+        for on in self.on:
+
+            queue.gen(
+                end_date,
+                audit,
+                on.next(get_dt_now(offset=offset).replace(microsecond=0)),
+                self.name,
+                force_run=force_run,
+            )
+
+        return queue
+
+    def poke(
+        self,
+        params: Optional[DictData] = None,
+        start_date: Optional[datetime] = None,
+        *,
+        run_id: Optional[str] = None,
+        periods: int = 1,
+        audit: Optional[Audit] = None,
+        force_run: bool = False,
+        timeout: int = 1800,
+        max_poking_pool_worker: int = 2,
+    ) -> Result:
+        """Poke workflow with a start datetime value that will pass to its
+        `on` field on the threading executor pool for execute the `release`
+        method (It run all schedules that was set on the `on` values).
+
+        This method will observe its `on` field that nearing to run with the
+        `self.release()` method.
+
+        The limitation of this method is not allow run a date that gather
+        than the current date.
+
+        :param params: (DictData) A parameter data.
+        :param start_date: (datetime) A start datetime object.
+        :param run_id: (str) A workflow running ID for this poke.
+        :param periods: (int) A periods in minutes value that use to run this
+            poking. (Default is 1)
+        :param audit: (Audit) An audit object that want to use on this poking
+            process.
+        :param force_run: (bool) A flag that allow to release workflow if the
+            audit with that release was pointed. (Default is False)
+        :param timeout: (int) A second value for timeout while waiting all
+            futures run completely.
+        :param max_poking_pool_worker: (int) The maximum poking pool worker.
+            (Default is 2 workers)
+
+        :raise WorkflowException: If the periods parameter less or equal than 0.
+
+        :rtype: Result
+        :return: A list of all results that return from `self.release` method.
+        """
+        audit: type[Audit] = audit or get_audit(extras=self.extras)
+        result: Result = Result(
+            run_id=(run_id or gen_id(self.name, unique=True))
+        )
+
+        # VALIDATE: Check the periods value should gather than 0.
+        if periods <= 0:
+            raise WorkflowException(
+                "The period of poking should be `int` and grater or equal "
+                "than 1."
+            )
+
+        if len(self.on) == 0:
+            result.trace.warning(
+                f"[POKING]: {self.name!r} not have any schedule!!!"
+            )
+            return result.catch(status=SUCCESS, context={"outputs": []})
+
+        # NOTE: Create the current date that change microsecond to 0
+        current_date: datetime = datetime.now().replace(microsecond=0)
+
+        if start_date is None:
+            # NOTE: Force change start date if it gathers than the current date,
+            #   or it does not pass to this method.
+            start_date: datetime = current_date
+            offset: float = 0
+        elif start_date <= current_date:
+            start_date = start_date.replace(microsecond=0)
+            offset: float = (current_date - start_date).total_seconds()
+        else:
+            raise WorkflowException(
+                f"The start datetime should less than or equal the current "
+                f"datetime, {current_date:%Y-%m-%d %H:%M:%S}."
+            )
+
+        # NOTE: The end date is using to stop generate queue with an input
+        #   periods value. It will change to MM:59.
+        #   For example:
+        #       (input)  start_date = 12:04:12, offset = 2
+        #       (output) end_date = 12:06:59
+        end_date: datetime = start_date.replace(second=0) + timedelta(
+            minutes=periods + 1, seconds=-1
+        )
+
+        result.trace.info(
+            f"[POKING]: Execute Poking: {self.name!r} "
+            f"({start_date:%Y-%m-%d %H:%M:%S} ==> {end_date:%Y-%m-%d %H:%M:%S})"
+        )
+
+        params: DictData = {} if params is None else params
+        context: list[Result] = []
+        q: ReleaseQueue = ReleaseQueue()
+
+        # NOTE: Create reusable partial function and add Release to the release
+        #   queue object.
+        partial_queue = partial(
+            self.queue, offset, end_date, audit=audit, force_run=force_run
+        )
+        partial_queue(q)
+        if not q.is_queued:
+            result.trace.warning(
+                f"[POKING]: Skip {self.name!r}, not have any queue!!!"
+            )
+            return result.catch(status=SUCCESS, context={"outputs": []})
+
+        with ThreadPoolExecutor(
+            max_workers=dynamic(
+                "max_poking_pool_worker",
+                f=max_poking_pool_worker,
+                extras=self.extras,
+            ),
+            thread_name_prefix="wf_poking_",
+        ) as executor:
+
+            futures: list[Future] = []
+
+            while q.is_queued:
+
+                # NOTE: Pop the latest Release object from the release queue.
+                release: Release = heappop(q.queue)
+
+                if reach_next_minute(release.date, offset=offset):
+                    result.trace.debug(
+                        f"[POKING]: Skip Release: "
+                        f"{release.date:%Y-%m-%d %H:%M:%S}"
+                    )
+                    heappush(q.queue, release)
+                    wait_until_next_minute(get_dt_now(offset=offset))
+
+                    # WARNING: I already call queue poking again because issue
+                    #   about the every minute crontab.
+                    partial_queue(q)
+                    continue
+
+                heappush(q.running, release)
+                futures.append(
+                    executor.submit(
+                        self.release,
+                        release=release,
+                        params=params,
+                        audit=audit,
+                        queue=q,
+                        parent_run_id=result.run_id,
+                    )
+                )
+
+                partial_queue(q)
+
+            # WARNING: This poking method does not allow to use fail-fast
+            #   logic to catching parallel execution result.
+            for future in as_completed(futures, timeout=timeout):
+                context.append(future.result())
+
+        return result.catch(
+            status=SUCCESS,
+            context={"outputs": context},
+        )
+
+
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
 class WorkflowTask:
     """Workflow task Pydantic dataclass object that use to keep mapping data and
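Since `queue` and `poke` moved off `Workflow` into the new `WorkflowPoke` subclass, polling callers now need the subclass; a minimal sketch, assuming the usual `from_conf` loader and a hypothetical workflow name:

wf = WorkflowPoke.from_conf("my-workflow")
rs = wf.poke(periods=1)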
{ddeutil_workflow-0.0.60.dist-info → ddeutil_workflow-0.0.62.dist-info}/RECORD
CHANGED
@@ -1,20 +1,20 @@
-ddeutil/workflow/__about__.py,sha256=
-ddeutil/workflow/__cron.py,sha256=
+ddeutil/workflow/__about__.py,sha256=yjnDqQt_XdXe6-ocPUq4R7qNZAPra-nc1AMmD7lPaN4,28
+ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
 ddeutil/workflow/__init__.py,sha256=NXEhjzKFdIGa-jtIq9HXChLCjSXNPd8VJ8ltggxbBO8,1371
 ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
 ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
 ddeutil/workflow/conf.py,sha256=NLvjZ8bpDsn4e0MG3m1vgMdAwtmii5hP1D0STKQyZeo,14907
-ddeutil/workflow/event.py,sha256=
+ddeutil/workflow/event.py,sha256=oY5C9E5CM5wRPs0Fb1OhduvNPMT-hY6p4iO8tICpWAc,10972
 ddeutil/workflow/exceptions.py,sha256=TKHBIlfquz3yEb8_kg6UXpxVLKxstt3QA9a1XYsLPJk,2455
 ddeutil/workflow/job.py,sha256=Php1b3n6c-jddel8PTSa61kAW22QBTetzoLVR4XXM4E,35240
-ddeutil/workflow/logs.py,sha256=
-ddeutil/workflow/params.py,sha256=
+ddeutil/workflow/logs.py,sha256=iVtyl8i69y7t07tAuWkihc54WlkHCcBy_Ur0WtzJ_lM,31367
+ddeutil/workflow/params.py,sha256=1u8gXs1ZyMq-2eD9H8L7Yjfu5t7b_OzjA0fJvhxdYWY,12505
 ddeutil/workflow/result.py,sha256=4M9VCcveI8Yz6ZrnI-67SZlry-Z8G7e0hziy1k-pklk,5906
-ddeutil/workflow/reusables.py,sha256=
+ddeutil/workflow/reusables.py,sha256=ogE7SwcVacSH_J6vK25fRxvmT4g0GE-jjcteis23Tss,19268
 ddeutil/workflow/scheduler.py,sha256=OsEyj2zscQ-3bDMk2z7UtKlCWLlgoGjaRFt17o1B1ew,27263
-ddeutil/workflow/stages.py,sha256=
-ddeutil/workflow/utils.py,sha256=
-ddeutil/workflow/workflow.py,sha256=
+ddeutil/workflow/stages.py,sha256=xH_f7IRohFCnUrtyD-QW86BlJ72p64JmpRTRrLPoF6A,93241
+ddeutil/workflow/utils.py,sha256=rcaDwXaEs4SCdcBKWx4ZCEtpnNfPI8du7Er6b_rg8t4,9569
+ddeutil/workflow/workflow.py,sha256=8Z_h8OtNHkaGf8MJixTHNeXsyA4mBlYtHDqj0oEVFBs,44858
 ddeutil/workflow/api/__init__.py,sha256=kY30dL8HPY8tY_GBmm7y_3OdoXzB1-EA2a96PLU0AQw,5278
 ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
 ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
@@ -23,9 +23,9 @@ ddeutil/workflow/api/routes/job.py,sha256=8X5VLDJH6PumyNIY6JGRNBsf2gWN0eG9DzxRPS
 ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
 ddeutil/workflow/api/routes/schedules.py,sha256=14RnaJKEGMSJtncI1H_QQVZNBe_jDS40PPRO6qFc3i0,4805
 ddeutil/workflow/api/routes/workflows.py,sha256=GJu5PiXEylswrXylEImpncySjeU9chrvrtjhiMCw2RQ,4529
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
+ddeutil_workflow-0.0.62.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.62.dist-info/METADATA,sha256=wo8CyK5cEyeEYiQkcaDtWwxd8b6BWMv_GKXhGCRWi6k,19427
+ddeutil_workflow-0.0.62.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+ddeutil_workflow-0.0.62.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.62.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.62.dist-info/RECORD,,
File without changes
File without changes
File without changes