ddeutil-workflow 0.0.13__py3-none-any.whl → 0.0.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -1
- ddeutil/workflow/__types.py +24 -8
- ddeutil/workflow/api.py +2 -2
- ddeutil/workflow/conf.py +41 -0
- ddeutil/workflow/cron.py +19 -12
- ddeutil/workflow/job.py +189 -153
- ddeutil/workflow/log.py +28 -14
- ddeutil/workflow/scheduler.py +233 -112
- ddeutil/workflow/stage.py +66 -33
- ddeutil/workflow/utils.py +106 -40
- {ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/METADATA +6 -4
- ddeutil_workflow-0.0.14.dist-info/RECORD +22 -0
- {ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/WHEEL +1 -1
- ddeutil_workflow-0.0.13.dist-info/RECORD +0 -21
- {ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/top_level.txt +0 -0
ddeutil/workflow/stage.py
CHANGED
@@ -12,14 +12,16 @@ can tracking logs.
 handle stage error on this stage model. I think stage model should have a lot of
 usecase and it does not worry when I want to create a new one.
 
-    Execution
-
+    Execution --> Ok --> Result with 0
+              --> Error --> Raise StageException
+
+    On the context I/O that pass to stage object at execute process. The execute
+    method receive `{"params": {...}}` for mapping to template.
 """
 from __future__ import annotations
 
 import contextlib
 import inspect
-import os
 import subprocess
 import sys
 import uuid
@@ -38,12 +40,12 @@ try:
 except ImportError:
     from typing_extensions import ParamSpec
 
-from ddeutil.core import str2bool
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
 from .__types import DictData, DictStr, Re, TupleStr
+from .conf import config
 from .exceptions import StageException
 from .log import get_logger
 from .utils import (
@@ -69,7 +71,7 @@ __all__: TupleStr = (
     "HookStage",
     "TriggerStage",
     "Stage",
-    "
+    "HookSearchData",
     "extract_hook",
     "handler_result",
 )
@@ -87,8 +89,18 @@ def handler_result(message: str | None = None) -> Callable[P, Result]:
         --> Error --> Raise StageException
                   --> Result with 1 (if env var was set)
 
+    On the last step, it will set the running ID on a return result object
+    from current stage ID before release the final result.
+
     :param message: A message that want to add at prefix of exception statement.
+    :rtype: Callable[P, Result]
     """
+    # NOTE: The prefix message string that want to add on the first exception
+    #   message dialog.
+    #
+    #   ... ValueError: {message}
+    #   ... raise value error from the stage execution process.
+    #
     message: str = message or ""
 
     def decorator(func: Callable[P, Result]) -> Callable[P, Result]:
@@ -103,9 +115,7 @@ def handler_result(message: str | None = None) -> Callable[P, Result]:
                 logger.error(
                     f"({self.run_id}) [STAGE]: {err.__class__.__name__}: {err}"
                 )
-                if
-                    os.getenv("WORKFLOW_CORE_STAGE_RAISE_ERROR", "true")
-                ):
+                if config.stage_raise_error:
                     # NOTE: If error that raise from stage execution course by
                     #   itself, it will return that error with previous
                     #   dependency.
@@ -119,14 +129,14 @@ def handler_result(message: str | None = None) -> Callable[P, Result]:
                     ) from None
 
                 # NOTE: Catching exception error object to result with
-                #   error_message
-
+                #   error_message and error keys.
+                return Result(
                     status=1,
                     context={
+                        "error": err,
                        "error_message": f"{err.__class__.__name__}: {err}",
                     },
-                )
-                return rs.set_run_id(self.run_id)
+                ).set_run_id(self.run_id)
 
         return wrapped
 
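The hunks above replace per-call environment-variable checks (`WORKFLOW_CORE_STAGE_RAISE_ERROR`) with the new `config.stage_raise_error` flag and make the error handler attach both the error object and a readable message before stamping the running ID. The snippet below is only a minimal, self-contained sketch of that decorator pattern under stated assumptions; `RAISE_ERROR`, `Result`, `StageException`, and `DemoStage` here are stand-ins, not the package's real objects.

```python
from dataclasses import dataclass, field
from functools import wraps
from typing import Any, Callable, Optional

# ``RAISE_ERROR`` stands in for ``config.stage_raise_error``; everything in
# this sketch is illustrative and not the package's real implementation.
RAISE_ERROR: bool = False


class StageException(Exception):
    """Raised when a stage fails and the raise-error flag is enabled."""


@dataclass
class Result:
    status: int = 2
    context: dict = field(default_factory=dict)
    run_id: Optional[str] = None

    def set_run_id(self, running_id: str) -> "Result":
        self.run_id = running_id
        return self


def handler_result(message: str = "") -> Callable:
    """Wrap an ``execute`` method: return its Result tagged with the stage's
    run ID, or convert an exception into StageException / a failed Result."""

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        def wrapped(self, *args: Any, **kwargs: Any) -> Result:
            try:
                return func(self, *args, **kwargs).set_run_id(self.run_id)
            except Exception as err:
                if RAISE_ERROR:
                    raise StageException(f"{message} {err}") from err
                # Keep both the error object and a readable message, mirroring
                # the new ``error`` context key added in the diff above.
                return Result(
                    status=1,
                    context={
                        "error": err,
                        "error_message": f"{err.__class__.__name__}: {err}",
                    },
                ).set_run_id(self.run_id)

        return wrapped

    return decorator


class DemoStage:
    run_id: str = "demo-run-id"

    @handler_result("demo stage:")
    def execute(self, params: dict) -> Result:
        raise ValueError("boom")


print(DemoStage().execute({}).context["error_message"])  # ValueError: boom
```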
@@ -162,10 +172,12 @@ class BaseStage(BaseModel, ABC):
     )
 
     @model_validator(mode="after")
-    def __prepare_running_id(self):
+    def __prepare_running_id(self) -> Self:
         """Prepare stage running ID that use default value of field and this
         method will validate name and id fields should not contain any template
         parameter (exclude matrix template).
+
+        :rtype: Self
         """
         if self.run_id is None:
             self.run_id = gen_id(self.name + (self.id or ""), unique=True)
@@ -199,16 +211,28 @@
         raise NotImplementedError("Stage should implement ``execute`` method.")
 
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
-        """Set an outputs from execution process to
+        """Set an outputs from execution process to the receive context. The
+        result from execution will pass to value of ``outputs`` key.
+
+        For example of setting output method, If you receive execute output
+        and want to set on the `to` like;
+
+            ... (i) output: {'foo': bar}
+            ... (ii) to: {}
+
+        The result of the `to` variable will be;
+
+            ... (iii) to: {
+                'stages': {
+                    '<stage-id>': {'outputs': {'foo': 'bar'}}
+                }
+            }
 
         :param output: A output data that want to extract to an output key.
         :param to: A context data that want to add output result.
         :rtype: DictData
         """
-        if not (
-            self.id
-            or str2bool(os.getenv("WORKFLOW_CORE_STAGE_DEFAULT_ID", "false"))
-        ):
+        if not (self.id or config.stage_default_id):
             logger.debug(
                 f"({self.run_id}) [STAGE]: Output does not set because this "
                 f"stage does not set ID or default stage ID config flag not be "
@@ -220,16 +244,15 @@
         if "stages" not in to:
             to["stages"] = {}
 
-
-
-
-
-
-
-        # NOTE: Set the output to that stage generated ID.
-        logger.debug(
-            f"({self.run_id}) [STAGE]: Set output complete with stage ID: {_id}"
+        # NOTE: If the stage ID did not set, it will use its name instead.
+        _id: str = (
+            param2template(self.id, params=to)
+            if self.id
+            else gen_id(param2template(self.name, params=to))
         )
+
+        # NOTE: Set the output to that stage generated ID with ``outputs`` key.
+        logger.debug(f"({self.run_id}) [STAGE]: Set outputs on: {_id}")
         to["stages"][_id] = {"outputs": output}
         return to
 
@@ -240,10 +263,10 @@
         :param params: A parameters that want to pass to condition template.
         :rtype: bool
         """
-        params: DictData = params or {}
         if self.condition is None:
             return False
 
+        params: DictData = {} if params is None else params
         _g: DictData = globals() | params
         try:
             rs: bool = eval(param2template(self.condition, params), _g, {})
@@ -462,12 +485,13 @@ class PyStage(BaseStage):
         exec(run, _globals, _locals)
 
         return Result(
-            status=0,
+            status=0,
+            context={"locals": _locals, "globals": _globals},
         )
 
 
 @dataclass(frozen=True)
-class
+class HookSearchData:
     """Hook Search dataclass that use for receive regular expression grouping
     dict from searching hook string value.
     """
@@ -490,7 +514,7 @@ def extract_hook(hook: str) -> Callable[[], TagFunc]:
     )
 
     # NOTE: Pass the searching hook string to `path`, `func`, and `tag`.
-    hook:
+    hook: HookSearchData = HookSearchData(**found.groupdict())
 
     # NOTE: Registry object should implement on this package only.
     rgt: dict[str, Registry] = make_registry(f"{hook.path}")
@@ -596,7 +620,11 @@ class TriggerStage(BaseStage):
     ... }
     """
 
-    trigger: str = Field(
+    trigger: str = Field(
+        description=(
+            "A trigger workflow name that should already exist on the config."
+        ),
+    )
     params: DictData = Field(
         default_factory=dict,
         description="A parameter that want to pass to workflow execution.",
@@ -610,6 +638,7 @@
         :param params: A parameter data that want to use in this execution.
         :rtype: Result
         """
+        # NOTE: Lazy import this workflow object.
         from . import Workflow
 
         # NOTE: Loading workflow object from trigger name.
@@ -624,7 +653,11 @@
         return wf.execute(params=param2template(self.params, params))
 
 
-# NOTE:
+# NOTE:
+#   An order of parsing stage model on the Job model with ``stages`` field.
+#   From the current build-in stages, they do not have stage that have the same
+#   fields that be cause of parsing on the Job's stages key.
+#
 Stage = Union[
     PyStage,
     BashStage,
ddeutil/workflow/utils.py
CHANGED
@@ -13,6 +13,7 @@ import time
 from abc import ABC, abstractmethod
 from ast import Call, Constant, Expr, Module, Name, parse
 from collections.abc import Iterator
+from dataclasses import field
 from datetime import date, datetime
 from functools import cached_property, wraps
 from hashlib import md5
@@ -32,19 +33,22 @@ except ImportError:
 from ddeutil.core import getdot, hasdot, hash_str, import_string, lazy, str2bool
 from ddeutil.io import PathData, PathSearch, YamlFlResolve, search_env_replace
 from ddeutil.io.models.lineage import dt_now
-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict, Field
+from pydantic.dataclasses import dataclass
 from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
 from .__types import DictData, Matrix, Re
+from .conf import config
 from .exceptions import ParamValueException, UtilException
 
-logger = logging.getLogger("ddeutil.workflow")
 P = ParamSpec("P")
 AnyModel = TypeVar("AnyModel", bound=BaseModel)
 AnyModelType = type[AnyModel]
 
+logger = logging.getLogger("ddeutil.workflow")
+
 
 def get_diff_sec(dt: datetime, tz: ZoneInfo | None = None) -> int:
     """Return second value that come from diff of an input datetime and the
@@ -110,7 +114,7 @@ class ConfParams(BaseModel):
     )
 
 
-def
+def load_config() -> ConfParams:
     """Load Config data from ``workflows-conf.yaml`` file.
 
     Configuration Docs:
@@ -158,7 +162,7 @@ class SimLoad:
     :param externals: An external parameters
 
     Noted:
-
+
         The config data should have ``type`` key for modeling validation that
         make this loader know what is config should to do pass to.
 
@@ -248,11 +252,11 @@ class Loader(SimLoad):
     ) -> DictData:
         """Override the find class method from the Simple Loader object."""
         return super().finds(
-            obj=obj, params=
+            obj=obj, params=load_config(), include=include, exclude=exclude
         )
 
     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name,
+        super().__init__(name, load_config(), externals)
 
 
 def gen_id(
@@ -275,15 +279,14 @@ def gen_id(
     if not isinstance(value, str):
         value: str = str(value)
 
-    tz: ZoneInfo = ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
     if str2bool(os.getenv("WORKFLOW_CORE_PIPELINE_ID_SIMPLE", "true")):
         return hash_str(f"{(value if sensitive else value.lower())}", n=10) + (
-            f"{datetime.now(tz=tz):%Y%m%d%H%M%S%f}" if unique else ""
+            f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}" if unique else ""
         )
     return md5(
         (
             f"{(value if sensitive else value.lower())}"
-            + (f"{datetime.now(tz=tz):%Y%m%d%H%M%S%f}" if unique else "")
+            + (f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}" if unique else "")
         ).encode()
     ).hexdigest()
 
@@ -317,13 +320,14 @@ class TagFunc(Protocol):
     def __call__(self, *args, **kwargs): ...
 
 
-def tag(name: str, alias: str | None = None):
+def tag(name: str, alias: str | None = None) -> Callable[P, TagFunc]:
     """Tag decorator function that set function attributes, ``tag`` and ``name``
     for making registries variable.
 
-    :param: name: A tag
+    :param: name: A tag name for make different use-case of a function.
    :param: alias: A alias function name that keeping in registries. If this
        value does not supply, it will use original function name from __name__.
+    :rtype: Callable[P, TagFunc]
     """
 
     def func_internal(func: Callable[[...], Any]) -> TagFunc:
@@ -350,7 +354,7 @@ def make_registry(submodule: str) -> dict[str, Registry]:
     :rtype: dict[str, Registry]
     """
     rs: dict[str, Registry] = {}
-    for module in
+    for module in load_config().engine.registry:
         # NOTE: try to sequential import task functions
         try:
             importer = import_module(f"{module}.{submodule}")
@@ -515,29 +519,50 @@ Param = Union[
 ]
 
 
-
-
-
+@dataclass
+class Result:
+    """Result Pydantic Model for passing and receiving data context from any
+    module execution process like stage execution, job execution, or workflow
+    execution.
+
+    For comparison property, this result will use ``status``, ``context``,
+    and ``_run_id`` fields to comparing with other result instance.
     """
 
-    status: int =
-    context: DictData =
+    status: int = field(default=2)
+    context: DictData = field(default_factory=dict)
+    start_at: datetime = field(default_factory=dt_now, compare=False)
+    end_at: Optional[datetime] = field(default=None, compare=False)
 
     # NOTE: Ignore this field to compare another result model with __eq__.
-
-
+    _run_id: Optional[str] = field(default=None)
+    _parent_run_id: Optional[str] = field(default=None, compare=False)
 
     @model_validator(mode="after")
-    def __prepare_run_id(self):
-    if
-
+    def __prepare_run_id(self) -> Self:
+        """Prepare running ID which use default ID if it initialize at the first
+        time
+
+        :rtype: Self
+        """
+        self._run_id = gen_id("manual", unique=True)
         return self
 
     def set_run_id(self, running_id: str) -> Self:
+        """Set a running ID.
+
+        :param running_id: A running ID that want to update on this model.
+        :rtype: Self
+        """
         self._run_id = running_id
         return self
 
     def set_parent_run_id(self, running_id: str) -> Self:
+        """Set a parent running ID.
+
+        :param running_id: A running ID that want to update on this model.
+        :rtype: Self
+        """
         self._parent_run_id = running_id
         return self
 
@@ -549,33 +574,55 @@ class Result(BaseModel):
     def run_id(self):
         return self._run_id
 
-    def
+    def catch(self, status: int, context: DictData) -> Self:
+        """Catch the status and context to current data."""
+        self.__dict__["status"] = status
+        self.__dict__["context"].update(context)
+        return self
+
+    def receive(self, result: Result) -> Self:
+        """Receive context from another result object.
+
+        :rtype: Self
+        """
         self.__dict__["status"] = result.status
         self.__dict__["context"].update(result.context)
+
+        # NOTE: Update running ID from an incoming result.
         self._parent_run_id = result.parent_run_id
         self._run_id = result.run_id
         return self
 
-    def receive_jobs(self, result: Result) ->
+    def receive_jobs(self, result: Result) -> Self:
+        """Receive context from another result object that use on the workflow
+        execution which create a ``jobs`` keys on the context if it do not
+        exist.
+
+        :rtype: Self
+        """
         self.__dict__["status"] = result.status
 
         # NOTE: Check the context has jobs key.
         if "jobs" not in self.__dict__["context"]:
             self.__dict__["context"]["jobs"] = {}
-
         self.__dict__["context"]["jobs"].update(result.context)
+
+        # NOTE: Update running ID from an incoming result.
         self._parent_run_id = result.parent_run_id
         self._run_id = result.run_id
         return self
 
 
-def make_exec(path: str | Path):
-    """Change mode of file to be executable file.
+def make_exec(path: str | Path) -> None:  # pragma: no cov
+    """Change mode of file to be executable file.
+
+    :param path: A file path that want to make executable permission.
+    """
     f: Path = Path(path) if isinstance(path, str) else path
     f.chmod(f.stat().st_mode | stat.S_IEXEC)
 
 
-FILTERS: dict[str, callable] = {
+FILTERS: dict[str, callable] = {  # pragma: no cov
     "abs": abs,
     "str": str,
     "int": int,
@@ -590,17 +637,18 @@ class FilterFunc(Protocol):
 
     name: str
 
-    def __call__(self, *args, **kwargs): ...
+    def __call__(self, *args, **kwargs): ...  # pragma: no cov
 
 
-def custom_filter(name: str) -> Callable[P,
+def custom_filter(name: str) -> Callable[P, FilterFunc]:
     """Custom filter decorator function that set function attributes, ``filter``
     for making filter registries variable.
 
     :param: name: A filter name for make different use-case of a function.
+    :rtype: Callable[P, FilterFunc]
     """
 
-    def func_internal(func: Callable[[...], Any]) ->
+    def func_internal(func: Callable[[...], Any]) -> FilterFunc:
         func.filter = name
 
         @wraps(func)
@@ -622,7 +670,7 @@ def make_filter_registry() -> dict[str, FilterRegistry]:
     :rtype: dict[str, Registry]
     """
     rs: dict[str, Registry] = {}
-    for module in
+    for module in load_config().engine.registry_filter:
         # NOTE: try to sequential import task functions
         try:
             importer = import_module(module)
@@ -644,7 +692,10 @@ def make_filter_registry() -> dict[str, FilterRegistry]:
 def get_args_const(
     expr: str,
 ) -> tuple[str, list[Constant], dict[str, Constant]]:
-    """Get arguments and keyword-arguments from function calling string.
+    """Get arguments and keyword-arguments from function calling string.
+
+    :rtype: tuple[str, list[Constant], dict[str, Constant]]
+    """
     try:
         mod: Module = parse(expr)
     except SyntaxError:
@@ -678,6 +729,7 @@ def get_args_const(
 
 @custom_filter("fmt")
 def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
+    """Format datetime object to string with the format."""
     if isinstance(value, datetime):
         return value.strftime(fmt)
     raise UtilException(
@@ -699,8 +751,8 @@ def map_post_filter(
     """
     for _filter in post_filter:
         func_name, _args, _kwargs = get_args_const(_filter)
-        args = [arg.value for arg in _args]
-        kwargs = {k: v.value for k, v in _kwargs.items()}
+        args: list = [arg.value for arg in _args]
+        kwargs: dict = {k: v.value for k, v in _kwargs.items()}
 
         if func_name not in filters:
             raise UtilException(
@@ -845,8 +897,12 @@ def param2template(
 
 
 def filter_func(value: Any) -> Any:
-    """Filter own created function
-    function name. If it is built-in function, it does not
+    """Filter out an own created function of any value of mapping context by
+    replacing it to its function name. If it is built-in function, it does not
+    have any changing.
+
+    :param value: A value context data that want to filter out function value.
+    :type: The same type of an input ``value``.
     """
     if isinstance(value, dict):
         return {k: filter_func(value[k]) for k in value}
@@ -869,14 +925,20 @@ def dash2underscore(
     *,
     fixed: str | None = None,
 ) -> DictData:
-    """Change key name that has dash to underscore.
+    """Change key name that has dash to underscore.
+
+    :rtype: DictData
+    """
     if key in values:
         values[(fixed or key.replace("-", "_"))] = values.pop(key)
     return values
 
 
 def cross_product(matrix: Matrix) -> Iterator[DictData]:
-    """Iterator of products value from matrix.
+    """Iterator of products value from matrix.
+
+    :rtype: Iterator[DictData]
+    """
     yield from (
         {_k: _v for e in mapped for _k, _v in e.items()}
         for mapped in product(
@@ -897,7 +959,7 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
     """
     if n < 1:
         raise ValueError("n must be at least one")
-    it = iter(iterable)
+    it: Iterator[Any] = iter(iterable)
     while True:
         chunk_it = islice(it, n)
         try:
@@ -905,3 +967,7 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
         except StopIteration:
             return
         yield chain((first_el,), chunk_it)
+
+
+def queue2str(queue: list[datetime]) -> Iterator[str]:  # pragma: no cov
+    return (f"{q:%Y-%m-%d %H:%M:%S}" for q in queue)
{ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.
+Version: 0.0.14
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -22,12 +22,13 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: ddeutil
+Requires-Dist: ddeutil >=0.4.0
+Requires-Dist: ddeutil-io >=0.1.13
 Requires-Dist: python-dotenv ==1.0.1
 Requires-Dist: typer <1.0.0,==0.12.5
 Requires-Dist: schedule <2.0.0,==1.2.2
 Provides-Extra: api
-Requires-Dist: fastapi <1.0.0,>=0.
+Requires-Dist: fastapi <1.0.0,>=0.115.0 ; extra == 'api'
 
 # Workflow
 
@@ -62,7 +63,7 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 > [!NOTE]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
-> `.yml` files and all of config file from several data orchestration framework
+> with `.yml` files and all of config file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
 >
 > Other workflow that I interest on them and pick some interested feature to this
@@ -92,6 +93,7 @@ this package with application add-ons, you should add `app` in installation;
 > | ddeutil-workflow:python3.10 | `3.10` | :x: |
 > | ddeutil-workflow:python3.11 | `3.11` | :x: |
 > | ddeutil-workflow:python3.12 | `3.12` | :x: |
+> | ddeutil-workflow:python3.12 | `3.13` | :x: |
 
 ## :beers: Usage
 
ddeutil_workflow-0.0.14.dist-info/RECORD
ADDED
@@ -0,0 +1,22 @@
+ddeutil/workflow/__about__.py,sha256=Xas_M3BaGwGfZOJTvMWUlmlW09aGC-Apst-NtkPddY4,28
+ddeutil/workflow/__init__.py,sha256=-DIy8SGFsD7_wqp-V-K8v8jTxacmqrcyj_SFx1WS6qg,687
+ddeutil/workflow/__types.py,sha256=aBbytylSPIe_cip2KIyqLN2eUloMOJdkayqKWCBrwhk,2353
+ddeutil/workflow/api.py,sha256=cwju_qhY6m0kLtaoa77QLglC9tl7RjjZ4UnJYV3SlQQ,4810
+ddeutil/workflow/cli.py,sha256=Ikcq526WeIl-737-v55T0PwAZ2pNiZFxlN0Y-DjhDbQ,3374
+ddeutil/workflow/conf.py,sha256=j19G7rDxQRGgSRQW3pxIYtK5lB3fZv0eG_CAoqoIhPw,1140
+ddeutil/workflow/cron.py,sha256=naWefHc3EnVo41Yf1zQeXOzF27YlTlnfj0XnQ6_HO-U,25514
+ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
+ddeutil/workflow/job.py,sha256=zEefiEAxyC34NvbNVpKexTVU1E_031446308zGMdcmE,21488
+ddeutil/workflow/log.py,sha256=Ev-Szi0KC_MmbFY4g4BWv6tUSmcLKWKZ03ZInmYPmgU,6490
+ddeutil/workflow/on.py,sha256=vsZG19mNoztDSB_ObD_4ZWPKgHYpBDJMWw97ZiTavNE,7237
+ddeutil/workflow/repeat.py,sha256=e3dekPTlMlxCCizfBYsZ8dD8Juy4rtfqDZJU3Iky2oA,5011
+ddeutil/workflow/route.py,sha256=ABEk-WlVo9XGFc7zCPbckX33URCNH7woQFU1keX_8PQ,6970
+ddeutil/workflow/scheduler.py,sha256=CcUFichnvPbQzSEk_ikNgFwZimTObGHfXxHChuysAo4,45706
+ddeutil/workflow/stage.py,sha256=Tt5QQrO_dN8MO9gPtiziOqVrd64UTJZwbgifWeXBCIA,23574
+ddeutil/workflow/utils.py,sha256=epJMTsA4BPQa0gECgcWJ38IENlejpnF3OTBNc0eaqYE,30715
+ddeutil_workflow-0.0.14.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.14.dist-info/METADATA,sha256=sf9kiPoGazaRxqymTjaxZyH47yNyGj0RElc-NLmUa4w,11653
+ddeutil_workflow-0.0.14.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ddeutil_workflow-0.0.14.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
+ddeutil_workflow-0.0.14.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.14.dist-info/RECORD,,
ddeutil_workflow-0.0.13.dist-info/RECORD
DELETED
@@ -1,21 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=StSv8QbtF16HmqqJ8TfZlgbD1BgLyYHcubwplM-eSto,28
-ddeutil/workflow/__init__.py,sha256=aEQiEWwTPGhfwpzzdb99xXaHchi5ABWUHl2iLIyT18E,664
-ddeutil/workflow/__types.py,sha256=SYMoxbENQX8uPsiCZkjtpHAqqHOh8rUrarAFicAJd0E,1773
-ddeutil/workflow/api.py,sha256=xVP8eGu1nnR8HM0ULTwxs9TV9tsxCOjZ68cAffw2f3o,4802
-ddeutil/workflow/cli.py,sha256=Ikcq526WeIl-737-v55T0PwAZ2pNiZFxlN0Y-DjhDbQ,3374
-ddeutil/workflow/cron.py,sha256=uhp3E5pl_tX_H88bsDujcwdhZmOE53csyV-ouPpPdK8,25321
-ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
-ddeutil/workflow/job.py,sha256=iwiDUGgnId6QFkzqLZuiWFYUNfY-qYJebaGwhFnMKH8,20633
-ddeutil/workflow/log.py,sha256=bZyyqf3oNBB8oRf8RI0YvII7wHHoj4wC-nmW_pQjQ1c,6036
-ddeutil/workflow/on.py,sha256=vsZG19mNoztDSB_ObD_4ZWPKgHYpBDJMWw97ZiTavNE,7237
-ddeutil/workflow/repeat.py,sha256=e3dekPTlMlxCCizfBYsZ8dD8Juy4rtfqDZJU3Iky2oA,5011
-ddeutil/workflow/route.py,sha256=ABEk-WlVo9XGFc7zCPbckX33URCNH7woQFU1keX_8PQ,6970
-ddeutil/workflow/scheduler.py,sha256=fe9NGobU8zN95C0FY2PB7eYI9tzyvyh-_K7vcUFFBO8,41674
-ddeutil/workflow/stage.py,sha256=rGFdLLYj6eo8aqSRr4lkBBdah4KIzCzKefJeg0hk0O8,22289
-ddeutil/workflow/utils.py,sha256=TbqgPkDDYBpqCZ7HV2TU3AH1_Mv-zfrJdwVL-l2SPUo,28559
-ddeutil_workflow-0.0.13.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.13.dist-info/METADATA,sha256=HuSRkM94JcefbkiCR6_3khXeUiAsb0FMirS3d7qWGHk,11556
-ddeutil_workflow-0.0.13.dist-info/WHEEL,sha256=5Mi1sN9lKoFv_gxcPtisEVrJZihrm_beibeg5R6xb4I,91
-ddeutil_workflow-0.0.13.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
-ddeutil_workflow-0.0.13.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.13.dist-info/RECORD,,
{ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/LICENSE
File without changes
{ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/entry_points.txt
File without changes
{ddeutil_workflow-0.0.13.dist-info → ddeutil_workflow-0.0.14.dist-info}/top_level.txt
File without changes