ddeutil-workflow 0.0.58__py3-none-any.whl → 0.0.60__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +3 -3
- ddeutil/workflow/__types.py +9 -2
- ddeutil/workflow/conf.py +6 -4
- ddeutil/workflow/event.py +17 -14
- ddeutil/workflow/exceptions.py +6 -5
- ddeutil/workflow/job.py +40 -39
- ddeutil/workflow/logs.py +171 -73
- ddeutil/workflow/params.py +9 -5
- ddeutil/workflow/result.py +19 -19
- ddeutil/workflow/reusables.py +9 -9
- ddeutil/workflow/scheduler.py +8 -8
- ddeutil/workflow/stages.py +96 -85
- ddeutil/workflow/utils.py +11 -10
- ddeutil/workflow/workflow.py +33 -32
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.60.dist-info}/METADATA +8 -3
- ddeutil_workflow-0.0.60.dist-info/RECORD +31 -0
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.60.dist-info}/WHEEL +1 -1
- ddeutil_workflow-0.0.58.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.60.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.60.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.60.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.58"
+__version__: str = "0.0.60"
ddeutil/workflow/__cron.py
CHANGED
@@ -699,9 +699,9 @@ class CronJob:

     def schedule(
         self,
-        date: datetime
+        date: Optional[datetime] = None,
         *,
-        tz: str
+        tz: Optional[str] = None,
     ) -> CronRunner:
         """Returns CronRunner instance that be datetime runner with this
         cronjob. It can use `next`, `prev`, or `reset` methods to generate
@@ -766,7 +766,7 @@ class CronRunner:
     def __init__(
         self,
         cron: CronJob | CronJobYear,
-        date: datetime
+        date: Optional[datetime] = None,
         *,
         tz: str | ZoneInfo | None = None,
     ) -> None:
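Both hunks relax required arguments into optional ones: `CronJob.schedule` and `CronRunner.__init__` now accept a missing `date` and `tz`. A minimal sketch of that calling pattern, assuming the runner falls back to "now" in the requested zone when no date is given; `resolve_schedule_start` is a hypothetical helper, not part of the package:

```python
from datetime import datetime
from typing import Optional
from zoneinfo import ZoneInfo


def resolve_schedule_start(
    date: Optional[datetime] = None,
    tz: Optional[str] = None,
) -> datetime:
    """Resolve a concrete start datetime the way an optional-date schedule API can."""
    zone = ZoneInfo(tz) if tz else ZoneInfo("UTC")
    # Fall back to "now" in the requested zone when no explicit date is given.
    return date.astimezone(zone) if date else datetime.now(zone)


resolve_schedule_start()                   # now, UTC
resolve_schedule_start(tz="Asia/Bangkok")  # now, Bangkok time
```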
ddeutil/workflow/__types.py
CHANGED
@@ -20,6 +20,7 @@ from typing import Any, Optional, TypedDict, Union

 from typing_extensions import Self

+StrOrNone = Optional[str]
 StrOrInt = Union[str, int]
 TupleStr = tuple[str, ...]
 DictData = dict[str, Any]
@@ -42,7 +43,7 @@ class CallerRe:

     full: str
     caller: str
-    caller_prefix:
+    caller_prefix: StrOrNone
     caller_last: str
     post_filters: str

@@ -50,6 +51,9 @@ class CallerRe:
     def from_regex(cls, match: Match[str]) -> Self:
         """Class construct from matching result.

+        :param match: A match string object for contract this Caller regex data
+            class.
+
         :rtype: Self
         """
         return cls(full=match.group(0), **match.groupdict())
@@ -121,10 +125,13 @@ class Re:
     )

     @classmethod
-    def finditer_caller(cls, value) -> Iterator[CallerRe]:
+    def finditer_caller(cls, value: str) -> Iterator[CallerRe]:
         """Generate CallerRe object that create from matching object that
         extract with re.finditer function.

+        :param value: (str) A string value that want to finditer with the caller
+            regular expression.
+
         :rtype: Iterator[CallerRe]
         """
         for found in cls.RE_CALLER.finditer(value):
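The new `StrOrNone` alias is simply `Optional[str]` under a shorter name, and the rest of the release swaps it into annotations such as `caller_prefix`. A small sketch of how the alias reads in practice; only the alias names come from the diff, `describe` is a hypothetical function:

```python
from typing import Optional, Union

StrOrNone = Optional[str]
StrOrInt = Union[str, int]


def describe(caller_prefix: StrOrNone = None, caller_last: str = "task") -> str:
    # A None prefix simply collapses to the bare caller name.
    return f"{caller_prefix}/{caller_last}" if caller_prefix else caller_last


assert describe() == "task"
assert describe("tasks") == "tasks/task"
```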
ddeutil/workflow/conf.py
CHANGED
@@ -5,6 +5,7 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+import copy
 import json
 import os
 from abc import ABC, abstractmethod
@@ -26,13 +27,13 @@ T = TypeVar("T")
 PREFIX: Final[str] = "WORKFLOW"


-def env(var: str, default: str
+def env(var: str, default: Optional[str] = None) -> Optional[str]:
     """Get environment variable with uppercase and adding prefix string.

     :param var: (str) A env variable name.
-    :param default: (str
+    :param default: (Optional[str]) A default value if an env var does not set.

-    :rtype: str
+    :rtype: Optional[str]
     """
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)

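The full body of the reworked `env` helper is visible in the hunk, so its behavior can be shown end to end: the name is uppercased, spaces become underscores, the `WORKFLOW_` prefix is prepended, and a missing variable now falls back to `None` instead of requiring a caller-supplied default. A runnable sketch of that lookup (the demo environment variable is made up):

```python
import os
from typing import Final, Optional

PREFIX: Final[str] = "WORKFLOW"


def env(var: str, default: Optional[str] = None) -> Optional[str]:
    # Uppercase, replace spaces, and prepend the package prefix before lookup.
    return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)


os.environ["WORKFLOW_LOG_LEVEL"] = "DEBUG"   # hypothetical setting
assert env("log level") == "DEBUG"           # resolves WORKFLOW_LOG_LEVEL
assert env("missing") is None                # default now falls back to None
```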
@@ -298,6 +299,7 @@ class FileLoad(BaseLoad):
                 f"Multi-config paths does not support for type: {type(paths)}"
             )
         else:
+            paths: list[Path] = copy.deepcopy(paths)
             paths.append(path)

         all_data: list[tuple[float, DictData]] = []
@@ -398,7 +400,7 @@ class FileLoad(BaseLoad):
         return is_ignored(file, read_ignore(path / ignore_filename))

     @classmethod
-    def filter_yaml(cls, file: Path, name: str
+    def filter_yaml(cls, file: Path, name: Optional[str] = None) -> DictData:
         """Read a YAML file context from an input file path and specific name.

         :param file: (Path) A file path that want to extract YAML context.
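The added `copy.deepcopy(paths)` line sits right before `paths.append(path)`, which suggests the goal is to stop the method from appending into the caller's own list. A minimal sketch of that defensive-copy pattern, with a hypothetical `with_default_path` function standing in for the `FileLoad` branch:

```python
import copy
from pathlib import Path


def with_default_path(paths: list[Path], default: Path) -> list[Path]:
    # Work on a copy so the caller's list is not extended as a side effect.
    paths = copy.deepcopy(paths)
    paths.append(default)
    return paths


user_paths = [Path("conf")]
merged = with_default_path(user_paths, Path("."))
assert user_paths == [Path("conf")]          # caller's list is untouched
assert merged == [Path("conf"), Path(".")]
```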
ddeutil/workflow/event.py
CHANGED
@@ -3,14 +3,15 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-"""Event module
-`CrontabYear`
+"""Event module include all event object for trigger the Workflow to release.
+Now, it has only `Crontab` and `CrontabYear` event models on this module because
+I think it is the core event for workflow orchestration.
 """
 from __future__ import annotations

 from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Any, Literal, Union
+from typing import Annotated, Any, Literal, Optional, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -28,7 +29,7 @@ Interval = Literal["daily", "weekly", "monthly"]
 def interval2crontab(
     interval: Interval,
     *,
-    day: str
+    day: Optional[str] = None,
     time: str = "00:00",
 ) -> str:
     """Return the crontab string that was generated from specific values.
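`interval2crontab` now tolerates a missing `day`. The package's own mapping table is not visible in this hunk, so the following is only a hypothetical illustration of how an interval-plus-time pair can become a crontab string with an optional day:

```python
from typing import Literal, Optional

Interval = Literal["daily", "weekly", "monthly"]

WEEKDAYS = {
    "monday": "1", "tuesday": "2", "wednesday": "3", "thursday": "4",
    "friday": "5", "saturday": "6", "sunday": "0",
}


def interval_to_crontab(
    interval: Interval,
    *,
    day: Optional[str] = None,
    time: str = "00:00",
) -> str:
    """Hypothetical mapping only; the package's real table may differ."""
    hour, minute = time.split(":")
    if interval == "daily":
        return f"{int(minute)} {int(hour)} * * *"
    if interval == "weekly":
        return f"{int(minute)} {int(hour)} * * {WEEKDAYS[(day or 'monday').lower()]}"
    return f"{int(minute)} {int(hour)} 1 * *"


assert interval_to_crontab("daily", time="01:30") == "30 1 * * *"
```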
@@ -86,7 +87,7 @@ class Crontab(BaseModel):
         CronJob,
         Field(
             description=(
-                "A Cronjob object that use for validate and generate datetime."
+                "A Cronjob object that use for validate and generate datetime."
             ),
         ),
     ]
@@ -117,7 +118,6 @@ class Crontab(BaseModel):
         passing["cronjob"] = interval2crontab(
             **{v: value[v] for v in value if v in ("interval", "day", "time")}
         )
-        print(passing)
         return cls(extras=extras | passing.pop("extras", {}), **passing)

     @classmethod
@@ -170,9 +170,10 @@ class Crontab(BaseModel):

     @model_validator(mode="before")
     def __prepare_values(cls, data: Any) -> Any:
-        """Extract tz key from
+        """Extract a `tz` key from data and change the key name from `tz` to
+        `timezone`.

-        :param data: (DictData) A data that want to pass for create
+        :param data: (DictData) A data that want to pass for create a Crontab
             model.

         :rtype: DictData
@@ -198,7 +199,7 @@ class Crontab(BaseModel):
         "cronjob", mode="before", json_schema_input_type=Union[CronJob, str]
     )
     def __prepare_cronjob(
-        cls, value: str
+        cls, value: Union[str, CronJob], info: ValidationInfo
     ) -> CronJob:
         """Prepare crontab value that able to receive with string type.
         This step will get options kwargs from extras field and pass to the
@@ -234,7 +235,7 @@ class Crontab(BaseModel):
         """
         return str(value)

-    def generate(self, start: str
+    def generate(self, start: Union[str, datetime]) -> CronRunner:
         """Return CronRunner object from an initial datetime.

         :param start: (str | datetime) A string or datetime for generate the
@@ -248,7 +249,7 @@ class Crontab(BaseModel):
             raise TypeError("start value should be str or datetime type.")
         return self.cronjob.schedule(date=start, tz=self.tz)

-    def next(self, start: str
+    def next(self, start: Union[str, datetime]) -> CronRunner:
         """Return a next datetime from Cron runner object that start with any
         date that given from input.

@@ -277,16 +278,18 @@ class CrontabYear(Crontab):
         CronJobYear,
         Field(
             description=(
-                "A Cronjob object that use for validate and generate datetime."
+                "A Cronjob object that use for validate and generate datetime."
             ),
         ),
     ]

     @field_validator(
-        "cronjob",
+        "cronjob",
+        mode="before",
+        json_schema_input_type=Union[CronJobYear, str],
     )
     def __prepare_cronjob(
-        cls, value:
+        cls, value: Union[CronJobYear, str], info: ValidationInfo
     ) -> CronJobYear:
         """Prepare crontab value that able to receive with string type.
         This step will get options kwargs from extras field and pass to the
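The validator changes widen `__prepare_cronjob` to accept either a prepared cron object or a raw string and add a `ValidationInfo` parameter. The same before-mode coercion can be sketched with plain Pydantic v2 pieces; `Schedule` and `CronEvent` below are hypothetical stand-ins for `CronJob` and `Crontab`:

```python
from pydantic import BaseModel, ConfigDict, ValidationInfo, field_validator


class Schedule:
    """Hypothetical stand-in for the CronJob/CronJobYear runtime object."""

    def __init__(self, expr: str) -> None:
        self.expr = expr


class CronEvent(BaseModel):
    """Hypothetical stand-in for the Crontab model in this diff."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    cronjob: Schedule

    @field_validator("cronjob", mode="before")
    @classmethod
    def _prepare_cronjob(cls, value, info: ValidationInfo) -> Schedule:
        # Accept a prepared Schedule or coerce a raw crontab string.
        return value if isinstance(value, Schedule) else Schedule(str(value))


event = CronEvent(cronjob="0 0 * * *")
assert event.cronjob.expr == "0 0 * * *"
```

The `json_schema_input_type` argument the hunks pass to `field_validator` needs a recent Pydantic release and, as far as I know, only affects the generated JSON schema, not the runtime coercion itself.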
ddeutil/workflow/exceptions.py
CHANGED
@@ -9,7 +9,7 @@ annotate for handle error only.
 """
 from __future__ import annotations

-from typing import Literal, TypedDict, overload
+from typing import Literal, Optional, TypedDict, Union, overload


 class ErrorData(TypedDict):
@@ -39,9 +39,9 @@ class BaseWorkflowException(Exception):
     making an error context to the result context.
     """

-    def __init__(self, message: str, *, refs: str
+    def __init__(self, message: str, *, refs: Optional[str] = None):
         super().__init__(message)
-        self.refs: str
+        self.refs: Optional[str] = refs

     @overload
     def to_dict(
@@ -55,8 +55,9 @@ class BaseWorkflowException(Exception):

     def to_dict(
         self, with_refs: bool = False
-    ) -> ErrorData
-        """Return ErrorData data from the current exception object.
+    ) -> Union[ErrorData, dict[str, ErrorData]]:
+        """Return ErrorData data from the current exception object. If with_refs
+        flag was set, it will return mapping of refs and itself data.

         :rtype: ErrorData
         """
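The widened return type pairs with the `@overload` declarations already present on `to_dict`, so a type checker can narrow the result based on `with_refs`. A self-contained sketch of that pattern; `DemoError` and the `ErrorData` keys are hypothetical, only the shapes follow the diff:

```python
from typing import Literal, Optional, TypedDict, Union, overload


class ErrorData(TypedDict):
    # Field names here are illustrative; the package defines its own keys.
    name: str
    message: str


class DemoError(Exception):
    """Hypothetical stand-in for BaseWorkflowException."""

    def __init__(self, message: str, *, refs: Optional[str] = None):
        super().__init__(message)
        self.refs: Optional[str] = refs

    @overload
    def to_dict(self, with_refs: Literal[True]) -> dict[str, ErrorData]: ...

    @overload
    def to_dict(self, with_refs: Literal[False] = False) -> ErrorData: ...

    def to_dict(
        self, with_refs: bool = False
    ) -> Union[ErrorData, dict[str, ErrorData]]:
        data: ErrorData = {"name": type(self).__name__, "message": str(self)}
        # When refs are requested, key the error data by its reference id.
        return {self.refs or "unknown": data} if with_refs else data


err = DemoError("stage failed", refs="stage-01")
assert err.to_dict(with_refs=True) == {
    "stage-01": {"name": "DemoError", "message": "stage failed"}
}
```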
ddeutil/workflow/job.py
CHANGED
@@ -19,6 +19,7 @@ from __future__ import annotations

 import copy
 import time
+from collections.abc import Iterator
 from concurrent.futures import (
     FIRST_EXCEPTION,
     CancelledError,
@@ -38,7 +39,7 @@ from pydantic import BaseModel, Discriminator, Field, SecretStr, Tag
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

-from .__types import DictData, DictStr, Matrix
+from .__types import DictData, DictStr, Matrix, StrOrNone
 from .exceptions import (
     JobException,
     StageException,
@@ -67,8 +68,8 @@ def make(

     :param matrix: (Matrix) A matrix values that want to cross product to
         possible parallelism values.
-    :param include:
-    :param exclude:
+    :param include: A list of additional matrix that want to adds-in.
+    :param exclude: A list of exclude matrix that want to filter-out.

     :rtype: list[DictStr]
     """
@@ -191,22 +192,22 @@ class Strategy(BaseModel):
 class Rule(str, Enum):
     """Rule enum object for assign trigger option."""

-    ALL_SUCCESS
-    ALL_FAILED
-    ALL_DONE
-    ONE_FAILED
-    ONE_SUCCESS
-    NONE_FAILED
-    NONE_SKIPPED
+    ALL_SUCCESS = "all_success"
+    ALL_FAILED = "all_failed"
+    ALL_DONE = "all_done"
+    ONE_FAILED = "one_failed"
+    ONE_SUCCESS = "one_success"
+    NONE_FAILED = "none_failed"
+    NONE_SKIPPED = "none_skipped"


 class RunsOn(str, Enum):
     """Runs-On enum object."""

-    LOCAL
-    SELF_HOSTED
-    AZ_BATCH
-    DOCKER
+    LOCAL = "local"
+    SELF_HOSTED = "self_hosted"
+    AZ_BATCH = "azure_batch"
+    DOCKER = "docker"


 class BaseRunsOn(BaseModel): # pragma: no cov
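The `Rule` and `RunsOn` members gain explicit string values. Because both classes subclass `str`, each member now compares equal to its value and can be constructed from it, which keeps them friendly to YAML/JSON configuration. A short sketch using two of the members shown in the hunk:

```python
from enum import Enum


class Rule(str, Enum):
    """Subset of the trigger-rule members from the hunk above."""

    ALL_SUCCESS = "all_success"
    ONE_FAILED = "one_failed"


assert Rule("all_success") is Rule.ALL_SUCCESS   # construct from the raw value
assert Rule.ONE_FAILED == "one_failed"           # str subclass compares by value
assert Rule.ALL_SUCCESS.value == "all_success"
```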
@@ -328,14 +329,14 @@ class Job(BaseModel):
     ... }
     """

-    id:
+    id: StrOrNone = Field(
         default=None,
         description=(
             "A job ID that was set from Workflow model after initialize step. "
             "If this model create standalone, it will be None."
         ),
     )
-    desc:
+    desc: StrOrNone = Field(
         default=None,
         description="A job description that can be markdown syntax.",
     )
@@ -344,7 +345,7 @@ class Job(BaseModel):
         description="A target node for this job to use for execution.",
         alias="runs-on",
     )
-    condition:
+    condition: StrOrNone = Field(
         default=None,
         description="A job condition statement to allow job executable.",
         alias="if",
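`id`, `desc`, and `condition` are now annotated with `StrOrNone` while keeping their Pydantic aliases (`runs-on`, `if`), so YAML keys that are not valid Python identifiers still land on the right fields. A cut-down, hypothetical `JobSpec` model showing how the alias plus optional default behaves:

```python
from typing import Optional

from pydantic import BaseModel, Field

StrOrNone = Optional[str]


class JobSpec(BaseModel):
    """Hypothetical cut-down Job model; field shapes follow the hunk above."""

    id: StrOrNone = Field(default=None)
    condition: StrOrNone = Field(
        default=None,
        description="A job condition statement to allow job executable.",
        alias="if",
    )


# The YAML/JSON key `if` populates `condition` through the alias.
job = JobSpec.model_validate({"if": "params.debug == false"})
assert job.condition == "params.debug == false"
assert job.id is None
```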
@@ -525,7 +526,7 @@ class Job(BaseModel):
         output: DictData,
         to: DictData,
         *,
-        job_id:
+        job_id: StrOrNone = None,
     ) -> DictData:
         """Set an outputs from execution result context to the received context
         with a `to` input parameter. The result context from job strategy
@@ -566,7 +567,7 @@ class Job(BaseModel):
         :param output: (DictData) A result data context that want to extract
             and transfer to the `strategies` key in receive context.
         :param to: (DictData) A received context data.
-        :param job_id: (
+        :param job_id: (StrOrNone) A job ID if the `id` field does not set.

         :rtype: DictData
         """
@@ -606,9 +607,9 @@ class Job(BaseModel):
         self,
         params: DictData,
         *,
-        run_id:
-        parent_run_id:
-        event: Event
+        run_id: StrOrNone = None,
+        parent_run_id: StrOrNone = None,
+        event: Optional[Event] = None,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
@@ -676,8 +677,8 @@ def local_execute_strategy(
     strategy: DictData,
     params: DictData,
     *,
-    result: Result
-    event: Event
+    result: Optional[Result] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local strategy execution with passing dynamic parameters from the
     job execution and strategy matrix.
@@ -799,9 +800,9 @@ def local_execute(
     job: Job,
     params: DictData,
     *,
-    run_id:
-    parent_run_id:
-    event: Event
+    run_id: StrOrNone = None,
+    parent_run_id: StrOrNone = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution or directly. It will generate matrix values at the first
@@ -874,10 +875,10 @@ def local_execute(
         status: Status = SUCCESS

         if not fail_fast_flag:
-            done:
+            done: Iterator[Future] = as_completed(futures)
         else:
             done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
-            if len(done) != len(futures):
+            if len(list(done)) != len(futures):
                 result.trace.warning(
                     "[JOB]: Handler Fail-Fast: Got exception and set event."
                 )
@@ -895,7 +896,7 @@ def local_execute(
                 else ""
             )
             result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
-            done:
+            done: Iterator[Future] = as_completed(futures)

         for future in done:
             try:
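The fail-fast branch of `local_execute` distinguishes between `as_completed`, which lazily yields futures as they finish, and `wait(..., return_when=FIRST_EXCEPTION)`, which returns `done`/`not_done` sets as soon as any future raises; the new `Iterator[Future]` annotation and the `list(done)` length check follow from that split. A hypothetical `run_all` helper showing the same branching with only the standard library:

```python
from collections.abc import Iterator
from concurrent.futures import (
    FIRST_EXCEPTION,
    Future,
    ThreadPoolExecutor,
    as_completed,
    wait,
)


def run_all(tasks, fail_fast: bool = False) -> list:
    """Collect results lazily, or stop scheduling once anything raises."""
    with ThreadPoolExecutor() as executor:
        futures: list[Future] = [executor.submit(task) for task in tasks]
        if not fail_fast:
            # as_completed() yields futures one by one as they finish.
            done: Iterator[Future] = as_completed(futures)
        else:
            # wait() unblocks with two sets as soon as any future raises.
            done_set, not_done = wait(futures, return_when=FIRST_EXCEPTION)
            for future in not_done:
                future.cancel()
            done = iter(done_set)
        return [future.result() for future in done]


assert sorted(run_all([lambda: 1, lambda: 2])) == [1, 2]
```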
@@ -918,9 +919,9 @@ def self_hosted_execute(
     job: Job,
     params: DictData,
     *,
-    run_id:
-    parent_run_id:
-    event: Event
+    run_id: StrOrNone = None,
+    parent_run_id: StrOrNone = None,
+    event: Optional[Event] = None,
 ) -> Result: # pragma: no cov
     """Self-Hosted job execution with passing dynamic parameters from the
     workflow execution or itself execution. It will make request to the
@@ -981,9 +982,9 @@ def azure_batch_execute(
     job: Job,
     params: DictData,
     *,
-    run_id:
-    parent_run_id:
-    event: Event
+    run_id: StrOrNone = None,
+    parent_run_id: StrOrNone = None,
+    event: Optional[Event] = None,
 ) -> Result: # pragma: no cov
     """Azure Batch job execution that will run all job's stages on the Azure
     Batch Node and extract the result file to be returning context result.
@@ -1035,9 +1036,9 @@ def docker_execution(
     job: Job,
     params: DictData,
     *,
-    run_id:
-    parent_run_id:
-    event: Event
+    run_id: StrOrNone = None,
+    parent_run_id: StrOrNone = None,
+    event: Optional[Event] = None,
 ): # pragma: no cov
     """Docker job execution.
