ddeutil-workflow 0.0.80__py3-none-any.whl → 0.0.82__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +2 -1
- ddeutil/workflow/__init__.py +19 -6
- ddeutil/workflow/__main__.py +280 -1
- ddeutil/workflow/api/routes/job.py +2 -2
- ddeutil/workflow/api/routes/logs.py +8 -61
- ddeutil/workflow/audits.py +46 -17
- ddeutil/workflow/conf.py +64 -44
- ddeutil/workflow/errors.py +12 -2
- ddeutil/workflow/job.py +70 -16
- ddeutil/workflow/result.py +33 -11
- ddeutil/workflow/reusables.py +16 -17
- ddeutil/workflow/stages.py +172 -134
- ddeutil/workflow/traces.py +64 -24
- ddeutil/workflow/utils.py +7 -15
- ddeutil/workflow/workflow.py +73 -84
- {ddeutil_workflow-0.0.80.dist-info → ddeutil_workflow-0.0.82.dist-info}/METADATA +1 -1
- ddeutil_workflow-0.0.82.dist-info/RECORD +35 -0
- ddeutil/workflow/cli.py +0 -274
- ddeutil_workflow-0.0.80.dist-info/RECORD +0 -36
- {ddeutil_workflow-0.0.80.dist-info → ddeutil_workflow-0.0.82.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.80.dist-info → ddeutil_workflow-0.0.82.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.80.dist-info → ddeutil_workflow-0.0.82.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.80.dist-info → ddeutil_workflow-0.0.82.dist-info}/top_level.txt +0 -0
ddeutil/workflow/traces.py
CHANGED

@@ -29,7 +29,16 @@ from inspect import Traceback, currentframe, getframeinfo
 from pathlib import Path
 from threading import Lock, get_ident
 from types import FrameType
-from typing import
+from typing import (
+    Annotated,
+    Any,
+    ClassVar,
+    Final,
+    Literal,
+    Optional,
+    TypeVar,
+    Union,
+)
 from zoneinfo import ZoneInfo
 
 from pydantic import BaseModel, Field, PrivateAttr
@@ -86,9 +95,10 @@ PREFIX_LOGS: Final[dict[str, dict]] = {
         "emoji": "⚙️",
         "desc": "logs from any usage from custom caller function.",
     },
+    "NESTED": {"emoji": "⛓️", "desc": "logs from stages module."},
     "STAGE": {"emoji": "🔗", "desc": "logs from stages module."},
-    "JOB": {"emoji": "
-    "WORKFLOW": {"emoji": "
+    "JOB": {"emoji": "🏗", "desc": "logs from job module."},
+    "WORKFLOW": {"emoji": "👟", "desc": "logs from workflow module."},
     "RELEASE": {"emoji": "📅", "desc": "logs from release workflow method."},
     "POKING": {"emoji": "⏰", "desc": "logs from poke workflow method."},
     "AUDIT": {"emoji": "📌", "desc": "logs from audit model."},
@@ -427,6 +437,9 @@ class BaseHandler(BaseModel, ABC):
         self, metadata: list[Metadata], *, extra: Optional[DictData] = None
     ) -> None: ...
 
+    def pre(self) -> None:
+        """Pre-process of handler that will execute when start create trance."""
+
 
 class ConsoleHandler(BaseHandler):
     """Console Handler model."""
@@ -460,14 +473,20 @@ class FileHandler(BaseHandler):
     metadata_filename: ClassVar[str] = "metadata.txt"
 
     type: Literal["file"] = "file"
-    path: str = Field(
+    path: str = Field(
+        description=(
+            "A file path that use to save all trace log files that include "
+            "stdout, stderr, and metadata."
+        )
+    )
     format: str = Field(
         default=(
             "{datetime} ({process:5d}, {thread:5d}) ({cut_id}) {message:120s} "
             "({filename}:{lineno})"
-        )
+        ),
+        description="A trace log format that write on stdout and stderr files.",
     )
-    buffer_size: int = 8192
+    buffer_size: int = Field(default=8192)
 
     # NOTE: Private attrs for the internal process.
     _lock: Lock = PrivateAttr(default_factory=Lock)
@@ -488,7 +507,9 @@ class FileHandler(BaseHandler):
             log_file.mkdir(parents=True)
         return log_file
 
-    def pre(self) -> None:
+    def pre(self) -> None:  # pragma: no cov
+        if not (p := Path(self.path)).exists():
+            p.mkdir(parents=True)
 
     def emit(
         self,
@@ -496,6 +517,7 @@ class FileHandler(BaseHandler):
         *,
         extra: Optional[DictData] = None,
     ) -> None:
+        """Emit trace log."""
        pointer: Path = self.pointer(metadata.pointer_id)
        std_file = "stderr" if metadata.error_flag else "stdout"
        with self._lock:
@@ -518,7 +540,9 @@ class FileHandler(BaseHandler):
         try:
             import aiofiles
         except ImportError as e:
-            raise ImportError(
+            raise ImportError(
+                "Async mode need to install `aiofiles` package first"
+            ) from e
 
         with self._lock:
             pointer: Path = self.pointer(metadata.pointer_id)
@@ -538,6 +562,7 @@ class FileHandler(BaseHandler):
     def flush(
         self, metadata: list[Metadata], *, extra: Optional[DictData] = None
     ) -> None:
+        """Flush logs."""
         with self._lock:
             pointer: Path = self.pointer(metadata[0].pointer_id)
             stdout_file = open(
@@ -613,7 +638,7 @@ class FileHandler(BaseHandler):
         """Find trace logs.
 
         Args:
-            path: A trace path that want to find.
+            path (Path | None, default None): A trace path that want to find.
         """
         for file in sorted(
             (path or Path(self.path)).glob("./run_id=*"),
@@ -634,6 +659,9 @@ class FileHandler(BaseHandler):
             run_id: A running ID of trace log.
             force_raise: Whether to raise an exception if not found.
             path: Optional path override.
+
+        Returns:
+            TraceData: A TranceData instance that already passed searching data.
         """
         base_path: Path = path or self.path
         file: Path = base_path / f"run_id={run_id}"
@@ -757,7 +785,8 @@ class SQLiteHandler(BaseHandler):  # pragma: no cov
         metadata: Metadata,
         *,
         extra: Optional[DictData] = None,
-    ) -> None:
+    ) -> None:
+        raise NotImplementedError("Does not implement async emit yet.")
 
     def flush(
         self, metadata: list[Metadata], *, extra: Optional[DictData] = None
@@ -1506,7 +1535,6 @@ class ElasticHandler(BaseHandler):  # pragma: no cov
         try:
             from elasticsearch import Elasticsearch
 
-            # Create client
             client = Elasticsearch(
                 hosts=es_hosts if isinstance(es_hosts, list) else [es_hosts],
                 basic_auth=(
@@ -1653,8 +1681,6 @@ class ElasticHandler(BaseHandler):  # pragma: no cov
 
         for hit in response["hits"]["hits"]:
             source = hit["_source"]
-
-            # Convert to TraceMeta
             trace_meta = Metadata(
                 run_id=source["run_id"],
                 parent_run_id=source["parent_run_id"],
@@ -1724,6 +1750,7 @@ class ElasticHandler(BaseHandler):  # pragma: no cov
         return TraceData(stdout="", stderr="")
 
 
+Handler = TypeVar("Handler", bound=BaseHandler)
 TraceHandler = Annotated[
     Union[
         ConsoleHandler,
@@ -1866,7 +1893,7 @@ class BaseAsyncEmit(ABC):
         await self.amit(msg, level="exception")
 
 
-class TraceManager(BaseModel, BaseEmit, BaseAsyncEmit):
+class Trace(BaseModel, BaseEmit, BaseAsyncEmit):
     """Trace Manager model that keep all trance handler and emit log to its
     handler.
     """
@@ -1955,7 +1982,7 @@ class TraceManager(BaseModel, BaseEmit, BaseAsyncEmit):
         any logging level.
 
         Args:
-            msg: A message that want to log.
+            msg (str): A message that want to log.
            level (Level): A logging mode.
         """
        _msg: str = self.make_message(msg)
@@ -2005,10 +2032,12 @@ class TraceManager(BaseModel, BaseEmit, BaseAsyncEmit):
 def get_trace(
     run_id: str,
     *,
+    handlers: list[Union[DictData, Handler]] = None,
     parent_run_id: Optional[str] = None,
     extras: Optional[DictData] = None,
-
-
+    auto_pre_process: bool = False,
+) -> Trace:
+    """Get dynamic Trace instance from the core config.
 
     This factory function returns the appropriate trace implementation based on
     configuration. It can be overridden by extras argument and accepts running ID
@@ -2017,16 +2046,27 @@ def get_trace(
     Args:
         run_id (str): A running ID.
         parent_run_id (str | None, default None): A parent running ID.
+        handlers:
         extras: An extra parameter that want to override the core
             config values.
+        auto_pre_process (bool, default False)
 
     Returns:
-
+        Trace: The appropriate trace instance.
     """
-    handlers = dynamic(
-
-
-
-
+    handlers: list[DictData] = dynamic(
+        "trace_handlers", f=handlers, extras=extras
+    )
+    trace = Trace.model_validate(
+        {
+            "run_id": run_id,
+            "parent_run_id": parent_run_id,
+            "handlers": handlers,
+            "extras": extras or {},
+        }
     )
+    # NOTE: Start pre-process when start create trace.
+    if auto_pre_process:
+        for handler in trace.handlers:
+            handler.pre()
+    return trace
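
The net effect of these trace changes: handlers gain a `pre()` hook, `TraceManager` is renamed to `Trace`, and `get_trace` now accepts an explicit `handlers` override plus an `auto_pre_process` flag that calls each handler's `pre()` when the trace is created. A minimal usage sketch against the new signature shown above; the handler dict and run ID are illustrative placeholders, not values from this package:

```python
from ddeutil.workflow.traces import get_trace

# Assumed handler config: the diff above shows FileHandler declares
# `type: Literal["file"]` and a required `path` field.
trace = get_trace(
    "demo-run-id",  # hypothetical running ID
    handlers=[{"type": "file", "path": "./logs"}],
    auto_pre_process=True,  # runs FileHandler.pre(), which mkdirs the path
)
trace.info("[DEMO]: routed to every configured handler")
```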
ddeutil/workflow/utils.py
CHANGED

@@ -28,17 +28,6 @@ Functions:
     cut_id: Cut running ID to specified length
     dump_all: Serialize nested BaseModel objects to dictionaries
     obj_name: Get object name or class name
-
-Example:
-    ```python
-    from ddeutil.workflow.utils import gen_id, get_dt_now
-
-    # Generate unique ID
-    run_id = gen_id("workflow")
-
-    # Get current datetime
-    now = get_dt_now()
-    ```
 """
 from __future__ import annotations
 
@@ -229,7 +218,10 @@ def gen_id(
     hashing value length to 10 if simple mode is enabled.
 
     Simple Mode Format:
-
+
+        The format of ID include full datetime and hashing identity.
+
+        YYYY MM DD HH MM SS ffffff T **********
     year month day hour minute second microsecond sep simple-id
 
     Args:
@@ -329,7 +321,7 @@ def cross_product(matrix: Matrix) -> Iterator[DictData]:
     )
 
 
-def cut_id(run_id: str, *, num: int = 6) -> str:
+def cut_id(run_id: str, *, num: int = 8) -> str:
     """Cut running ID to specified length.
 
     Example:
@@ -345,8 +337,8 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
     """
     if "T" in run_id:
         dt, simple = run_id.split("T", maxsplit=1)
-        return dt[:
-    return run_id[
+        return dt[10:20] + simple[-num:]
+    return run_id[-num:]
 
 
 @overload
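
`cut_id` changes in two ways: the default `num` moves from 6 to 8, and for simple-mode IDs (the `<datetime>T<hash>` shape documented in `gen_id` above) it now slices `dt[10:20]` rather than a datetime prefix. A quick check of the new behavior with made-up IDs:

```python
from ddeutil.workflow.utils import cut_id

# Simple-mode ID: 20-char datetime, "T", then the hashing identity.
# dt[10:20] keeps minute+second+microsecond; simple[-8:] keeps the hash tail.
print(cut_id("20240101115959000000T0123456789"))
# -> "595900000023456789"

# Non-simple ID: just the last `num` characters.
print(cut_id("c4309d3a19d06b01d6b1bbc975a91dcb"))
# -> "75a91dcb"
```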
ddeutil/workflow/workflow.py
CHANGED

@@ -30,19 +30,18 @@ from concurrent.futures import (
     as_completed,
 )
 from datetime import datetime
-from enum import Enum
 from pathlib import Path
 from queue import Queue
 from textwrap import dedent
 from threading import Event as ThreadEvent
-from typing import Any, Optional, Union
+from typing import Any, Literal, Optional, Union
 
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
 from .__types import DictData
-from .audits import Audit, get_audit
+from .audits import NORMAL, RERUN, Audit, ReleaseType, get_audit
 from .conf import YamlParser, dynamic
 from .errors import WorkflowCancelError, WorkflowError, WorkflowTimeoutError
 from .event import Event
@@ -61,7 +60,7 @@ from .result import (
     validate_statuses,
 )
 from .reusables import has_template, param2template
-from .traces import
+from .traces import Trace, get_trace
 from .utils import (
     UTC,
     gen_id,
@@ -70,31 +69,6 @@ from .utils import (
 )
 
 
-class ReleaseType(str, Enum):
-    """Release type enumeration for workflow execution modes.
-
-    This enum defines the different types of workflow releases that can be
-    triggered, each with specific behavior and use cases.
-
-    Attributes:
-        NORMAL: Standard workflow release execution
-        RERUN: Re-execution of previously failed workflow
-        EVENT: Event-triggered workflow execution
-        FORCE: Forced execution bypassing normal conditions
-    """
-
-    NORMAL = "normal"
-    RERUN = "rerun"
-    EVENT = "event"
-    FORCE = "force"
-
-
-NORMAL = ReleaseType.NORMAL
-RERUN = ReleaseType.RERUN
-EVENT = ReleaseType.EVENT
-FORCE = ReleaseType.FORCE
-
-
 class Workflow(BaseModel):
     """Main workflow orchestration model for job and schedule management.
 
@@ -113,17 +87,6 @@ class Workflow(BaseModel):
         on (list[Crontab]): Schedule definitions using cron expressions
         jobs (dict[str, Job]): Collection of jobs within this workflow
 
-    Example:
-        Create and execute a workflow:
-
-        ```python
-        workflow = Workflow.from_conf('my-workflow')
-        result = workflow.execute({
-            'param1': 'value1',
-            'param2': 'value2'
-        })
-        ```
-
     Note:
         Workflows can be executed immediately or scheduled for background
         execution using the cron-like scheduling system.
@@ -134,6 +97,7 @@ class Workflow(BaseModel):
         description="An extra parameters that want to override config values.",
     )
     name: str = Field(description="A workflow name.")
+    type: Literal["Workflow"] = Field(default="workflow")
     desc: Optional[str] = Field(
         default=None,
         description=(
@@ -198,22 +162,19 @@ class Workflow(BaseModel):
             FileNotFoundError: If workflow configuration file not found
 
         Example:
-
-
-
-
-
-
-
-
-
-            )
-            ```
+            >>> # NOTE: Load from default config path
+            >>> workflow = Workflow.from_conf('data-pipeline')
+
+            >>> # NOTE: Load with custom path and extras
+            >>> workflow = Workflow.from_conf(
+            ...     'data-pipeline',
+            ...     path=Path('./custom-configs'),
+            ...     extras={'env': 'prod'}
+            ... )
         """
         load: YamlParser = YamlParser(name, path=path, extras=extras, obj=cls)
         data: DictData = copy.deepcopy(load.data)
         data["name"] = name
-
         if extras:
             data["extras"] = extras
 
@@ -289,7 +250,11 @@ class Workflow(BaseModel):
         return self.model_dump(by_alias=True)
 
     def md(self, author: Optional[str] = None) -> str:  # pragma: no cov
-        """Generate the markdown template.
+        """Generate the markdown template from this Workflow model data.
+
+        Args:
+            author (str | None, default None): An author name.
+        """
 
         def align_newline(value: str) -> str:
             return value.rstrip("\n").replace("\n", "\n    ")
@@ -439,7 +404,7 @@ class Workflow(BaseModel):
         override_log_name: Optional[str] = None,
         timeout: int = 600,
         audit_excluded: Optional[list[str]] = None,
-        audit:
+        audit: Audit = None,
     ) -> Result:
         """Release the workflow which is executes workflow with writing audit
         log tracking. The method is overriding parameter with the release
@@ -473,6 +438,7 @@ class Workflow(BaseModel):
             method.
         """
         name: str = override_log_name or self.name
+        audit: Audit = audit or get_audit(extras=self.extras)
 
         # NOTE: Generate the parent running ID with not None value.
         if run_id:
@@ -483,7 +449,15 @@ class Workflow(BaseModel):
             parent_run_id: str = run_id
 
         context: DictData = {"status": WAIT}
-
+        audit_data: DictData = {
+            "name": name,
+            "release": release,
+            "type": release_type,
+            "run_id": run_id,
+            "parent_run_id": parent_run_id,
+            "extras": self.extras,
+        }
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         release: datetime = self.validate_release(dt=release)
@@ -500,6 +474,25 @@ class Workflow(BaseModel):
             },
             extras=self.extras,
         )
+
+        if release_type == NORMAL and audit.is_pointed(data=audit_data):
+            trace.info("[RELEASE]: Skip this release because it already audit.")
+            return Result(
+                run_id=run_id,
+                parent_run_id=parent_run_id,
+                status=SKIP,
+                context=catch(context, status=SKIP),
+                extras=self.extras,
+            )
+
+        if release_type == RERUN:
+            # TODO: It will load previous audit and use this data to run with
+            #   the `rerun` method.
+            raise NotImplementedError(
+                "Release does not support for rerun type yet. Please use the "
+                "`rerun` method instead."
+            )
+
         rs: Result = self.execute(
             params=values,
             run_id=parent_run_id,
@@ -509,15 +502,10 @@ class Workflow(BaseModel):
         trace.info(f"[RELEASE]: End {name!r} : {release:%Y-%m-%d %H:%M:%S}")
         trace.debug(f"[RELEASE]: Writing audit: {name!r}.")
         (
-
-            data=
-
-            "release": release,
-            "type": release_type,
+            audit.save(
+                data=audit_data
+                | {
                     "context": context,
-            "parent_run_id": parent_run_id,
-            "run_id": run_id,
-            "extras": self.extras,
                     "runs_metadata": (
                         (runs_metadata or {})
                         | rs.info
@@ -581,7 +569,7 @@ class Workflow(BaseModel):
         Returns:
             tuple[Status, DictData]: The pair of status and result context data.
         """
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         if event and event.is_set():
@@ -697,8 +685,8 @@ class Workflow(BaseModel):
         """
         ts: float = time.monotonic()
         parent_run_id: Optional[str] = run_id
-        run_id: str = gen_id(self.name, extras=self.extras)
-        trace:
+        run_id: str = gen_id(self.name, unique=True, extras=self.extras)
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         context: DictData = self.parameterize(params)
@@ -735,6 +723,11 @@ class Workflow(BaseModel):
         )
         catch(context, status=WAIT)
         if event and event.is_set():
+            err_msg: str = (
+                "Execution was canceled from the event was set "
+                "before workflow execution."
+            )
+            trace.error(f"[WORKFLOW]: {err_msg}")
             return Result(
                 run_id=run_id,
                 parent_run_id=parent_run_id,
@@ -742,12 +735,7 @@ class Workflow(BaseModel):
                 context=catch(
                     context,
                     status=CANCEL,
-                    updated={
-                        "errors": WorkflowCancelError(
-                            "Execution was canceled from the event was set "
-                            "before workflow execution."
-                        ).to_dict(),
-                    },
+                    updated={"errors": WorkflowCancelError(err_msg).to_dict()},
                 ),
                 info={"execution_time": time.monotonic() - ts},
                 extras=self.extras,
@@ -801,7 +789,7 @@ class Workflow(BaseModel):
                 )
             elif check == SKIP:  # pragma: no cov
                 trace.info(
-                    f"[JOB]: Skip job: {job_id!r} from trigger rule."
+                    f"[JOB]: ⏭️ Skip job: {job_id!r} from trigger rule."
                 )
                 job.set_outputs(output={"status": SKIP}, to=context)
                 job_queue.task_done()
@@ -925,16 +913,17 @@ class Workflow(BaseModel):
     ) -> Result:  # pragma: no cov
         """Re-Execute workflow with passing the error context data.
 
-        :
-
-
-
-
-
-
-
-
+        Args:
+            context: A context result that get the failed status.
+            run_id: (Optional[str]) A workflow running ID.
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
+            timeout: (float) A workflow execution time out in second unit
+                that use for limit time of execution and waiting job dependency.
+                This value does not force stop the task that still running more
+                than this limit time. (Default: 60 * 60 seconds)
+            max_job_parallel: (int) The maximum workers that use for job
+                execution in `ThreadPoolExecutor` object. (Default: 2 workers)
 
         Returns
             Result: Return Result object that create from execution context with
@@ -943,7 +932,7 @@ class Workflow(BaseModel):
         ts: float = time.monotonic()
         parent_run_id: str = run_id
         run_id: str = gen_id(self.name, extras=self.extras)
-        trace:
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         if context["status"] == SUCCESS:
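
The release plumbing also moves `ReleaseType` (and its module-level aliases) out of `workflow.py` into `audits.py`, and `release()` now builds an `audit_data` dict up front so that a NORMAL release already pointed in the audit log returns a SKIP `Result` instead of re-executing. A sketch of the moved import, assuming `audits.py` keeps the member values that the removed enum above defined:

```python
# Previously `from ddeutil.workflow.workflow import ReleaseType`; the enum and
# its aliases now come from the audits module, per the new import line above.
from ddeutil.workflow.audits import NORMAL, RERUN, ReleaseType

assert NORMAL is ReleaseType.NORMAL
assert NORMAL == "normal"  # str-enum member, assuming values are unchanged
```

Note that passing `release_type=RERUN` to `release()` now raises `NotImplementedError` and points callers at the `rerun` method instead.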
ddeutil_workflow-0.0.82.dist-info/RECORD
ADDED

@@ -0,0 +1,35 @@
+ddeutil/workflow/__about__.py,sha256=GJmjvBMMhA0y8IwyPpPJLqLsuBWa6J90Pleir68QW5I,60
+ddeutil/workflow/__cron.py,sha256=avOagaHl9xXOmizeRWm13cOrty9Tw0vRjFq-xoEgpAY,29167
+ddeutil/workflow/__init__.py,sha256=elWSX2JPbjORH-CJIH_zR_nrdd8Xw95NjLK49R4Kqdg,3434
+ddeutil/workflow/__main__.py,sha256=Nqk5aO-HsZVKV2BmuJYeJEufJluipvCD9R1k2kMoJ3Y,8581
+ddeutil/workflow/__types.py,sha256=tA2vsr6mzTSzbWB1sb62c5GgxODlfVRz6FvgLNJtQao,4788
+ddeutil/workflow/audits.py,sha256=YPnWQYvhILPwPS6RhYKK0OG3lCeYFLuUNBxJvMlXf5w,26109
+ddeutil/workflow/conf.py,sha256=VfPmwaBYEgOj8bu4eim13ayZwJ4Liy7I702aQf7vS8g,17644
+ddeutil/workflow/errors.py,sha256=Rqtuf1MGxA-hKGP5wMAkaeeayst-u4P2dX6Fp_pzbsA,5678
+ddeutil/workflow/event.py,sha256=qm7QHw-Pozm6oIUzAIxpDkPzzVZVtHgJIUlIle0vEfQ,13943
+ddeutil/workflow/job.py,sha256=WHWOVz0ErOUfbN_aqpDeNmvBvpbKhcFlzcvJmlCpJuI,48430
+ddeutil/workflow/params.py,sha256=y9f6DEIyae1j4awbj3Kbeq75-U2UPFlKv9K57Hdo_Go,17188
+ddeutil/workflow/result.py,sha256=3Lpyv2Jn6T1Uc-lRbweDucSCoBr0ZByHjffKj14bj6s,9492
+ddeutil/workflow/reusables.py,sha256=SBLJSxR8ELoWJErBfSMZS3Rr1O_93T-fFBpfn2AvxuA,25007
+ddeutil/workflow/stages.py,sha256=CCR_D6yqVo74PuMxfqhi8GeeAq8sRbyxReHob6yxrjI,123708
+ddeutil/workflow/traces.py,sha256=YN4XuRfQK523cNy8EVgz2iPh6s6WB865K9JezCNdM7E,74637
+ddeutil/workflow/utils.py,sha256=vQwFu-wPK-lDiX2L8AZIahCkKEF6I0MCrZ1LlP8xkoQ,12011
+ddeutil/workflow/workflow.py,sha256=VN8i0mVyuCUw1kk_CqLkN-8dWLON9i4vTLbHp6lyS-s,42961
+ddeutil/workflow/api/__init__.py,sha256=5DzYL3ngceoRshh5HYCSVWChqNJSiP01E1bEd8XxPi0,4799
+ddeutil/workflow/api/log_conf.py,sha256=WfS3udDLSyrP-C80lWOvxxmhd_XWKvQPkwDqKblcH3E,1834
+ddeutil/workflow/api/routes/__init__.py,sha256=JRaJZB0D6mgR17MbZo8yLtdYDtD62AA8MdKlFqhG84M,420
+ddeutil/workflow/api/routes/job.py,sha256=-lbZ_hS9pEdSy6zeke5qrXEgdNxtQ2w9in7cHuM2Jzs,2536
+ddeutil/workflow/api/routes/logs.py,sha256=9jiYsw8kepud4n3NyXB7SAr2OoQwRn5uNb9kIZ58XJM,3806
+ddeutil/workflow/api/routes/workflows.py,sha256=0pEZEsIrscRFBXG9gf6nttKw0aNbcdw7NsAZKLoKWtk,4392
+ddeutil/workflow/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/plugins/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/plugins/providers/aws.py,sha256=61uIFBEWt-_D5Sui24qUPier1Hiqlw_RP_eY-rXBCKc,31551
+ddeutil/workflow/plugins/providers/az.py,sha256=o3dh011lEtmr7-d7FPZJPgXdT0ytFzKfc5xnVxSyXGU,34867
+ddeutil/workflow/plugins/providers/container.py,sha256=DSN0RWxMjTJN5ANheeMauDaPa3X6Z2E1eGUcctYkENw,22134
+ddeutil/workflow/plugins/providers/gcs.py,sha256=KgAOdMBvdbMLTH_z_FwVriBFtZfKEYx8_34jzUOVjTY,27460
+ddeutil_workflow-0.0.82.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.82.dist-info/METADATA,sha256=lB4PBeyneqHLwG0S4Lt_Xz8eCz4Lxn2bkDDHIFcBpxE,16087
+ddeutil_workflow-0.0.82.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ddeutil_workflow-0.0.82.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.82.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.82.dist-info/RECORD,,