ddeutil-workflow 0.0.81__py3-none-any.whl → 0.0.83__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- ddeutil/workflow/__about__.py +2 -1
- ddeutil/workflow/__cron.py +1 -1
- ddeutil/workflow/__init__.py +21 -7
- ddeutil/workflow/__main__.py +280 -1
- ddeutil/workflow/__types.py +10 -1
- ddeutil/workflow/api/routes/job.py +2 -2
- ddeutil/workflow/api/routes/logs.py +8 -61
- ddeutil/workflow/audits.py +101 -49
- ddeutil/workflow/conf.py +45 -25
- ddeutil/workflow/errors.py +12 -0
- ddeutil/workflow/event.py +34 -11
- ddeutil/workflow/job.py +75 -31
- ddeutil/workflow/result.py +73 -22
- ddeutil/workflow/stages.py +625 -375
- ddeutil/workflow/traces.py +71 -27
- ddeutil/workflow/utils.py +41 -24
- ddeutil/workflow/workflow.py +97 -124
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/METADATA +1 -1
- ddeutil_workflow-0.0.83.dist-info/RECORD +35 -0
- ddeutil/workflow/cli.py +0 -284
- ddeutil_workflow-0.0.81.dist-info/RECORD +0 -36
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.81.dist-info → ddeutil_workflow-0.0.83.dist-info}/top_level.txt +0 -0
ddeutil/workflow/job.py
CHANGED
@@ -48,7 +48,7 @@ from enum import Enum
 from functools import lru_cache
 from textwrap import dedent
 from threading import Event
-from typing import Annotated, Any, Optional, Union
+from typing import Annotated, Any, Literal, Optional, Union
 
 from ddeutil.core import freeze_args
 from pydantic import BaseModel, Discriminator, Field, SecretStr, Tag
@@ -72,8 +72,8 @@ from .result import (
 )
 from .reusables import has_template, param2template
 from .stages import Stage
-from .traces import
-from .utils import cross_product, filter_func, gen_id
+from .traces import Trace, get_trace
+from .utils import cross_product, extract_id, filter_func, gen_id
 
 MatrixFilter = list[dict[str, Union[str, int]]]
 
@@ -187,10 +187,8 @@ class Strategy(BaseModel):
         ),
         alias="fail-fast",
     )
-    max_parallel: int = Field(
+    max_parallel: Union[int, str] = Field(
         default=1,
-        gt=0,
-        lt=10,
         description=(
             "The maximum number of executor thread pool that want to run "
             "parallel. This value should gather than 0 and less than 10."
@@ -427,9 +425,9 @@ class OnGCPBatch(BaseRunsOn):  # pragma: no cov
     args: GCPBatchArgs = Field(alias="with")
 
 
-def get_discriminator_runs_on(
+def get_discriminator_runs_on(data: dict[str, Any]) -> RunsOn:
     """Get discriminator of the RunsOn models."""
-    t: str =
+    t: str = data.get("type")
     return RunsOn(t) if t else LOCAL
 
 
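For orientation, `get_discriminator_runs_on` is the callable discriminator for the Pydantic `runs-on` union (hence the `Discriminator` and `Tag` imports above). Below is a minimal, self-contained sketch of that same pattern; the model names and fields here are illustrative placeholders, not the package's actual `RunsOn` variants.

from typing import Annotated, Any, Literal, Union

from pydantic import BaseModel, Discriminator, Field, Tag


class OnLocal(BaseModel):
    type: Literal["local"] = "local"


class OnDocker(BaseModel):
    type: Literal["docker"] = "docker"
    image: str = "python:3.11"


def pick_runs_on(data: Any) -> str:
    # Mirrors the diff's logic: read the "type" key and fall back to local.
    if isinstance(data, dict):
        return data.get("type") or "local"
    return getattr(data, "type", "local")


class JobLike(BaseModel):
    runs_on: Annotated[
        Union[Annotated[OnLocal, Tag("local")], Annotated[OnDocker, Tag("docker")]],
        Discriminator(pick_runs_on),
    ] = Field(default_factory=OnLocal, alias="runs-on")


# Validation routes the mapping to the right variant via the "type" key.
print(JobLike.model_validate({"runs-on": {"type": "docker", "image": "alpine"}}).runs_on)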
@@ -538,13 +536,28 @@ class Job(BaseModel):
         description="An extra override config values.",
     )
 
+    @field_validator(
+        "runs_on",
+        mode="before",
+        json_schema_input_type=Union[RunsOnModel, Literal["local"]],
+    )
+    def __prepare_runs_on(cls, data: Any) -> Any:
+        """Prepare runs on value that was passed with string type."""
+        if isinstance(data, str):
+            if data != "local":
+                raise ValueError(
+                    "runs-on that pass with str type should be `local` only"
+                )
+            return {"type": data}
+        return data
+
     @field_validator("desc", mode="after")
-    def ___prepare_desc__(cls,
+    def ___prepare_desc__(cls, data: str) -> str:
         """Prepare description string that was created on a template.
 
         :rtype: str
         """
-        return dedent(
+        return dedent(data.lstrip("\n"))
 
     @field_validator("stages", mode="after")
     def __validate_stage_id__(cls, value: list[Stage]) -> list[Stage]:
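The practical effect of the new `__prepare_runs_on` validator is that a job may declare `runs-on: local` as a bare string instead of a mapping. A standalone sketch of the coercion rule, copied from the validator body above (the surrounding model wiring is omitted):

from typing import Any


def prepare_runs_on(data: Any) -> Any:
    # Same rule as the mode="before" validator in the diff above.
    if isinstance(data, str):
        if data != "local":
            raise ValueError("runs-on that pass with str type should be `local` only")
        return {"type": data}
    return data


assert prepare_runs_on("local") == {"type": "local"}
assert prepare_runs_on({"type": "local"}) == {"type": "local"}
# prepare_runs_on("docker") would raise ValueError: only the literal string "local" is allowed.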
@@ -877,9 +890,10 @@ class Job(BaseModel):
             Result: Return Result object that create from execution context.
         """
         ts: float = time.monotonic()
-        parent_run_id
-
-
+        parent_run_id, run_id = extract_id(
+            (self.id or "EMPTY"), run_id=run_id, extras=self.extras
+        )
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         trace.info(
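`extract_id` is new in `utils` (see the import change above). Judging only from this call site, it returns a `(parent_run_id, run_id)` pair derived from the job name and an optional incoming run id; a hedged usage sketch, since nothing beyond the call shape is visible in the diff:

from ddeutil.workflow.traces import get_trace
from ddeutil.workflow.utils import extract_id

# Assumed behavior, inferred from the call site in Job.execute above.
parent_run_id, run_id = extract_id("first-job", run_id=None, extras={})
trace = get_trace(run_id, parent_run_id=parent_run_id, extras={})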
@@ -1016,7 +1030,7 @@ def local_execute_strategy(
 
     :rtype: tuple[Status, DictData]
     """
-    trace:
+    trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
     if strategy:
@@ -1152,7 +1166,7 @@ def local_execute(
     ts: float = time.monotonic()
     parent_run_id: StrOrNone = run_id
     run_id: str = gen_id((job.id or "EMPTY"), unique=True)
-    trace:
+    trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
     context: DictData = {"status": WAIT}
@@ -1174,11 +1188,52 @@ def local_execute(
 
     event: Event = event or Event()
     ls: str = "Fail-Fast" if job.strategy.fail_fast else "All-Completed"
-    workers: int = job.strategy.max_parallel
+    workers: Union[int, str] = job.strategy.max_parallel
+    if isinstance(workers, str):
+        try:
+            workers: int = int(
+                param2template(workers, params=params, extras=job.extras)
+            )
+        except Exception as err:
+            trace.exception(
+                "[JOB]: Got the error on call param2template to "
+                f"max-parallel value: {workers}"
+            )
+            return Result(
+                run_id=run_id,
+                parent_run_id=parent_run_id,
+                status=FAILED,
+                context=catch(
+                    context,
+                    status=FAILED,
+                    updated={"errors": to_dict(err)},
+                ),
+                info={"execution_time": time.monotonic() - ts},
+                extras=job.extras,
+            )
+    if workers >= 10:
+        err_msg: str = (
+            f"The max-parallel value should not more than 10, the current value "
+            f"was set: {workers}."
+        )
+        trace.error(f"[JOB]: {err_msg}")
+        return Result(
+            run_id=run_id,
+            parent_run_id=parent_run_id,
+            status=FAILED,
+            context=catch(
+                context,
+                status=FAILED,
+                updated={"errors": JobError(err_msg).to_dict()},
+            ),
+            info={"execution_time": time.monotonic() - ts},
+            extras=job.extras,
+        )
+
     strategies: list[DictStr] = job.strategy.make()
     len_strategy: int = len(strategies)
     trace.info(
-        f"[JOB]:
+        f"[JOB]: Mode {ls}: {job.id!r} with {workers} "
         f"worker{'s' if workers > 1 else ''}."
     )
 
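Taken together with the `Strategy.max_parallel: Union[int, str]` change above, a job can now source max-parallel from a template string that resolves at execution time, and the upper bound is enforced here at runtime rather than by the removed `gt=0`/`lt=10` field constraints. A minimal sketch of the guard logic only; the real code routes the string through `param2template`, whose template context is package-specific and therefore elided here:

from typing import Union


def resolve_max_parallel(value: Union[int, str]) -> int:
    # Sketch of the runtime check that replaced the gt/lt field constraints.
    workers = int(value) if isinstance(value, str) else value  # non-numeric str -> FAILED job
    if workers >= 10:
        raise ValueError(
            f"The max-parallel value should not more than 10, the current value "
            f"was set: {workers}."
        )
    return workers


assert resolve_max_parallel("4") == 4
assert resolve_max_parallel(9) == 9
# resolve_max_parallel("10") raises ValueError, mirroring the FAILED result above.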
@@ -1218,7 +1273,6 @@ def local_execute(
 
         errors: DictData = {}
         statuses: list[Status] = [WAIT] * len_strategy
-        fail_fast: bool = False
 
         if not job.strategy.fail_fast:
             done: Iterator[Future] = as_completed(futures)
@@ -1243,7 +1297,6 @@ def local_execute(
             )
             trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
             done: Iterator[Future] = as_completed(futures)
-            fail_fast: bool = True
 
         for i, future in enumerate(done, start=0):
             try:
@@ -1258,19 +1311,10 @@ def local_execute(
                 pass
 
         status: Status = validate_statuses(statuses)
-
-        # NOTE: Prepare status because it does not cancel from parent event but
-        # cancel from failed item execution.
-        if fail_fast and status == CANCEL:
-            status = FAILED
-
-        return Result(
-            run_id=run_id,
-            parent_run_id=parent_run_id,
+        return Result.from_trace(trace).catch(
             status=status,
             context=catch(context, status=status, updated=errors),
             info={"execution_time": time.monotonic() - ts},
-            extras=job.extras,
         )
 
 
@@ -1295,7 +1339,7 @@ def self_hosted_execute(
     """
     parent_run_id: StrOrNone = run_id
     run_id: str = gen_id((job.id or "EMPTY"), unique=True)
-    trace:
+    trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
     context: DictData = {"status": WAIT}
@@ -1378,7 +1422,7 @@ def docker_execution(
     """
     parent_run_id: StrOrNone = run_id
     run_id: str = gen_id((job.id or "EMPTY"), unique=True)
-    trace:
+    trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
     context: DictData = {"status": WAIT}
ddeutil/workflow/result.py
CHANGED
@@ -8,25 +8,17 @@
 This module provides the core result and status management functionality for
 workflow execution tracking. It includes the Status enumeration for execution
 states and the Result dataclass for context transfer between workflow components.
-
-Classes:
-    Status: Enumeration for execution status tracking
-    Result: Dataclass for execution context and result management
-
-Functions:
-    validate_statuses: Determine final status from multiple status values
-    get_status_from_error: Convert exception types to appropriate status
 """
 from __future__ import annotations
 
 from dataclasses import field
 from enum import Enum
-from typing import Optional, Union
+from typing import Any, Optional, TypedDict, Union
 
 from pydantic import ConfigDict
 from pydantic.dataclasses import dataclass
 from pydantic.functional_validators import model_validator
-from typing_extensions import Self
+from typing_extensions import NotRequired, Self
 
 from . import (
     JobCancelError,
@@ -34,13 +26,16 @@ from . import (
     JobSkipError,
     StageCancelError,
     StageError,
+    StageNestedCancelError,
+    StageNestedError,
+    StageNestedSkipError,
     StageSkipError,
     WorkflowCancelError,
     WorkflowError,
 )
 from .__types import DictData
-from .audits import
-from .errors import ResultError
+from .audits import Trace, get_trace
+from .errors import ErrorData, ResultError
 from .utils import default_gen_id
 
 
@@ -123,10 +118,10 @@ def validate_statuses(statuses: list[Status]) -> Status:
         >>> validate_statuses([SUCCESS, SUCCESS, SUCCESS])
         >>> # Returns: SUCCESS
     """
-    if any(s ==
-        return CANCEL
-    elif any(s == FAILED for s in statuses):
+    if any(s == FAILED for s in statuses):
         return FAILED
+    elif any(s == CANCEL for s in statuses):
+        return CANCEL
     elif any(s == WAIT for s in statuses):
         return WAIT
     for status in (SUCCESS, SKIP):
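This swap changes the aggregation priority: a FAILED status now outranks a CANCEL when both appear in the batch (the removed first branch, truncated in this diff, appears to have returned CANCEL before the FAILED check). For example, with the new ordering:

from ddeutil.workflow.result import CANCEL, FAILED, SUCCESS, WAIT, validate_statuses

validate_statuses([CANCEL, FAILED, SUCCESS])  # -> FAILED (a mixed batch no longer resolves to CANCEL)
validate_statuses([CANCEL, SUCCESS])          # -> CANCEL
validate_statuses([WAIT, SUCCESS])            # -> WAIT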
@@ -140,6 +135,9 @@ def get_status_from_error(
         StageError,
         StageCancelError,
         StageSkipError,
+        StageNestedCancelError,
+        StageNestedError,
+        StageNestedSkipError,
         JobError,
         JobCancelError,
         JobSkipError,
@@ -157,10 +155,16 @@
     Returns:
         Status: The status from the specific exception class.
     """
-    if isinstance(error, (StageSkipError, JobSkipError)):
+    if isinstance(error, (StageNestedSkipError, StageSkipError, JobSkipError)):
         return SKIP
     elif isinstance(
-        error,
+        error,
+        (
+            StageNestedCancelError,
+            StageCancelError,
+            JobCancelError,
+            WorkflowCancelError,
+        ),
     ):
         return CANCEL
     return FAILED
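With the nested-stage errors added to both the signature union and the isinstance checks, the mapping now covers them explicitly. For example (the exception constructor arguments below are illustrative):

from ddeutil.workflow import (
    StageNestedCancelError,
    StageNestedError,
    StageNestedSkipError,
)
from ddeutil.workflow.result import get_status_from_error

get_status_from_error(StageNestedSkipError("nested stage skipped"))     # -> SKIP
get_status_from_error(StageNestedCancelError("nested stage canceled"))  # -> CANCEL
get_status_from_error(StageNestedError("nested stage failed"))          # -> FAILED (fallthrough)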
@@ -188,9 +192,7 @@ class Result:
     info: DictData = field(default_factory=dict)
     run_id: str = field(default_factory=default_gen_id)
     parent_run_id: Optional[str] = field(default=None)
-    trace: Optional[
-        default=None, compare=False, repr=False
-    )
+    trace: Optional[Trace] = field(default=None, compare=False, repr=False)
 
     @model_validator(mode="after")
     def __prepare_trace(self) -> Self:
|
|
199
201
|
:rtype: Self
|
200
202
|
"""
|
201
203
|
if self.trace is None: # pragma: no cov
|
202
|
-
self.trace:
|
204
|
+
self.trace: Trace = get_trace(
|
203
205
|
self.run_id,
|
204
206
|
parent_run_id=self.parent_run_id,
|
205
207
|
extras=self.extras,
|
@@ -208,7 +210,7 @@ class Result:
|
|
208
210
|
return self
|
209
211
|
|
210
212
|
@classmethod
|
211
|
-
def from_trace(cls, trace:
|
213
|
+
def from_trace(cls, trace: Trace):
|
212
214
|
"""Construct the result model from trace for clean code objective."""
|
213
215
|
return cls(
|
214
216
|
run_id=trace.run_id,
|
@@ -274,6 +276,9 @@ def catch(
         context: A context data that want to be the current context.
         status: A status enum object.
         updated: A updated data that will update to the current context.
+
+    Returns:
+        DictData: A catch context data.
     """
     context.update(updated or {})
     context["status"] = Status(status) if isinstance(status, int) else status
@@ -291,3 +296,49 @@
     else:
         raise ResultError(f"The key {k!r} does not exists on context data.")
     return context
+
+
+class Context(TypedDict):
+    """Context dict typed."""
+
+    status: Status
+    context: NotRequired[DictData]
+    errors: NotRequired[Union[list[ErrorData], ErrorData]]
+    info: NotRequired[DictData]
+
+
+class Layer(str, Enum):
+    WORKFLOW = "workflow"
+    JOB = "job"
+    STRATEGY = "strategy"
+    STAGE = "stage"
+
+
+def get_context_by_layer(
+    context: DictData,
+    key: str,
+    layer: Layer,
+    context_key: str,
+    *,
+    default: Optional[Any] = None,
+) -> Any:  # pragma: no cov
+    if layer == Layer.WORKFLOW:
+        return context.get("jobs", {}).get(key, {}).get(context_key, default)
+    elif layer == Layer.JOB:
+        return context.get("stages", {}).get(key, {}).get(context_key, default)
+    elif layer == Layer.STRATEGY:
+        return (
+            context.get("strategies", {}).get(key, {}).get(context_key, default)
+        )
+    return context.get(key, {}).get(context_key, default)
+
+
+def get_status(
+    context: DictData,
+    key: str,
+    layer: Layer,
+) -> Status:  # pragma: no cov
+    """Get status from context by a specific key and context layer."""
+    return get_context_by_layer(
+        context, key, layer, context_key="status", default=WAIT
+    )