ddeutil-workflow 0.0.43__py3-none-any.whl → 0.0.45__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/conf.py +33 -27
- ddeutil/workflow/job.py +65 -50
- ddeutil/workflow/logs.py +44 -5
- ddeutil/workflow/result.py +10 -1
- ddeutil/workflow/reusables.py +1 -0
- ddeutil/workflow/stages.py +9 -5
- ddeutil/workflow/workflow.py +75 -94
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/METADATA +6 -7
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/RECORD +13 -13
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.43"
+__version__: str = "0.0.45"
ddeutil/workflow/conf.py
CHANGED
@@ -341,6 +341,32 @@ class SimLoad:
         )
 
 
+config: Config = Config()
+api_config: APIConfig = APIConfig()
+
+
+def dynamic(
+    key: Optional[str] = None,
+    *,
+    f: Optional[T] = None,
+    extras: Optional[DictData] = None,
+) -> Optional[T]:
+    """Dynamic get config if extra value was passed at run-time.
+
+    :param key: (str) A config key that get from Config object.
+    :param f: An inner config function scope.
+    :param extras: An extra values that pass at run-time.
+    """
+    rsx: Optional[T] = extras[key] if extras and key in extras else None
+    rs: Optional[T] = f or getattr(config, key, None)
+    if rsx is not None and not isinstance(rsx, type(rs)):
+        raise TypeError(
+            f"Type of config {key!r} from extras: {rsx!r} does not valid "
+            f"as config {type(rs)}."
+        )
+    return rsx or rs
+
+
 class Loader(SimLoad):
     """Loader Object that get the config `yaml` file from current path.
 
@@ -355,6 +381,7 @@ class Loader(SimLoad):
         *,
         included: list[str] | None = None,
         excluded: list[str] | None = None,
+        path: Path | None = None,
         **kwargs,
     ) -> Iterator[tuple[str, DictData]]:
         """Override the find class method from the Simple Loader object.
@@ -362,44 +389,23 @@ class Loader(SimLoad):
         :param obj: An object that want to validate matching before return.
        :param included:
         :param excluded:
+        :param path:
 
         :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
             obj=obj,
-            conf_path=config.conf_path,
+            conf_path=(path or config.conf_path),
             included=included,
             excluded=excluded,
         )
 
     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name, externals=externals)
-
-
-config: Config = Config()
-api_config: APIConfig = APIConfig()
-
-
-def dynamic(
-    key: Optional[str] = None,
-    *,
-    f: Optional[T] = None,
-    extras: Optional[DictData] = None,
-) -> Optional[T]:
-    """Dynamic get config if extra value was passed at run-time.
-
-    :param key: (str) A config key that get from Config object.
-    :param f: An inner config function scope.
-    :param extras: An extra values that pass at run-time.
-    """
-    rsx: Optional[T] = extras[key] if extras and key in extras else None
-    rs: Optional[T] = f or getattr(config, key, None)
-    if rsx is not None and not isinstance(rsx, type(rs)):
-        raise TypeError(
-            f"Type of config {key!r} from extras: {rsx!r} does not valid "
-            f"as config {type(rs)}."
+        super().__init__(
+            name,
+            conf_path=dynamic("conf_path", extras=externals),
+            externals=externals,
         )
-    return rsx or rs
 
 
 @lru_cache
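The new module-level `dynamic` helper resolves a config key from run-time `extras` first, then falls back to an explicit `f` override or the `Config` attribute, and raises `TypeError` when an override's type disagrees with the config default. A minimal usage sketch; the workflow name and the `./conf-dev` directory are assumptions for illustration:

```python
from pathlib import Path

from ddeutil.workflow.conf import Loader, dynamic

# Run-time override wins over the global Config attribute; the types
# must agree or dynamic() raises TypeError.
conf_path: Path = dynamic("conf_path", extras={"conf_path": Path("./conf-dev")})

# Loader.__init__ now threads externals through the same helper, so one
# load can point at a different config directory (assuming a config
# named "stream-workflow" exists under that path).
loader = Loader("stream-workflow", externals={"conf_path": Path("./conf-dev")})
```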
ddeutil/workflow/job.py
CHANGED
@@ -38,6 +38,7 @@ from .exceptions import (
     JobException,
     StageException,
     UtilException,
+    to_dict,
 )
 from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
 from .reusables import has_template, param2template
@@ -415,6 +416,7 @@ class Job(BaseModel):
         need_exist: dict[str, Any] = {
             need: jobs[need] for need in self.needs if need in jobs
         }
+
         if len(need_exist) != len(self.needs):
             return WAIT
         elif all("skipped" in need_exist[job] for job in need_exist):
@@ -630,19 +632,6 @@ def local_execute_strategy(
    result: Result = Result(run_id=gen_id(job.id or "not-set", unique=True))
 
     strategy_id: str = gen_id(strategy)
-
-    # PARAGRAPH:
-    #
-    #     Create strategy execution context and update a matrix and copied
-    # of params. So, the context value will have structure like;
-    #
-    #   {
-    #       "params": { ... },      <== Current input params
-    #       "jobs": { ... },        <== Current input params
-    #       "matrix": { ... }       <== Current strategy value
-    #       "stages": { ... }       <== Catching stage outputs
-    #   }
-    #
     context: DictData = copy.deepcopy(params)
     context.update({"matrix": strategy, "stages": {}})
 
@@ -650,7 +639,6 @@ def local_execute_strategy(
     result.trace.info(f"[JOB]: Execute Strategy ID: {strategy_id}")
     result.trace.info(f"[JOB]: ... Matrix: {strategy_id}")
 
-    # IMPORTANT: The stage execution only run sequentially one-by-one.
     for stage in job.stages:
 
        if stage.is_skipped(params=context):
@@ -674,34 +662,30 @@ def local_execute_strategy(
            },
        )
 
-        # PARAGRAPH:
-        #
-        #       This step will add the stage result to `stages` key in that
-        #   stage id. It will have structure like;
-        #
-        #   {
-        #       "params": { ... },
-        #       "jobs": { ... },
-        #       "matrix": { ... },
-        #       "stages": { { "stage-id-01": { "outputs": { ... } } }, ... }
-        #   }
-        #
-        # IMPORTANT:
-        #       This execution change all stage running IDs to the current job
-        #   running ID, but it still trac log to the same parent running ID
-        #   (with passing `run_id` and `parent_run_id` to the stage
-        #   execution arguments).
-        #
         try:
-            stage.set_outputs(
-                stage.handler_execute(
-                    params=context,
-                    run_id=result.run_id,
-                    parent_run_id=result.parent_run_id,
-                    event=event,
-                ).context,
-                to=context,
+            rs: Result = stage.handler_execute(
+                params=context,
+                run_id=result.run_id,
+                parent_run_id=result.parent_run_id,
+                event=event,
             )
+            stage.set_outputs(rs.context, to=context)
+            if rs.status == FAILED:
+                error_msg: str = (
+                    f"Job strategy was break because it has a stage, "
+                    f"{stage.iden}, failed without raise error."
+                )
+                return result.catch(
+                    status=FAILED,
+                    context={
+                        strategy_id: {
+                            "matrix": strategy,
+                            "stages": context.pop("stages", {}),
+                            "errors": JobException(error_msg).to_dict(),
+                        },
+                    },
+                )
+
         except (StageException, UtilException) as err:
             result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
             do_raise: bool = dynamic(
@@ -746,8 +730,8 @@ def local_execute(
     raise_error: bool | None = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
-    execution. It will generate matrix values at the first
-    multithread on this metrics to the `stages` field of this job.
+    execution or itself execution. It will generate matrix values at the first
+    step and run multithread on this metrics to the `stages` field of this job.
 
     This method does not raise any JobException if it runs with
     multi-threading strategy.
@@ -798,7 +782,7 @@
             raise_error=raise_error,
         )
 
-        return result.catch(status=
+        return result.catch(status=result.status)
 
     fail_fast_flag: bool = job.strategy.fail_fast
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
@@ -818,8 +802,6 @@
         },
     )
 
-    # IMPORTANT: Start running strategy execution by multithreading because
-    # it will run by strategy values without waiting previous execution.
     with ThreadPoolExecutor(
         max_workers=job.strategy.max_parallel,
         thread_name_prefix="job_strategy_exec_",
@@ -885,6 +867,22 @@ def self_hosted_execute(
     event: Event | None = None,
     raise_error: bool | None = None,
 ) -> Result:  # pragma: no cov
+    """Self-Hosted job execution with passing dynamic parameters from the
+    workflow execution or itself execution. It will make request to the
+    self-hosted host url.
+
+    :param job: (Job) A job model that want to execute.
+    :param params: (DictData) An input parameters that use on job execution.
+    :param run_id: (str) A job running ID for this execution.
+    :param parent_run_id: (str) A parent workflow running ID for this release.
+    :param result: (Result) A result object for keeping context and status
+        data.
+    :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+    :param raise_error: (bool) A flag that all this method raise error to the
+        strategy execution.
+
+    :rtype: Result
+    """
     result: Result = Result.construct_with_rs_or_id(
         result,
         run_id=run_id,
@@ -893,14 +891,31 @@ def self_hosted_execute(
     )
 
     if event and event.is_set():
-        return result.catch(
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": JobException(
+                    "Job self-hosted execution was canceled from event that "
+                    "had set before start execution."
+                ).to_dict()
+            },
+        )
 
     import requests
 
-
-
-
-
+    try:
+        resp = requests.post(
+            job.runs_on.args.host,
+            headers={"Auth": f"Barer {job.runs_on.args.token}"},
+            data={
+                "job": job.model_dump(),
+                "params": params,
+                "result": result.__dict__,
+                "raise_error": raise_error,
+            },
+        )
+    except requests.exceptions.RequestException as e:
+        return result.catch(status=FAILED, context={"errors": to_dict(e)})
 
     if resp.status_code != 200:
         do_raise: bool = dynamic(
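In `self_hosted_execute`, the POST to the runner is now wrapped in try/except so a `RequestException` surfaces as a `FAILED` result instead of an unhandled error. The snippet below sketches a hypothetical receiver for the contract implied by the diff: the caller form-posts `job`, `params`, `result`, and `raise_error` (via `data=`), sends an `Auth` header, and treats any non-200 response as a failure. FastAPI is already an optional dependency of the package, but this route and handler are illustrative, not part of it:

```python
from typing import Any

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

app = FastAPI()


@app.post("/")  # hypothetical route; the diff posts to job.runs_on.args.host
async def run_job(request: Request) -> Any:
    # `data=` in requests.post sends form encoding, not JSON.
    form = await request.form()
    # The diff sends the token as an "Auth: Barer <token>" header.
    if request.headers.get("Auth") is None:
        # Any non-200 status makes the caller fail the job, or raise,
        # depending on its raise_error flag.
        return JSONResponse({"error": "missing token"}, status_code=401)
    # Fields posted by self_hosted_execute: job, params, result, raise_error.
    _job, _params = form.get("job"), form.get("params")
    return {"status": "accepted"}
```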
ddeutil/workflow/logs.py
CHANGED
@@ -80,6 +80,8 @@ class TraceMeda(BaseModel): # pragma: no cov
 
 
 class TraceData(BaseModel):  # pragma: no cov
+    """Trace Data model for keeping data for any Trace models."""
+
     stdout: str = Field(description="A standard output trace data.")
     stderr: str = Field(description="A standard error trace data.")
     meta: list[TraceMeda] = Field(
@@ -92,6 +94,12 @@ class TraceData(BaseModel): # pragma: no cov
 
     @classmethod
     def from_path(cls, file: Path) -> Self:
+        """Construct this trace data model with a trace path.
+
+        :param file: (Path) A trace path.
+
+        :rtype: Self
+        """
         data: DictStr = {"stdout": "", "stderr": "", "meta": []}
 
         if (file / "stdout.txt").exists():
@@ -207,27 +215,52 @@ class BaseTraceLog(ABC): # pragma: no cov
         logger.exception(msg, stacklevel=2)
 
     async def adebug(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the DEBUG level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         if config.debug:
             await self.awriter(msg)
         logger.info(msg, stacklevel=2)
 
     async def ainfo(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the INFO level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg)
         logger.info(msg, stacklevel=2)
 
     async def awarning(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the WARNING level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg)
         logger.warning(msg, stacklevel=2)
 
     async def aerror(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the ERROR level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg, is_err=True)
         logger.error(msg, stacklevel=2)
 
     async def aexception(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the EXCEPTION level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg, is_err=True)
         logger.exception(msg, stacklevel=2)
@@ -237,23 +270,29 @@ class FileTraceLog(BaseTraceLog): # pragma: no cov
     """Trace Log object that write file to the local storage."""
 
     @classmethod
-    def find_logs(cls) -> Iterator[TraceData]:  # pragma: no cov
+    def find_logs(
+        cls, path: Path | None = None
+    ) -> Iterator[TraceData]:  # pragma: no cov
+        """Find trace logs."""
         for file in sorted(
-            config.log_path.glob("./run_id=*"),
+            (path or config.log_path).glob("./run_id=*"),
             key=lambda f: f.lstat().st_mtime,
         ):
             yield TraceData.from_path(file)
 
     @classmethod
     def find_log_with_id(
-        cls, run_id: str, force_raise: bool = True
+        cls, run_id: str, force_raise: bool = True, *, path: Path | None = None
     ) -> TraceData:
-        file: Path = config.log_path / f"run_id={run_id}"
+        """Find trace log with an input specific run ID."""
+        base_path: Path = path or config.log_path
+        file: Path = base_path / f"run_id={run_id}"
         if file.exists():
             return TraceData.from_path(file)
         elif force_raise:
             raise FileNotFoundError(
-                f"Trace log on path
+                f"Trace log on path {base_path}, does not found trace "
+                f"'run_id={run_id}'."
             )
         return {}
 
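Both `FileTraceLog` finders now accept an optional `path`, so traces can be read from somewhere other than `config.log_path`, and `find_log_with_id` reports the searched directory in its `FileNotFoundError`. A short usage sketch; the `./logs` directory and the run ID value are assumptions:

```python
from pathlib import Path

from ddeutil.workflow.logs import FileTraceLog

# Iterate every trace under a custom log directory, ordered by file
# mtime (oldest first, per the sorted() key in the diff).
for trace in FileTraceLog.find_logs(path=Path("./logs")):
    print(trace.stdout)

# Fetch one run's trace; with force_raise=False a missing
# `run_id=<id>` directory returns an empty value instead of raising.
trace = FileTraceLog.find_log_with_id(
    "some-run-id",  # hypothetical run ID
    force_raise=False,
    path=Path("./logs"),
)
```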
ddeutil/workflow/result.py
CHANGED
@@ -72,6 +72,7 @@ class Result:
     ts: datetime = field(default_factory=get_dt_tznow, compare=False)
 
     trace: Optional[TraceLog] = field(default=None, compare=False, repr=False)
+    extras: DictData = field(default_factory=dict)
 
     @classmethod
     def construct_with_rs_or_id(
@@ -80,6 +81,8 @@ class Result:
         run_id: str | None = None,
         parent_run_id: str | None = None,
         id_logic: str | None = None,
+        *,
+        extras: DictData | None = None,
     ) -> Self:
         """Create the Result object or set parent running id if passing Result
         object.
@@ -88,16 +91,22 @@ class Result:
         :param run_id:
         :param parent_run_id:
         :param id_logic:
+        :param extras:
 
         :rtype: Self
         """
         if result is None:
-            return cls(
+            return cls(
                 run_id=(run_id or gen_id(id_logic or "", unique=True)),
                 parent_run_id=parent_run_id,
+                extras=(extras or {}),
             )
         elif parent_run_id:
             result.set_parent_run_id(parent_run_id)
+
+        if extras is not None:
+            result.extras.update(extras)
+
         return result
 
     @model_validator(mode="after")
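`Result` now carries an `extras` mapping, and `construct_with_rs_or_id` threads it through both branches: a new `Result` is created with `extras or {}`, while an existing one gets its `extras` updated in place. A minimal sketch; the extras keys shown are placeholders:

```python
from ddeutil.workflow.result import Result

# No existing result: construct a fresh one that carries the extras.
rs: Result = Result.construct_with_rs_or_id(
    None, run_id="1001", extras={"conf_path": "./conf-dev"}
)

# Existing result: set the parent run ID and merge extras in place.
same: Result = Result.construct_with_rs_or_id(
    rs, parent_run_id="9999", extras={"stage_raise_error": True}
)
assert same is rs and "conf_path" in rs.extras
```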
ddeutil/workflow/reusables.py
CHANGED
ddeutil/workflow/stages.py
CHANGED
@@ -223,10 +223,11 @@ class BaseStage(BaseModel, ABC):
             ) from e
 
         errors: DictData = {"errors": to_dict(e)}
-
-
-
-
+        return (
+            self.set_outputs(errors, to=to)
+            if to is not None
+            else result.catch(status=FAILED, context=errors)
+        )
 
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the received context. The
@@ -326,7 +327,10 @@ class BaseAsyncStage(BaseStage):
         *,
         result: Result | None = None,
         event: Event | None = None,
-    ) -> Result:
+    ) -> Result:
+        raise NotImplementedError(
+            "Async Stage should implement `execute` method."
+        )
 
     @abstractmethod
     async def axecute(
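With `BaseAsyncStage.execute` now raising `NotImplementedError`, the contract is explicit: async stages implement `axecute`, and dispatching one through the sync path fails fast instead of silently returning nothing. A hypothetical subclass sketch; the field set of the real `BaseStage` model is not shown in this diff, so only the overridden method is meaningful here:

```python
from threading import Event
from typing import Optional

from ddeutil.workflow.result import Result
from ddeutil.workflow.stages import BaseAsyncStage


class EchoAsyncStage(BaseAsyncStage):  # hypothetical stage for illustration
    async def axecute(
        self,
        params: dict,
        *,
        result: Optional[Result] = None,
        event: Optional[Event] = None,
    ) -> Result:
        # Real async work (awaiting IO, subprocesses, ...) goes here.
        ...
```

Calling `execute` on such a stage now raises `NotImplementedError` per the new base behavior.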
ddeutil/workflow/workflow.py
CHANGED
@@ -314,7 +314,8 @@ class Workflow(BaseModel):
 
         loader_data: DictData = copy.deepcopy(loader.data)
         loader_data["name"] = name.replace(" ", "_")
-        if extras:
+
+        if extras:
             loader_data["extras"] = extras
 
         cls.__bypass_on__(loader_data, path=loader.conf_path, extras=extras)
@@ -325,6 +326,7 @@ class Workflow(BaseModel):
         cls,
         name: str,
         path: Path,
+        *,
         extras: DictData | None = None,
     ) -> Self:
         """Create Workflow instance from the specific path. The loader object
@@ -349,7 +351,8 @@ class Workflow(BaseModel):
 
         loader_data: DictData = copy.deepcopy(loader.data)
         loader_data["name"] = name.replace(" ", "_")
-        if extras:
+
+        if extras:
             loader_data["extras"] = extras
 
         cls.__bypass_on__(loader_data, path=path, extras=extras)
@@ -981,11 +984,11 @@
                 ).context,
                 to=params,
             )
-        except JobException as err:
-            result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+        except JobException as e:
+            result.trace.error(f"[WORKFLOW]: {e.__class__.__name__}: {e}")
             if raise_error:
                 raise WorkflowException(
-                    f"Get job execution error {job_id}: JobException: {err}"
+                    f"Get job execution error {job_id}: JobException: {e}"
                 ) from None
             raise NotImplementedError(
                 "Handle error from the job execution does not support yet."
@@ -1042,16 +1045,12 @@
         )
 
         result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")
-
-        # NOTE: It should not do anything if it does not have job.
         if not self.jobs:
             result.trace.warning(
                 f"[WORKFLOW]: {self.name!r} does not have any jobs"
             )
             return result.catch(status=SUCCESS, context=params)
 
-        # NOTE: Create a job queue that keep the job that want to run after
-        # its dependency condition.
         jq: Queue = Queue()
         for job_id in self.jobs:
             jq.put(job_id)
@@ -1090,9 +1089,9 @@
                 timeout=timeout,
                 event=event,
             )
-        except WorkflowException as err:
-            status = FAILED
-            context.update({"errors": err.to_dict()})
+        except WorkflowException as e:
+            status: Status = FAILED
+            context.update({"errors": e.to_dict()})
 
         return result.catch(status=status, context=context)
 
@@ -1149,7 +1148,8 @@
                 continue
             elif check == FAILED:  # pragma: no cov
                 raise WorkflowException(
-                    "
+                    f"Validate job trigger rule was failed with "
+                    f"{job.trigger_rule.value!r}."
                 )
             elif check == SKIP:  # pragma: no cov
                 result.trace.info(f"[JOB]: Skip job: {job_id!r}")
@@ -1157,16 +1157,6 @@
             job_queue.task_done()
             continue
 
-            # NOTE: Start workflow job execution with deep copy context data
-            # before release.
-            #
-            # Context:
-            # ---
-            # {
-            #   'params': <input-params>,
-            #   'jobs': { <job's-id>: ... },
-            # }
-            #
         futures.append(
             executor.submit(
                 self.execute_job,
@@ -1177,19 +1167,14 @@
             ),
         )
 
-        # NOTE: Mark this job queue done.
         job_queue.task_done()
 
     if not_timeout_flag:
-
-        # NOTE: Wait for all items to finish processing by `task_done()`
-        # method.
         job_queue.join()
-
         for future in as_completed(futures, timeout=thread_timeout):
-            if err := future.exception():
-                result.trace.error(f"[WORKFLOW]: {err}")
-                raise WorkflowException(str(err))
+            if e := future.exception():
+                result.trace.error(f"[WORKFLOW]: {e}")
+                raise WorkflowException(str(e))
 
             future.result()
 
@@ -1235,78 +1220,74 @@
             "max_job_exec_timeout", f=timeout, extras=self.extras
         )
         event: Event = event or Event()
-        future: Future | None = None
         result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with non-threading.")
-
-        executor = ThreadPoolExecutor(
+        with ThreadPoolExecutor(
             max_workers=1,
             thread_name_prefix="wf_exec_non_threading_",
-        )
+        ) as executor:
+            future: Future | None = None
 
-        while not job_queue.empty() and (
-            not_timeout_flag := ((time.monotonic() - ts) < timeout)
-        ):
-            job_id: str = job_queue.get()
-            job: Job = self.jobs[job_id]
+            while not job_queue.empty() and (
+                not_timeout_flag := ((time.monotonic() - ts) < timeout)
+            ):
+                job_id: str = job_queue.get()
+                job: Job = self.jobs[job_id]
 
-
-
-
-
-
-
-
-
-
-
-
-
+                if (check := job.check_needs(context["jobs"])) == WAIT:
+                    job_queue.task_done()
+                    job_queue.put(job_id)
+                    time.sleep(0.075)
+                    continue
+                elif check == FAILED:
+                    raise WorkflowException(
+                        f"Validate job trigger rule was failed with "
+                        f"{job.trigger_rule.value!r}."
+                    )
+                elif check == SKIP:  # pragma: no cov
+                    result.trace.info(f"[JOB]: Skip job: {job_id!r}")
+                    job.set_outputs({"SKIP": {"skipped": True}}, to=context)
+                    job_queue.task_done()
+                    continue
 
-
-
-
-
-
-
-
-
-
-                future = executor.submit(
-                    self.execute_job,
-                    job_id=job_id,
-                    params=context,
-                    result=result,
-                    event=event,
-                )
-                result.trace.debug(f"[WORKFLOW]: Make future: {future}")
-                time.sleep(0.025)
-            elif future.done():
-                if err := future.exception():
-                    result.trace.error(f"[WORKFLOW]: {err}")
-                    raise WorkflowException(str(err))
-
-                future = None
-                job_queue.put(job_id)
-            elif future.running():
-                time.sleep(0.075)
-                job_queue.put(job_id)
-            else:  # pragma: no cov
-                job_queue.put(job_id)
-                result.trace.debug(
-                    f"Execution non-threading does not handle case: {future} "
-                    f"that not running."
-                )
+                if future is None:
+                    future: Future = executor.submit(
+                        self.execute_job,
+                        job_id=job_id,
+                        params=context,
+                        result=result,
+                        event=event,
+                    )
+                    time.sleep(0.025)
+                elif future.done():
+                    if e := future.exception():
+                        result.trace.error(f"[WORKFLOW]: {e}")
+                        raise WorkflowException(str(e))
 
-
+                    future = None
+                    job_queue.put(job_id)
+                elif future.running():
+                    time.sleep(0.075)
+                    job_queue.put(job_id)
+                else:  # pragma: no cov
+                    job_queue.put(job_id)
+                    result.trace.debug(
+                        f"Execution non-threading does not handle case: {future} "
+                        f"that not running."
+                    )
+
+                job_queue.task_done()
+
+            if not_timeout_flag:
+                job_queue.join()
+                return context
 
-
-
-
-
+            result.trace.error(
+                f"[WORKFLOW]: Execution: {self.name!r} was timeout."
+            )
+            event.set()
+            if future:
+                future.cancel()
 
-        result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
-        event.set()
-        executor.shutdown()
         raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
 
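The rewritten non-threading executor replaces a manually constructed `ThreadPoolExecutor`, whose `shutdown()` was only reached on the timeout path, with a `with` block, and polls one in-flight future while requeuing jobs that are still waiting. A standalone sketch of that single-worker polling pattern, using the same sleep constants as the diff; it is a simplification, not the package's code:

```python
import time
from concurrent.futures import Future, ThreadPoolExecutor
from queue import Queue


def drain_one_by_one(queue: Queue, work) -> None:
    """Run queued items on a single worker, requeuing while one is in flight."""
    with ThreadPoolExecutor(max_workers=1) as executor:  # always shut down
        future: Future | None = None
        while not queue.empty():
            item = queue.get()
            if future is None:
                future = executor.submit(work, item)  # item is consumed here
                time.sleep(0.025)
            elif future.done():
                if exc := future.exception():
                    raise exc  # surface worker errors, as the diff does
                future = None
                queue.put(item)  # this item was only peeked; run it next
            else:  # still running: back off and requeue
                time.sleep(0.075)
                queue.put(item)
            queue.task_done()
        queue.join()
```

The 0.025s and 0.075s sleeps mirror the constants in the diff, and the `with` block guarantees the pool is shut down on every exit path, including the new `WorkflowException` raised on timeout.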
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.43
+Version: 0.0.45
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -30,7 +30,6 @@ Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
 Requires-Dist: httpx; extra == "all"
-Requires-Dist: ujson; extra == "all"
 Requires-Dist: aiofiles; extra == "all"
 Requires-Dist: aiohttp; extra == "all"
 Provides-Extra: api
@@ -71,9 +70,9 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
 
-1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
-3. All parallel tasks inside workflow engine use **Multi-Threading**
+3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)
 
 ---
@@ -266,11 +265,11 @@ it will use default value and do not raise any error to you.
 | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
 | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` |
+| **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` |
+| **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
 | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` |
+| **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
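Four of these rows gained their missing override and description cells in this release. A sketch of setting a few of them before import; the `WORKFLOW_CORE_` prefix is an assumption inferred from the Core scope column, so verify the exact variable names against the installed version:

```python
import os

# Assumed names: "Core"-scoped settings appear to map to WORKFLOW_CORE_*.
os.environ["WORKFLOW_CORE_CONF_PATH"] = "./conf"        # template .yaml dir
os.environ["WORKFLOW_CORE_STAGE_DEFAULT_ID"] = "true"   # auto stage IDs
os.environ["WORKFLOW_CORE_JOB_DEFAULT_ID"] = "false"    # sequence job IDs off
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"

# Import after setting the environment, assuming settings are read at
# import time by the Config object.
from ddeutil.workflow.conf import config
```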
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-ddeutil/workflow/__about__.py,sha256=
+ddeutil/workflow/__about__.py,sha256=SRGMEfbOwgNFzpR_XZJUPWxcNUSIkcCKp1M3mCR6pQk,28
 ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
 ddeutil/workflow/__init__.py,sha256=m7ZTCuUOarcTKJuXOyuaXd5WTIO7NTkqCeCrNX3d5i8,1943
 ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
-ddeutil/workflow/conf.py,sha256=
+ddeutil/workflow/conf.py,sha256=n8grMRUVMtZFFc0J0g0wwG7JpDbvzpQcLoRehca1Qnk,12519
 ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
 ddeutil/workflow/exceptions.py,sha256=uLNxzav3HRcr4vaZnvbUIF_eTR6UXXZNaxroMWFOUL4,1418
-ddeutil/workflow/job.py,sha256=
-ddeutil/workflow/logs.py,sha256=
+ddeutil/workflow/job.py,sha256=NgEPgMAUL2mqDctLpdoEVx4g0ZsTj4RmkQluCh_ZUdM,30614
+ddeutil/workflow/logs.py,sha256=Ki1t6HkThwimzAe1OSxPPc7OQ4r-kXAc1kB63x2DsOg,21160
 ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
-ddeutil/workflow/result.py,sha256=
-ddeutil/workflow/reusables.py,sha256=
+ddeutil/workflow/result.py,sha256=9tbCmP0Sjy7h9GKWyD5e1bjAzNOWZcnvBFuC6to_f-8,4929
+ddeutil/workflow/reusables.py,sha256=ZE8WfD0WyQUKRV5aujJpGG6g6ODJz-wtgwHbQiCrN-E,17536
 ddeutil/workflow/scheduler.py,sha256=_MDsEHbBVOeF-381U8DfIMDyca_nG3XNXmgX4229_EU,27437
-ddeutil/workflow/stages.py,sha256=
+ddeutil/workflow/stages.py,sha256=cvSNt4IjbE4O9llCIL2bojnl4W50GHNa1ANQ4oGefUo,47438
 ddeutil/workflow/utils.py,sha256=sblje9qOtejCHVt8EVrbC0KY98vKqvxccaR5HIkRiTA,7363
-ddeutil/workflow/workflow.py,sha256=
+ddeutil/workflow/workflow.py,sha256=Pkm2e7Edph3l7ITjo3iTtvpbXmKD_fCeq7HrxdV0KAo,49708
 ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
 ddeutil/workflow/api/api.py,sha256=b-bMg0aRsEqt8Qb2hNUtamEt2Fq2CgNotF2oXSAdDu8,5226
 ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
@@ -23,8 +23,8 @@ ddeutil/workflow/api/routes/job.py,sha256=YVta083i8vU8-o4WdKFwDpfdC9vN1dZ6goZSmN
 ddeutil/workflow/api/routes/logs.py,sha256=TeRDrEelbKS2Hu_EovgLh0bOdmSv9mfnrIZsrE7uPD4,5353
 ddeutil/workflow/api/routes/schedules.py,sha256=rUWBm5RgLS1PNBHSWwWXJ0l-c5mYWfl9os0BA9_OTEw,4810
 ddeutil/workflow/api/routes/workflows.py,sha256=ctgQGxXfpIV6bHFDM9IQ1_qaQHT6n5-HjJ1-D4GKWpc,4527
-ddeutil_workflow-0.0.43.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.43.dist-info/METADATA,sha256=
-ddeutil_workflow-0.0.43.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-ddeutil_workflow-0.0.43.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.43.dist-info/RECORD,,
+ddeutil_workflow-0.0.45.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.45.dist-info/METADATA,sha256=4i4i_79_2HjUuyzyB6LWhP2wEKh3xifSfiDf8o_bENY,19116
+ddeutil_workflow-0.0.45.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.45.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.45.dist-info/RECORD,,
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/WHEEL
File without changes

{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/licenses/LICENSE
File without changes

{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.45.dist-info}/top_level.txt
File without changes