ddeutil-workflow 0.0.20__py3-none-any.whl → 0.0.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__types.py +1 -0
- ddeutil/workflow/conf.py +3 -0
- ddeutil/workflow/job.py +62 -68
- ddeutil/workflow/stage.py +2 -2
- ddeutil/workflow/workflow.py +120 -72
- {ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/METADATA +2 -1
- {ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/RECORD +12 -12
- {ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.20"
+__version__: str = "0.0.21"
ddeutil/workflow/__types.py
CHANGED
ddeutil/workflow/conf.py
CHANGED
@@ -100,6 +100,9 @@ class Config:
 
     # NOTE: Workflow
     max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
+    max_job_exec_timeout: int = int(
+        env("WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT", "600")
+    )
     max_poking_pool_worker: int = int(
        os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
    )
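The new `max_job_exec_timeout` setting falls back to 600 seconds via the `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` environment variable. A minimal sketch of overriding it, assuming the module-level `config` object that `job.py` and `workflow.py` reference is importable from `ddeutil.workflow.conf`:

```python
import os

# Assumption: Config reads its env-var defaults when the module is imported,
# so the variable must be set before that import happens.
os.environ["WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT"] = "300"

from ddeutil.workflow.conf import config  # import path assumed

print(config.max_job_exec_timeout)  # -> 300 (seconds)
```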
ddeutil/workflow/job.py
CHANGED
@@ -11,7 +11,6 @@ job.
 from __future__ import annotations
 
 import copy
-import time
 from concurrent.futures import (
     FIRST_EXCEPTION,
     Future,
@@ -48,13 +47,13 @@ from .utils import (
 )
 
 logger = get_logger("ddeutil.workflow")
-
-MatrixExclude = list[dict[str, Union[str, int]]]
+MatrixFilter = list[dict[str, Union[str, int]]]
 
 
 __all__: TupleStr = (
     "Strategy",
     "Job",
+    "TriggerRules",
     "make",
 )
 
@@ -63,8 +62,8 @@ __all__: TupleStr = (
 @lru_cache
 def make(
     matrix: Matrix,
-    include:
-    exclude:
+    include: MatrixFilter,
+    exclude: MatrixFilter,
 ) -> list[DictStr]:
     """Make a list of product of matrix values that already filter with
     exclude matrix and add specific matrix with include.
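For context, `make` builds the cross product of the matrix axes, drops combinations matched by `exclude`, and appends the `include` entries. A standalone sketch of that filtering logic (not the package's exact implementation):

```python
from itertools import product

matrix = {"python": ["3.9", "3.10"], "os": ["linux", "windows"]}
exclude = [{"python": "3.9", "os": "windows"}]
include = [{"python": "3.11", "os": "linux"}]

# Cross product of every matrix axis.
combos = [dict(zip(matrix, values)) for values in product(*matrix.values())]

# Drop any combination that matches every key of an exclude entry.
combos = [
    c
    for c in combos
    if not any(all(c.get(k) == v for k, v in ex.items()) for ex in exclude)
]

# Append include entries that are not already present.
combos += [inc for inc in include if inc not in combos]

print(combos)  # 3 filtered combinations plus the include entry
```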
@@ -124,7 +123,7 @@ def make(
 
 
 class Strategy(BaseModel):
-    """Strategy
+    """Strategy model that will combine a matrix together for running the
     special job with combination of matrix data.
 
     This model does not be the part of job only because you can use it to
@@ -166,11 +165,11 @@ class Strategy(BaseModel):
             "A matrix values that want to cross product to possible strategies."
         ),
     )
-    include:
+    include: MatrixFilter = Field(
         default_factory=list,
         description="A list of additional matrix that want to adds-in.",
     )
-    exclude:
+    exclude: MatrixFilter = Field(
         default_factory=list,
         description="A list of exclude matrix that want to filter-out.",
     )
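With both fields now typed as the renamed `MatrixFilter` alias, a strategy can be declared and expanded roughly like this (the import path and the exact shape of `make()`'s output are assumptions):

```python
from ddeutil.workflow.job import Strategy  # import path assumed

strategy = Strategy(
    matrix={"table": ["orders", "items"], "mode": ["full", "delta"]},
    exclude=[{"table": "items", "mode": "full"}],
    include=[{"table": "customers", "mode": "full"}],
)

# Expands to the filtered cross product plus the include entry.
for combo in strategy.make():
    print(combo)
```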
@@ -204,7 +203,7 @@ class Strategy(BaseModel):
 
 
 class TriggerRules(str, Enum):
-    """Trigger
+    """Trigger rules enum object."""
 
     all_success: str = "all_success"
     all_failed: str = "all_failed"
@@ -215,8 +214,15 @@ class TriggerRules(str, Enum):
     none_skipped: str = "none_skipped"
 
 
+class RunsOn(str, Enum):
+    """Runs-On enum object."""
+
+    local: str = "local"
+    docker: str = "docker"
+
+
 class Job(BaseModel):
-    """Job Pydantic model object (group of stages).
+    """Job Pydantic model object (short descripte: a group of stages).
 
     This job model allow you to use for-loop that call matrix strategy. If
     you pass matrix mapping and it able to generate, you will see it running
@@ -327,7 +333,10 @@ class Job(BaseModel):
         return self
 
     def stage(self, stage_id: str) -> Stage:
-        """Return stage
+        """Return stage instance that exists in this job via passing an input
+        stage ID.
+
+        :raise ValueError: If an input stage ID does not found on this job.
 
         :param stage_id: A stage ID that want to extract from this job.
         :rtype: Stage
@@ -360,8 +369,12 @@ class Job(BaseModel):
             }
         }
 
+        :raise JobException: If the job's ID does not set and the setting
+            default job ID flag does not set.
+
         :param output: An output context.
         :param to: A context data that want to add output result.
+
         :rtype: DictData
         """
         if self.id is None and not config.job_default_id:
@@ -387,8 +400,8 @@ class Job(BaseModel):
         self,
         strategy: DictData,
         params: DictData,
-        run_id: str | None = None,
         *,
+        run_id: str | None = None,
         event: Event | None = None,
     ) -> Result:
         """Job Strategy execution with passing dynamic parameters from the
@@ -398,11 +411,15 @@ class Job(BaseModel):
         It different with ``self.execute`` because this method run only one
         strategy and return with context of this strategy data.
 
+        The result of this execution will return result with strategy ID
+        that generated from the `gen_id` function with a input strategy value.
+
         :raise JobException: If it has any error from ``StageException`` or
             ``UtilException``.
 
-        :param strategy: A metrix
-
+        :param strategy: A strategy metrix value that use on this execution.
+            This value will pass to the `matrix` key for templating.
+        :param params: A dynamic parameters that will deepcopy to the context.
         :param run_id: A job running ID for this strategy execution.
         :param event: An manger event that pass to the PoolThreadExecutor.
 
@@ -433,17 +450,19 @@ class Job(BaseModel):
                 logger.info(f"({run_id}) [JOB]: Skip stage: {stage.iden!r}")
                 continue
 
-            logger.info(
-                f"({run_id}) [JOB]: Start execute the stage: {stage.iden!r}"
-            )
+            logger.info(f"({run_id}) [JOB]: Execute stage: {stage.iden!r}")
 
             # NOTE: Logging a matrix that pass on this stage execution.
             if strategy:
-                logger.info(f"({run_id}) [JOB]: Matrix: {strategy}")
+                logger.info(f"({run_id}) [JOB]: ... Matrix: {strategy}")
 
             # NOTE: Force stop this execution if event was set from main
             # execution.
             if event and event.is_set():
+                error_msg: str = (
+                    "Job strategy was canceled from event that had set before "
+                    "strategy execution."
+                )
                 return Result(
                     status=1,
                     context={
@@ -453,15 +472,10 @@ class Job(BaseModel):
                         # it will not filter function object from context.
                         # ---
                         # "stages": filter_func(context.pop("stages", {})),
+                        #
                         "stages": context.pop("stages", {}),
-                        "error": JobException(
-
-                            "that had stopped before execution."
-                        ),
-                        "error_message": (
-                            "Job strategy was canceled from trigger event "
-                            "that had stopped before execution."
-                        ),
+                        "error": JobException(error_msg),
+                        "error_message": error_msg,
                     },
                 },
                 run_id=run_id,
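This refactor collapses the duplicated cancel message into a single `error_msg` reused for both the `error` and `error_message` keys. The mechanism underneath is a cooperative stop through `threading.Event`, sketched here outside the model:

```python
import threading

def run_strategy(event: threading.Event) -> dict:
    # Check the shared event before doing any work; under fail-fast mode the
    # main executor sets it when a sibling strategy has already failed.
    if event.is_set():
        error_msg = (
            "Job strategy was canceled from event that had set before "
            "strategy execution."
        )
        return {"status": 1, "error_message": error_msg}
    return {"status": 0}

event = threading.Event()
event.set()  # simulate a fail-fast cancellation
print(run_strategy(event))  # {'status': 1, 'error_message': '...'}
```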
@@ -512,7 +526,7 @@ class Job(BaseModel):
                 run_id=run_id,
             )
 
-            # NOTE: Remove the current stage object.
+            # NOTE: Remove the current stage object for saving memory.
             del stage
 
         return Result(
@@ -542,7 +556,8 @@ class Job(BaseModel):
         run_id: str = run_id or gen_id(self.id or "", unique=True)
         context: DictData = {}
 
-        # NOTE: Normal Job execution without parallel strategy.
+        # NOTE: Normal Job execution without parallel strategy matrix. It use
+        #   for-loop to control strategy execution sequentially.
         if (not self.strategy.is_set()) or self.strategy.max_parallel == 1:
             for strategy in self.strategy.make():
                 rs: Result = self.execute_strategy(
@@ -566,6 +581,7 @@ class Job(BaseModel):
             max_workers=self.strategy.max_parallel,
             thread_name_prefix="job_strategy_exec_",
         ) as executor:
+
             futures: list[Future] = [
                 executor.submit(
                     self.execute_strategy,
@@ -577,11 +593,8 @@ class Job(BaseModel):
                 for strategy in self.strategy.make()
             ]
 
-            # NOTE: Dynamic catching futures object with fail-fast flag.
             return (
-                self.__catch_fail_fast(
-                    event=event, futures=futures, run_id=run_id
-                )
+                self.__catch_fail_fast(event, futures=futures, run_id=run_id)
                 if self.strategy.fail_fast
                 else self.__catch_all_completed(futures=futures, run_id=run_id)
             )
@@ -593,19 +606,17 @@ class Job(BaseModel):
         run_id: str,
         *,
         timeout: int = 1800,
-        result_timeout: int = 60,
     ) -> Result:
         """Job parallel pool futures catching with fail-fast mode. That will
-        stop all not done futures if it receive the first
-        running futures.
+        stop and set event on all not done futures if it receive the first
+        exception from all running futures.
 
         :param event: An event manager instance that able to set stopper on the
-            observing
+            observing multithreading.
         :param futures: A list of futures.
         :param run_id: A job running ID from execution.
         :param timeout: A timeout to waiting all futures complete.
-
-            instance when it was running completely.
+
         :rtype: Result
         """
         rs_final: Result = Result()
@@ -615,9 +626,7 @@ class Job(BaseModel):
         # NOTE: Get results from a collection of tasks with a timeout that has
         # the first exception.
         done, not_done = wait(
-            futures,
-            timeout=timeout,
-            return_when=FIRST_EXCEPTION,
+            futures, timeout=timeout, return_when=FIRST_EXCEPTION
         )
         nd: str = (
             f", the strategies do not run is {not_done}" if not_done else ""
@@ -635,11 +644,13 @@ class Job(BaseModel):
 
         future: Future
         for future in done:
+
+            # NOTE: Handle the first exception from feature
             if err := future.exception():
                 status: int = 1
                 logger.error(
-                    f"({run_id}) [JOB]:
-                    f"{future.exception()}
+                    f"({run_id}) [JOB]: Fail-fast catching:\n\t"
+                    f"{future.exception()}"
                 )
                 context.update(
                     {
@@ -650,7 +661,7 @@ class Job(BaseModel):
                 continue
 
             # NOTE: Update the result context to main job context.
-            context.update(future.result(
+            context.update(future.result().context)
 
         return rs_final.catch(status=status, context=context)
 
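`__catch_fail_fast` builds on `concurrent.futures.wait` with `FIRST_EXCEPTION`: the call returns as soon as any future raises, and the not-done futures can then be cancelled. A self-contained sketch of that mechanic (toy task, not the package's code):

```python
import time
from concurrent.futures import FIRST_EXCEPTION, ThreadPoolExecutor, wait

def task(n: int) -> int:
    time.sleep(n)
    if n == 1:
        raise RuntimeError("boom")
    return n

with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(task, n) for n in (1, 3, 5)]

    # Return as soon as the first future raises; the rest may still be pending.
    done, not_done = wait(futures, timeout=1800, return_when=FIRST_EXCEPTION)

    # Cancel everything that has not started yet, mirroring fail-fast mode.
    for future in not_done:
        future.cancel()

    for future in done:
        if err := future.exception():
            print(f"fail-fast caught: {err}")
```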
@@ -660,45 +671,27 @@ class Job(BaseModel):
         run_id: str,
         *,
         timeout: int = 1800,
-        result_timeout: int = 60,
     ) -> Result:
         """Job parallel pool futures catching with all-completed mode.
 
-        :param futures: A list of futures
-            result.
+        :param futures: A list of futures.
         :param run_id: A job running ID from execution.
         :param timeout: A timeout to waiting all futures complete.
-
-            instance when it was running completely.
+
         :rtype: Result
         """
         rs_final: Result = Result()
         context: DictData = {}
         status: int = 0
+
         for future in as_completed(futures, timeout=timeout):
             try:
-                context.update(future.result(
-            except TimeoutError:  # pragma: no cov
-                status = 1
-                logger.warning(
-                    f"({run_id}) [JOB]: Task is hanging. Attempting to "
-                    f"kill."
-                )
-                future.cancel()
-                time.sleep(0.1)
-
-                stmt: str = (
-                    "Failed to cancel the task."
-                    if not future.cancelled()
-                    else "Task canceled successfully."
-                )
-                logger.warning(f"({run_id}) [JOB]: {stmt}")
+                context.update(future.result().context)
             except JobException as err:
                 status = 1
                 logger.error(
-                    f"({run_id}) [JOB]:
-                    f"
-                    f"{err}"
+                    f"({run_id}) [JOB]: All-completed catching:\n\t"
+                    f"{err.__class__.__name__}:\n\t{err}"
                 )
                 context.update(
                     {
@@ -706,4 +699,5 @@ class Job(BaseModel):
                     "error_message": f"{err.__class__.__name__}: {err}",
                 },
             )
+
         return rs_final.catch(status=status, context=context)
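The all-completed counterpart drains every future through `as_completed` and records failures in the context instead of stopping early. The same loop shape, abstracted with a toy task and `ValueError` standing in for `JobException`:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def task(n: int) -> dict:
    if n == 2:
        raise ValueError(f"strategy {n} failed")
    return {f"strategy-{n}": "success"}

context: dict = {}
status: int = 0

with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(task, n) for n in range(4)]

    for future in as_completed(futures, timeout=1800):
        try:
            context.update(future.result())
        except ValueError as err:  # stands in for JobException
            status = 1
            context.update(
                {"error_message": f"{err.__class__.__name__}: {err}"}
            )

print(status, context)  # 1, three success keys plus the error message
```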
ddeutil/workflow/stage.py
CHANGED
@@ -496,6 +496,7 @@ class PyStage(BaseStage):
 
         :param output: A output data that want to extract to an output key.
         :param to: A context data that want to add output result.
+
         :rtype: DictData
         """
         # NOTE: The output will fileter unnecessary keys from locals.
@@ -638,8 +639,7 @@ class HookStage(BaseStage):
 
         :rtype: Result
         """
-
-        t_func: TagFunc = extract_hook(t_func_hook)()
+        t_func: TagFunc = extract_hook(param2template(self.uses, params))()
 
         # VALIDATE: check input task caller parameters that exists before
         # calling.
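The fix resolves `self.uses` through `param2template` before extracting the hook, so the hook path itself can carry template values. The snippet below is illustrative only; `render` is a hypothetical stand-in for the package's `param2template`, whose exact template syntax is not shown in this diff:

```python
# Hypothetical stand-in for param2template: resolve template expressions
# in a string against a params mapping before the hook is looked up.
def render(template: str, params: dict) -> str:
    out = template
    for key, value in params.items():
        out = out.replace("${{ params." + key + " }}", str(value))
    return out

# The hook path can now be parameterized before extract_hook runs.
uses = "tasks/${{ params.name }}@demo"
print(render(uses, {"name": "el-csv-to-parquet"}))
# -> tasks/el-csv-to-parquet@demo
```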
ddeutil/workflow/workflow.py
CHANGED
@@ -3,8 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-"""
-The main schedule running is ``workflow_runner`` function that trigger the
+"""The main schedule running is ``workflow_runner`` function that trigger the
 multiprocess of ``workflow_control`` function for listing schedules on the
 config by ``Loader.finds(Schedule)``.
 
@@ -12,6 +11,7 @@ config by ``Loader.finds(Schedule)``.
 functions; ``workflow_task``, and ``workflow_monitor``.
 
     ``workflow_control`` --- Every minute at :02 --> ``workflow_task``
+
                          --- Every 5 minutes --> ``workflow_monitor``
 
 The ``workflow_task`` will run ``task.release`` method in threading object
@@ -70,7 +70,7 @@ __all__: TupleStr = (
 @total_ordering
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
 class WorkflowRelease:
-    """Workflow release
+    """Workflow release Pydantic dataclass object."""
 
     date: datetime
     offset: float
@@ -86,6 +86,12 @@ class WorkflowRelease:
 
     @classmethod
     def from_dt(cls, dt: datetime) -> Self:
+        """Construct WorkflowRelease via datetime object only.
+
+        :param dt: A datetime object.
+
+        :rtype: Self
+        """
         return cls(
             date=dt,
             offset=0,
@@ -95,6 +101,9 @@ class WorkflowRelease:
         )
 
     def __eq__(self, other: WorkflowRelease | datetime) -> bool:
+        """Override equal property that will compare only the same type or
+        datetime.
+        """
         if isinstance(other, self.__class__):
             return self.date == other.date
         elif isinstance(other, datetime):
@@ -102,6 +111,9 @@ class WorkflowRelease:
         return NotImplemented
 
     def __lt__(self, other: WorkflowRelease | datetime) -> bool:
+        """Override equal property that will compare only the same type or
+        datetime.
+        """
         if isinstance(other, self.__class__):
             return self.date < other.date
         elif isinstance(other, datetime):
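Since `WorkflowRelease` is decorated with `@total_ordering`, defining just `__eq__` and `__lt__` yields the full comparison set, and accepting a plain `datetime` lets releases compare directly against dates. A plain-Python sketch of the same contract:

```python
from datetime import datetime
from functools import total_ordering

@total_ordering
class Release:
    """Minimal stand-in for WorkflowRelease's comparison behavior."""

    def __init__(self, date: datetime) -> None:
        self.date = date

    def __eq__(self, other) -> bool:
        if isinstance(other, Release):
            return self.date == other.date
        elif isinstance(other, datetime):
            return self.date == other
        return NotImplemented

    def __lt__(self, other) -> bool:
        if isinstance(other, Release):
            return self.date < other.date
        elif isinstance(other, datetime):
            return self.date < other
        return NotImplemented

r = Release(datetime(2024, 1, 1))
print(r <= Release(datetime(2024, 6, 1)))  # True, derived by total_ordering
print(r == datetime(2024, 1, 1))           # True, direct datetime comparison
```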
@@ -119,14 +131,19 @@ class WorkflowQueue:
 
     @property
     def is_queued(self) -> bool:
-        """Return True if it has
+        """Return True if it has workflow release object in the queue.
+
+        :rtype: bool
+        """
         return len(self.queue) > 0
 
     def check_queue(self, data: WorkflowRelease) -> bool:
         """Check a WorkflowRelease value already exists in list of tracking
         queues.
 
-        :param data:
+        :param data: A workflow release object.
+
+        :rtype: bool
         """
         return (
             (data in self.queue)
@@ -135,23 +152,28 @@ class WorkflowQueue:
         )
 
     def push_queue(self, data: WorkflowRelease) -> Self:
+        """Push data to the queue."""
         heappush(self.queue, data)
         return self
 
     def push_running(self, data: WorkflowRelease) -> Self:
+        """Push data to the running."""
         heappush(self.running, data)
         return self
 
     def remove_running(self, data: WorkflowRelease) -> Self:
+        """Remove data on the running if it exists."""
         if data in self.running:
             self.running.remove(data)
 
 
 class Workflow(BaseModel):
-    """Workflow Pydantic
-
-
-
+    """Workflow Pydantic model.
+
+    This is the main future of this project because it use to be workflow
+    data for running everywhere that you want or using it to scheduler task in
+    background. It use lightweight coding line from Pydantic Model and enhance
+    execute method on it.
     """
 
     name: str = Field(description="A workflow name.")
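The newly documented `push_queue` and `push_running` wrap `heapq.heappush`, so the earliest release (per `__lt__` above) always sits at index 0. The core mechanic in isolation, with datetimes standing in for `WorkflowRelease` items:

```python
import heapq
from datetime import datetime

queue: list[datetime] = []  # datetimes stand in for WorkflowRelease items

# heappush keeps the list heap-ordered, so index 0 is always the minimum.
heapq.heappush(queue, datetime(2024, 3, 1))
heapq.heappush(queue, datetime(2024, 1, 1))
heapq.heappush(queue, datetime(2024, 2, 1))

print(queue[0])              # 2024-01-01 00:00:00, the next release to run
print(heapq.heappop(queue))  # pops the earliest entry first
```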
@@ -184,9 +206,12 @@ class Workflow(BaseModel):
         an input workflow name. The loader object will use this workflow name to
         searching configuration data of this workflow model in conf path.
 
+        :raise ValueError: If the type does not match with current object.
+
         :param name: A workflow name that want to pass to Loader object.
         :param externals: An external parameters that want to pass to Loader
             object.
+
         :rtype: Self
         """
         loader: Loader = Loader(name, externals=(externals or {}))
@@ -235,7 +260,7 @@ class Workflow(BaseModel):
 
     @model_validator(mode="before")
     def __prepare_model_before__(cls, values: DictData) -> DictData:
-        """Prepare the params key."""
+        """Prepare the params key in the data model before validating."""
         # NOTE: Prepare params type if it passing with only type value.
         if params := values.pop("params", {}):
             values["params"] = {
@@ -260,7 +285,15 @@ class Workflow(BaseModel):
     @field_validator("on", mode="after")
     def __on_no_dup__(cls, value: list[On]) -> list[On]:
         """Validate the on fields should not contain duplicate values and if it
-        contain every minute value
+        contain the every minute value more than one value, it will remove to
+        only one value.
+
+        :raise ValueError: If it has some duplicate value.
+
+        :param value: A list of on object.
+
+        :rtype: list[On]
+        """
         set_ons: set[str] = {str(on.cronjob) for on in value}
         if len(set_ons) != len(value):
             raise ValueError(
@@ -279,6 +312,9 @@ class Workflow(BaseModel):
     def __validate_jobs_need__(self) -> Self:
         """Validate each need job in any jobs should exists.
 
+        :raise WorkflowException: If it has not exists need value in this
+            workflow job.
+
         :rtype: Self
         """
         for job in self.jobs:
@@ -623,8 +659,7 @@ class Workflow(BaseModel):
             # WARNING: This poking method does not allow to use fail-fast
             # logic to catching parallel execution result.
             for future in as_completed(futures):
-
-                results.append(rs.set_parent_run_id(run_id))
+                results.append(future.result().set_parent_run_id(run_id))
 
         while len(workflow_queue.running) > 0:  # pragma: no cov
             logger.warning(
@@ -639,17 +674,19 @@ class Workflow(BaseModel):
         self,
         job_id: str,
         params: DictData,
-        run_id: str | None = None,
         *,
+        run_id: str | None = None,
         raise_error: bool = True,
     ) -> Result:
-        """
-
+        """Job execution with passing dynamic parameters from the main workflow
+        execution to the target job object via job's ID.
 
         This execution is the minimum level of execution of this workflow
         model. It different with ``self.execute`` because this method run only
         one job and return with context of this job data.
 
+        :raise NotImplementedError: If set raise_error argument to False.
+
         :param job_id: A job ID that want to execute.
         :param params: A params that was parameterized from workflow execution.
         :param run_id: A workflow running ID for this job execution.
@@ -663,11 +700,11 @@ class Workflow(BaseModel):
         # VALIDATE: check a job ID that exists in this workflow or not.
         if job_id not in self.jobs:
             raise WorkflowException(
-                f"The job
+                f"The job: {job_id!r} does not exists in {self.name!r} "
                 f"workflow."
             )
 
-        logger.info(f"({run_id}) [WORKFLOW]: Start execute: {job_id!r}")
+        logger.info(f"({run_id}) [WORKFLOW]: Start execute job: {job_id!r}")
 
         # IMPORTANT:
         #   Change any job running IDs to this workflow running ID.
@@ -686,17 +723,18 @@ class Workflow(BaseModel):
             raise WorkflowException(
                 f"Get job execution error {job_id}: JobException: {err}"
             ) from None
-
-
+        raise NotImplementedError(
+            "Handle error from the job execution does not support yet."
+        ) from None
 
         return Result(status=0, context=params).set_run_id(run_id)
 
     def execute(
         self,
         params: DictData,
-        run_id: str | None = None,
         *,
-
+        run_id: str | None = None,
+        timeout: int = 0,
     ) -> Result:
         """Execute workflow with passing a dynamic parameters to all jobs that
         included in this workflow model with ``jobs`` field.
|
|
712
750
|
|
713
751
|
:param params: An input parameters that use on workflow execution that
|
714
752
|
will parameterize before using it. Default is None.
|
715
|
-
:type params: DictData
|
753
|
+
:type params: DictData
|
754
|
+
|
716
755
|
:param run_id: A workflow running ID for this job execution.
|
717
|
-
:type run_id: str | None
|
756
|
+
:type run_id: str | None (default: None)
|
718
757
|
:param timeout: A workflow execution time out in second unit that use
|
719
|
-
for limit time of execution and waiting job dependency.
|
720
|
-
|
721
|
-
:type timeout: int
|
758
|
+
for limit time of execution and waiting job dependency.
|
759
|
+
:type timeout: int (default: 0)
|
722
760
|
|
723
761
|
:rtype: Result
|
724
762
|
"""
|
@@ -769,17 +807,16 @@ class Workflow(BaseModel):
|
|
769
807
|
context=context,
|
770
808
|
ts=ts,
|
771
809
|
job_queue=jq,
|
772
|
-
worker=config.max_job_parallel,
|
773
810
|
timeout=timeout,
|
774
811
|
)
|
775
812
|
except WorkflowException as err:
|
813
|
+
status: int = 1
|
776
814
|
context.update(
|
777
815
|
{
|
778
816
|
"error": err,
|
779
817
|
"error_message": f"{err.__class__.__name__}: {err}",
|
780
818
|
},
|
781
819
|
)
|
782
|
-
status = 1
|
783
820
|
return rs.catch(status=status, context=context)
|
784
821
|
|
785
822
|
def __exec_threading(
|
@@ -789,10 +826,10 @@ class Workflow(BaseModel):
|
|
789
826
|
ts: float,
|
790
827
|
job_queue: Queue,
|
791
828
|
*,
|
792
|
-
|
793
|
-
|
829
|
+
timeout: int = 0,
|
830
|
+
thread_timeout: int = 1800,
|
794
831
|
) -> DictData:
|
795
|
-
"""Workflow execution by threading strategy.
|
832
|
+
"""Workflow execution by threading strategy that use multithreading.
|
796
833
|
|
797
834
|
If a job need dependency, it will check dependency job ID from
|
798
835
|
context data before allow it run.
|
@@ -802,22 +839,26 @@ class Workflow(BaseModel):
|
|
802
839
|
timeout.
|
803
840
|
:param job_queue: A job queue object.
|
804
841
|
:param timeout: A second value unit that bounding running time.
|
805
|
-
:param
|
842
|
+
:param thread_timeout: A timeout to waiting all futures complete.
|
843
|
+
|
806
844
|
:rtype: DictData
|
807
845
|
"""
|
808
|
-
|
846
|
+
not_timeout_flag: bool = True
|
847
|
+
timeout: int = timeout or config.max_job_exec_timeout
|
809
848
|
logger.debug(
|
810
|
-
f"({run_id})
|
811
|
-
f"executor"
|
849
|
+
f"({run_id}) [WORKFLOW]: Run {self.name} with threading executor."
|
812
850
|
)
|
813
851
|
|
814
852
|
# IMPORTANT: The job execution can run parallel and waiting by
|
815
853
|
# needed.
|
816
|
-
with ThreadPoolExecutor(
|
854
|
+
with ThreadPoolExecutor(
|
855
|
+
max_workers=config.max_job_parallel,
|
856
|
+
thread_name_prefix="workflow_exec_threading_",
|
857
|
+
) as executor:
|
817
858
|
futures: list[Future] = []
|
818
859
|
|
819
860
|
while not job_queue.empty() and (
|
820
|
-
|
861
|
+
not_timeout_flag := ((time.monotonic() - ts) < timeout)
|
821
862
|
):
|
822
863
|
job_id: str = job_queue.get()
|
823
864
|
job: Job = self.jobs[job_id]
|
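Both executors now share one timing pattern: a walrus assignment inside the `while` condition records on every iteration whether the elapsed monotonic time is still under `timeout` (which now falls back to `config.max_job_exec_timeout`), and that flag later decides between `job_queue.join()` and raising a timeout. The pattern in isolation:

```python
import time
from queue import Queue

timeout: int = 2  # stands in for config.max_job_exec_timeout
job_queue: Queue = Queue()
for job_id in ("extract", "transform", "load"):
    job_queue.put(job_id)

not_timeout_flag: bool = True
ts: float = time.monotonic()

# The walrus operator re-evaluates the elapsed-time check on every pass.
while not job_queue.empty() and (
    not_timeout_flag := ((time.monotonic() - ts) < timeout)
):
    print(f"executing {job_queue.get()}")
    job_queue.task_done()

if not_timeout_flag:
    job_queue.join()  # returns immediately: every item was marked done
else:
    # The real code raises WorkflowException here.
    raise TimeoutError("Execution of workflow was timeout")
```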
@@ -846,29 +887,31 @@ class Workflow(BaseModel):
                 # NOTE: Mark this job queue done.
                 job_queue.task_done()
 
-
-            job_queue.join()
+            if not_timeout_flag:
 
-
-
-
-
-
-                future.
-
-
-
-
-
-
-
+                # NOTE: Wait for all items to finish processing by `task_done()`
+                # method.
+                job_queue.join()
+
+                for future in as_completed(futures, timeout=thread_timeout):
+                    if err := future.exception():
+                        logger.error(f"({run_id}) [WORKFLOW]: {err}")
+                        raise WorkflowException(f"{err}")
+
+                    # NOTE: This getting result does not do anything.
+                    future.result()
+
+                return context
+
+            for future in futures:
+                future.cancel()
 
             # NOTE: Raise timeout error.
-            logger.warning(
-                f"({run_id}) [WORKFLOW]: Execution of workflow, {self.name!r} "
-                f"
+            logger.warning(
+                f"({run_id}) [WORKFLOW]: Execution of workflow, {self.name!r}, "
+                f"was timeout"
             )
-            raise WorkflowException(
+            raise WorkflowException(
                 f"Execution of workflow: {self.name} was timeout"
             )
 
@@ -879,7 +922,7 @@ class Workflow(BaseModel):
         ts: float,
         job_queue: Queue,
         *,
-        timeout: int =
+        timeout: int = 0,
     ) -> DictData:
         """Workflow execution with non-threading strategy that use sequential
         job running and waiting previous job was run successful.
@@ -891,16 +934,18 @@ class Workflow(BaseModel):
         :param ts: A start timestamp that use for checking execute time should
             timeout.
         :param timeout: A second value unit that bounding running time.
+
         :rtype: DictData
         """
-
+        not_timeout_flag: bool = True
+        timeout: int = timeout or config.max_job_exec_timeout
         logger.debug(
-            f"({run_id}) [WORKFLOW]: Run {self.name} with non-threading
-            f"executor"
+            f"({run_id}) [WORKFLOW]: Run {self.name} with non-threading "
+            f"executor."
        )
 
         while not job_queue.empty() and (
-
+            not_timeout_flag := ((time.monotonic() - ts) < timeout)
         ):
             job_id: str = job_queue.get()
             job: Job = self.jobs[job_id]
@@ -909,7 +954,7 @@ class Workflow(BaseModel):
             if any(need not in context["jobs"] for need in job.needs):
                 job_queue.task_done()
                 job_queue.put(job_id)
-                time.sleep(0.
+                time.sleep(0.075)
                 continue
 
             # NOTE: Start workflow job execution with deep copy context data
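In the non-threading path, a job whose `needs` are not yet all in the finished context goes back to the end of the queue, now with the slightly shorter 0.075-second backoff. A compact sketch of that requeue loop with a toy dependency graph:

```python
import time
from queue import Queue

needs = {"load": ["transform"], "transform": ["extract"], "extract": []}
finished: set[str] = set()

job_queue: Queue = Queue()
for job_id in ("load", "transform", "extract"):
    job_queue.put(job_id)

while not job_queue.empty():
    job_id = job_queue.get()

    # Requeue the job if any dependency has not finished yet.
    if any(need not in finished for need in needs[job_id]):
        job_queue.task_done()
        job_queue.put(job_id)
        time.sleep(0.075)
        continue

    finished.add(job_id)  # stands in for the real job execution
    job_queue.task_done()

print(finished)  # {'extract', 'transform', 'load'}
```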
@@ -925,27 +970,29 @@ class Workflow(BaseModel):
             # NOTE: Mark this job queue done.
             job_queue.task_done()
 
-
-
+        if not_timeout_flag:
+
+            # NOTE: Wait for all items to finish processing by `task_done()`
+            # method.
+            job_queue.join()
 
-        if not_time_out_flag:
             return context
 
         # NOTE: Raise timeout error.
-        logger.warning(
+        logger.warning(
             f"({run_id}) [WORKFLOW]: Execution of workflow was timeout"
         )
-        raise WorkflowException(
+        raise WorkflowException(
             f"Execution of workflow: {self.name} was timeout"
         )
 
 
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
 class WorkflowTaskData:
-    """Workflow task dataclass that use to keep mapping data and
-        passing
+    """Workflow task Pydantic dataclass object that use to keep mapping data and
+    workflow model for passing to the multithreading task.
 
-    This dataclass
+    This dataclass object is mapping 1-to-1 with workflow and cron runner
     objects.
     """
@@ -1075,7 +1122,8 @@ class WorkflowTaskData:
         # NOTE: Queue next release date.
         logger.debug(f"[CORE]: {'-' * 100}")
 
-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: WorkflowTaskData) -> bool:
+        """Override equal property that will compare only the same type."""
         if isinstance(other, WorkflowTaskData):
             return (
                 self.workflow.name == other.workflow.name
{ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.20
+Version: 0.0.21
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -195,6 +195,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
{ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/RECORD
CHANGED
@@ -1,22 +1,22 @@
-ddeutil/workflow/__about__.py,sha256=
+ddeutil/workflow/__about__.py,sha256=at-1IOQn4CID6hNRKtCzRBtCjVNyizp3IXd27XWqHPQ,28
 ddeutil/workflow/__cron.py,sha256=_2P9nmGOwGdv5bLgf9TpML2HBgqLv_qRgiO1Rulo1PA,26693
 ddeutil/workflow/__init__.py,sha256=DCSN0foPFlFLN_Q4uoWa_EBBlKeMHXGpOdr-lWHISrQ,1422
-ddeutil/workflow/__types.py,sha256=
+ddeutil/workflow/__types.py,sha256=Ia7f38kvL3NibwmRKi0wQ1ud_45Z-SojYGhNJwIqcu8,3713
 ddeutil/workflow/api.py,sha256=vUT2RVS9sF3hvY-IrzAEnahxwq4ZFYP0G3xfctHbNsw,4701
 ddeutil/workflow/cli.py,sha256=baHhvtI8snbHYHeThoX401Cd6SMB2boyyCbCtTrIl3E,3278
-ddeutil/workflow/conf.py,sha256=
+ddeutil/workflow/conf.py,sha256=KSwEHlZ_2I-bWGNKllDvkxNLy1WdHjUGxGH45_A3K5w,15717
 ddeutil/workflow/exceptions.py,sha256=NqnQJP52S59XIYMeXbTDbr4xH2UZ5EA3ejpU5Z4g6cQ,894
-ddeutil/workflow/job.py,sha256=
+ddeutil/workflow/job.py,sha256=jj8f8SyFD6GQyTZnxhviiDGv2ELVybMmVDDoxORoy1A,23875
 ddeutil/workflow/on.py,sha256=3Typ9YS2303LTijGK4ytN38ZLC0Gyq55HtFd0lm97Ic,7391
 ddeutil/workflow/repeat.py,sha256=s0azh-f5JQeow7kpxM8GKlqgAmKL7oU6St3L4Ggx4cY,4925
 ddeutil/workflow/route.py,sha256=JALwOH6xKu5rnII7DgA1Lbp_E5ehCoBbOW_eKqB_Olk,6753
 ddeutil/workflow/scheduler.py,sha256=B2uXsqzmp32nIbya8EDePYyRhpwcxCMeoibPABCuMOA,18750
-ddeutil/workflow/stage.py,sha256=
+ddeutil/workflow/stage.py,sha256=Tzq-ciMZXKNUZ4cH4djyPIZ8aqj_P1Qm5zYZqYF8bDM,26301
 ddeutil/workflow/utils.py,sha256=IUTj7c6Jsi1oNHP7inLpv1TYhAA44lEMU1n5nNa1-bk,25657
-ddeutil/workflow/workflow.py,sha256=
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
+ddeutil/workflow/workflow.py,sha256=w800yMcajcIxdhWXgmqtPYGiiU2ftwsjyqqqnh-1-7o,38405
+ddeutil_workflow-0.0.21.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.21.dist-info/METADATA,sha256=7JQpnRPdK6pOi2tzqK8_dquR5LHSlD0OP8agmVxCEUg,13800
+ddeutil_workflow-0.0.21.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ddeutil_workflow-0.0.21.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
+ddeutil_workflow-0.0.21.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.21.dist-info/RECORD,,
{ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/LICENSE
File without changes
{ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/WHEEL
File without changes
{ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/entry_points.txt
File without changes
{ddeutil_workflow-0.0.20.dist-info → ddeutil_workflow-0.0.21.dist-info}/top_level.txt
File without changes