ddeutil-workflow 0.0.21__py3-none-any.whl → 0.0.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/conf.py +4 -0
- ddeutil/workflow/job.py +26 -16
- ddeutil/workflow/on.py +3 -0
- ddeutil/workflow/stage.py +17 -9
- ddeutil/workflow/utils.py +18 -2
- ddeutil/workflow/workflow.py +188 -112
- {ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/METADATA +2 -1
- ddeutil_workflow-0.0.22.dist-info/RECORD +22 -0
- ddeutil_workflow-0.0.21.dist-info/RECORD +0 -22
- {ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.21"
+__version__: str = "0.0.22"
ddeutil/workflow/conf.py
CHANGED
@@ -106,6 +106,9 @@ class Config:
     max_poking_pool_worker: int = int(
         os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
     )
+    max_on_per_workflow: int = int(
+        env("WORKFLOW_CORE_MAX_ON_PER_WORKFLOW", "5")
+    )
 
     # NOTE: Schedule App
     max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
@@ -462,6 +465,7 @@ class FileLog(BaseLog):
 
         :param excluded: An excluded list of key name that want to pass in the
             model_dump method.
+
         :rtype: Self
         """
         # NOTE: Check environ variable was set for real writing.
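For context, the new limit resolves from the environment the same way as its neighbors. A minimal standalone sketch (using `os.getenv` in place of the package's `env` helper, which it mirrors here):

```python
import os

# Default of 5 crontab schedules per workflow when the variable is unset;
# the Workflow `on` validator (see workflow.py below) enforces this limit.
max_on_per_workflow: int = int(
    os.getenv("WORKFLOW_CORE_MAX_ON_PER_WORKFLOW", "5")
)
print(max_on_per_workflow)  # -> 5 unless the variable is exported
```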
ddeutil/workflow/job.py
CHANGED
@@ -22,7 +22,7 @@ from enum import Enum
 from functools import lru_cache
 from textwrap import dedent
 from threading import Event
-from typing import Optional, Union
+from typing import Any, Optional, Union
 
 from ddeutil.core import freeze_args
 from pydantic import BaseModel, Field
@@ -40,6 +40,7 @@ from .stage import Stage
 from .utils import (
     Result,
     cross_product,
+    cut_id,
     dash2underscore,
     filter_func,
     gen_id,
@@ -346,6 +347,13 @@ class Job(BaseModel):
                 return stage
         raise ValueError(f"Stage ID {stage_id} does not exists")
 
+    def check_needs(self, jobs: dict[str, Any]) -> bool:
+        """Return True if job's need exists in an input list of job's ID.
+
+        :rtype: bool
+        """
+        return all(need in jobs for need in self.needs)
+
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the receive context. The
         result from execution will pass to value of ``strategies`` key.
@@ -427,6 +435,7 @@ class Job(BaseModel):
         """
         run_id: str = run_id or gen_id(self.id or "", unique=True)
         strategy_id: str = gen_id(strategy)
+        rs: Result = Result(run_id=run_id)
 
         # PARAGRAPH:
         #
@@ -447,14 +456,18 @@
         for stage in self.stages:
 
             if stage.is_skipped(params=context):
-                logger.info(f"({run_id}) [JOB]: Skip stage: {stage.iden!r}")
+                logger.info(
+                    f"({cut_id(run_id)}) [JOB]: Skip stage: {stage.iden!r}"
+                )
                 continue
 
-            logger.info(f"({run_id}) [JOB]: Execute stage: {stage.iden!r}")
+            logger.info(
+                f"({cut_id(run_id)}) [JOB]: Execute stage: {stage.iden!r}"
+            )
 
             # NOTE: Logging a matrix that pass on this stage execution.
             if strategy:
-                logger.info(f"({run_id}) [JOB]: ... Matrix: {strategy}")
+                logger.info(f"({cut_id(run_id)}) [JOB]: ... Matrix: {strategy}")
 
             # NOTE: Force stop this execution if event was set from main
             # execution.
@@ -463,7 +476,7 @@
                     "Job strategy was canceled from event that had set before "
                     "strategy execution."
                 )
-                return Result(
+                return rs.catch(
                     status=1,
                     context={
                         strategy_id: {
@@ -478,7 +491,6 @@
                             "error_message": error_msg,
                         },
                     },
-                    run_id=run_id,
                 )
 
         # PARAGRAPH:
@@ -506,14 +518,14 @@
                 )
             except (StageException, UtilException) as err:
                 logger.error(
-                    f"({run_id}) [JOB]: {err.__class__.__name__}: {err}"
+                    f"({cut_id(run_id)}) [JOB]: {err.__class__.__name__}: {err}"
                 )
                 if config.job_raise_error:
                     raise JobException(
                         f"Get stage execution error: {err.__class__.__name__}: "
                         f"{err}"
                     ) from None
-                return Result(
+                return rs.catch(
                     status=1,
                     context={
                         strategy_id: {
@@ -523,13 +535,12 @@
                             "error_message": f"{err.__class__.__name__}: {err}",
                         },
                     },
-                    run_id=run_id,
                 )
 
             # NOTE: Remove the current stage object for saving memory.
             del stage
 
-        return Result(
+        return rs.catch(
             status=0,
             context={
                 strategy_id: {
@@ -537,7 +548,6 @@
                     "stages": filter_func(context.pop("stages", {})),
                 },
            },
-            run_id=run_id,
        )
 
     def execute(self, params: DictData, run_id: str | None = None) -> Result:
@@ -619,7 +629,7 @@
 
         :rtype: Result
         """
-        rs_final: Result = Result()
+        rs_final: Result = Result(run_id=run_id)
         context: DictData = {}
         status: int = 0
 
@@ -631,7 +641,7 @@
             nd: str = (
                 f", the strategies do not run is {not_done}" if not_done else ""
             )
-            logger.debug(f"({run_id}) [JOB]: Strategy is set Fail Fast{nd}")
+            logger.debug(f"({cut_id(run_id)}) [JOB]: Strategy is set Fail Fast{nd}")
 
             # NOTE:
             # Stop all running tasks with setting the event manager and cancel
@@ -649,7 +659,7 @@
                 if err := future.exception():
                     status: int = 1
                     logger.error(
-                        f"({run_id}) [JOB]: Fail-fast catching:\n\t"
+                        f"({cut_id(run_id)}) [JOB]: Fail-fast catching:\n\t"
                         f"{future.exception()}"
                     )
                     context.update(
@@ -680,7 +690,7 @@
 
         :rtype: Result
         """
-        rs_final: Result = Result()
+        rs_final: Result = Result(run_id=run_id)
         context: DictData = {}
         status: int = 0
 
@@ -690,7 +700,7 @@
         except JobException as err:
             status = 1
             logger.error(
-                f"({run_id}) [JOB]: All-completed catching:\n\t"
+                f"({cut_id(run_id)}) [JOB]: All-completed catching:\n\t"
                 f"{err.__class__.__name__}:\n\t{err}"
             )
             context.update(
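The new `Job.check_needs` is what both workflow executors poll before releasing a job (see the `if not job.check_needs(context["jobs"])` hunks in workflow.py below). A standalone sketch of the same readiness test:

```python
from typing import Any


def check_needs(needs: list[str], jobs: dict[str, Any]) -> bool:
    # A job is ready once every ID in its `needs` list already appears in
    # the mapping of finished jobs (mirrors Job.check_needs above).
    return all(need in jobs for need in needs)


assert check_needs(["extract"], {"extract": {"stages": {}}})
assert not check_needs(["extract", "load"], {"extract": {"stages": {}}})
```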
ddeutil/workflow/on.py
CHANGED
ddeutil/workflow/stage.py
CHANGED
@@ -55,6 +55,7 @@ from .utils import (
     Registry,
     Result,
     TagFunc,
+    cut_id,
     gen_id,
     make_exec,
     make_registry,
@@ -124,13 +125,16 @@ def handler_result(message: str | None = None) -> DecoratorResult:
         run_id: str = gen_id(self.name + (self.id or ""), unique=True)
         kwargs["run_id"] = run_id
 
+        rs_raise: Result = Result(status=1, run_id=run_id)
+
         try:
             # NOTE: Start calling origin function with a passing args.
             return func(self, *args, **kwargs)
         except Exception as err:
             # NOTE: Start catching error from the stage execution.
             logger.error(
-                f"({run_id}) [STAGE]: {err.__class__.__name__}: {err}"
+                f"({cut_id(run_id)}) [STAGE]: {err.__class__.__name__}: "
+                f"{err}"
             )
             if config.stage_raise_error:
                 # NOTE: If error that raise from stage execution course by
@@ -147,13 +151,12 @@ def handler_result(message: str | None = None) -> DecoratorResult:
 
             # NOTE: Catching exception error object to result with
             # error_message and error keys.
-            return Result(
+            return rs_raise.catch(
                 status=1,
                 context={
                     "error": err,
                     "error_message": f"{err.__class__.__name__}: {err}",
                 },
-                run_id=run_id,
             )
 
     return wrapped
@@ -339,7 +342,7 @@ class EmptyStage(BaseStage):
         :rtype: Result
         """
         logger.info(
-            f"({run_id}) [STAGE]: Empty-Execute: {self.name!r}: "
+            f"({cut_id(run_id)}) [STAGE]: Empty-Execute: {self.name!r}: "
             f"( {param2template(self.echo, params=params) or '...'} )"
         )
         if self.sleep > 0:
@@ -393,7 +396,9 @@ class BashStage(BaseStage):
         f_name: str = f"{run_id}.sh"
         f_shebang: str = "bash" if sys.platform.startswith("win") else "sh"
 
-        logger.debug(f"({run_id}) [STAGE]: Start create `{f_name}` file.")
+        logger.debug(
+            f"({cut_id(run_id)}) [STAGE]: Start create `{f_name}` file."
+        )
 
         with open(f"./{f_name}", mode="w", newline="\n") as f:
             # NOTE: write header of `.sh` file
@@ -425,7 +430,7 @@ class BashStage(BaseStage):
         """
         bash: str = param2template(dedent(self.bash), params)
 
-        logger.info(f"({run_id}) [STAGE]: Shell-Execute: {self.name}")
+        logger.info(f"({cut_id(run_id)}) [STAGE]: Shell-Execute: {self.name}")
         with self.create_sh_file(
             bash=bash, env=param2template(self.env, params), run_id=run_id
         ) as sh:
@@ -535,7 +540,7 @@ class PyStage(BaseStage):
         lc: DictData = {}
 
         # NOTE: Start exec the run statement.
-        logger.info(f"({run_id}) [STAGE]: Py-Execute: {self.name}")
+        logger.info(f"({cut_id(run_id)}) [STAGE]: Py-Execute: {self.name}")
 
         # WARNING: The exec build-in function is vary dangerous. So, it
         # should us the re module to validate exec-string before running.
@@ -660,7 +665,8 @@ class HookStage(BaseStage):
             args[k] = args.pop(k.removeprefix("_"))
 
         logger.info(
-            f"({run_id}) [STAGE]: Hook-Execute: {t_func.name}@{t_func.tag}"
+            f"({cut_id(run_id)}) [STAGE]: Hook-Execute: "
+            f"{t_func.name}@{t_func.tag}"
         )
         rs: DictData = t_func(**param2template(args, params))
 
@@ -716,7 +722,9 @@ class TriggerStage(BaseStage):
         # NOTE: Set running workflow ID from running stage ID to external
         # params on Loader object.
         wf: Workflow = Workflow.from_loader(name=_trigger)
-        logger.info(f"({run_id}) [STAGE]: Trigger-Execute: {_trigger!r}")
+        logger.info(
+            f"({cut_id(run_id)}) [STAGE]: Trigger-Execute: {_trigger!r}"
+        )
         return wf.execute(
             params=param2template(self.params, params),
             run_id=run_id,
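The refactor running through job.py and stage.py has one shape: construct a single `Result` carrying the `run_id` up front, then make every exit path `return rs.catch(status=..., context=...)` instead of building a fresh `Result(..., run_id=run_id)` at each return site. A simplified sketch of that pattern (a stand-in `Result`, not the package's class):

```python
from dataclasses import dataclass, field


@dataclass
class Result:
    # Simplified stand-in for ddeutil.workflow.utils.Result.
    status: int = 2
    context: dict = field(default_factory=dict)
    run_id: str | None = None

    def catch(self, status: int, context: dict) -> "Result":
        # Mutate and return self so `return rs.catch(...)` keeps the
        # run_id attached at construction time.
        self.status = status
        self.context.update(context)
        return self


rs = Result(run_id="254567")
assert rs.catch(status=0, context={"stages": {}}).run_id == "254567"
```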
ddeutil/workflow/utils.py
CHANGED
@@ -430,7 +430,7 @@ class Result:
         return self
 
 
-def make_exec(path: str | Path) -> None:
+def make_exec(path: str | Path) -> None:
     """Change mode of file to be executable file.
 
     :param path: A file path that want to make executable permission.
@@ -451,7 +451,9 @@ FILTERS: dict[str, callable] = {  # pragma: no cov
 
 
 class FilterFunc(Protocol):
-    """Tag Function Protocol"""
+    """Tag Function Protocol. This protocol that use to represent any callable
+    object that able to access the name attribute.
+    """
 
     name: str
 
@@ -814,3 +816,17 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
 
 def queue2str(queue: list[datetime]) -> Iterator[str]:  # pragma: no cov
     return (f"{q:%Y-%m-%d %H:%M:%S}" for q in queue)
+
+
+def cut_id(run_id: str, *, num: int = 6):
+    """Cutting running ID with length.
+
+    Example:
+        >>> cut_id(run_id='668931127320241228100331254567')
+        '254567'
+
+    :param run_id:
+    :param num:
+    :return:
+    """
+    return run_id[-num:]
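This small helper is behind every logging change in the release: prefixes shrink from the full running ID to its six-character tail. For example:

```python
def cut_id(run_id: str, *, num: int = 6) -> str:
    # Same behavior as the helper added above: keep the last `num` chars.
    return run_id[-num:]


run_id = "668931127320241228100331254567"
print(f"({cut_id(run_id)}) [JOB]: Execute stage ...")
# (254567) [JOB]: Execute stage ...
```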
ddeutil/workflow/workflow.py
CHANGED
@@ -49,6 +49,7 @@ from .on import On
 from .utils import (
     Param,
     Result,
+    cut_id,
     delay,
     gen_id,
     get_diff_sec,
@@ -123,12 +124,38 @@ class WorkflowRelease:
 
 @dataclass
 class WorkflowQueue:
-    """Workflow Queue object."""
+    """Workflow Queue object that is management of WorkflowRelease objects."""
 
     queue: list[WorkflowRelease] = field(default_factory=list)
     running: list[WorkflowRelease] = field(default_factory=list)
     complete: list[WorkflowRelease] = field(default_factory=list)
 
+    @classmethod
+    def from_list(
+        cls, queue: list[datetime] | list[WorkflowRelease] | None
+    ) -> Self:
+        """Construct WorkflowQueue object from an input queue value that passing
+        with list of datetime or list of WorkflowRelease.
+
+        :raise TypeError: If the type of an input queue does not valid.
+
+        :rtype: Self
+        """
+        if queue is None:
+            return cls()
+        elif isinstance(queue, list):
+
+            if all(isinstance(q, datetime) for q in queue):
+                return cls(queue=[WorkflowRelease.from_dt(q) for q in queue])
+
+            elif all(isinstance(q, WorkflowRelease) for q in queue):
+                return cls(queue=queue)
+
+        raise TypeError(
+            "Type of the queue does not valid with WorkflowQueue "
+            "or list of datetime or list of WorkflowRelease."
+        )
+
     @property
     def is_queued(self) -> bool:
         """Return True if it has workflow release object in the queue.
@@ -137,34 +164,35 @@ class WorkflowQueue:
         """
         return len(self.queue) > 0
 
-    def check_queue(self,
+    def check_queue(self, value: WorkflowRelease) -> bool:
         """Check a WorkflowRelease value already exists in list of tracking
         queues.
 
-        :param
+        :param value: A WorkflowRelease object that want to check it already in
+            queues.
 
         :rtype: bool
         """
         return (
-            (
-            or (
-            or (
+            (value in self.queue)
+            or (value in self.running)
+            or (value in self.complete)
         )
 
-    def push_queue(self,
+    def push_queue(self, value: WorkflowRelease) -> Self:
         """Push data to the queue."""
-        heappush(self.queue,
+        heappush(self.queue, value)
         return self
 
-    def push_running(self,
+    def push_running(self, value: WorkflowRelease) -> Self:
         """Push data to the running."""
-        heappush(self.running,
+        heappush(self.running, value)
         return self
 
-    def remove_running(self,
+    def remove_running(self, value: WorkflowRelease) -> Self:
         """Remove data on the running if it exists."""
-        if
-            self.running.remove(
+        if value in self.running:
+            self.running.remove(value)
 
 
 class Workflow(BaseModel):
@@ -283,7 +311,7 @@ class Workflow(BaseModel):
         return dedent(value)
 
     @field_validator("on", mode="after")
-    def
+    def __on_no_dup_and_reach_limit__(cls, value: list[On]) -> list[On]:
         """Validate the on fields should not contain duplicate values and if it
         contain the every minute value more than one value, it will remove to
         only one value.
@@ -306,6 +334,12 @@
         # "If it has every minute cronjob on value, it should has only "
         # "one value in the on field."
         # )
+
+        if len(set_ons) > config.max_on_per_workflow:
+            raise ValueError(
+                f"The number of the on should not more than "
+                f"{config.max_on_per_workflow} crontab."
+            )
         return value
 
     @model_validator(mode="after")
@@ -404,22 +438,21 @@
         self,
         release: datetime | WorkflowRelease,
         params: DictData,
-        run_id: str | None = None,
         *,
+        run_id: str | None = None,
         log: type[Log] = None,
-        queue:
+        queue: (
+            WorkflowQueue | list[datetime] | list[WorkflowRelease] | None
+        ) = None,
     ) -> Result:
         """Release the workflow execution with overriding parameter with the
         release templating that include logical date (release date), execution
         date, or running id to the params.
 
         This method allow workflow use log object to save the execution
-        result to log destination like file log to local `/logs` directory.
-
-        I will add sleep with 0.15 seconds on every step that interact with
-        the queue object.
+        result to log destination like file log to the local `/logs` directory.
 
-        :param release: A release datetime.
+        :param release: A release datetime or WorkflowRelease object.
         :param params: A workflow parameter that pass to execute method.
         :param queue: A list of release time that already queue.
         :param run_id: A workflow running ID for this release.
@@ -430,23 +463,23 @@
         """
         log: type[Log] = log or FileLog
         run_id: str = run_id or gen_id(self.name, unique=True)
+        rs_release: Result = Result(run_id=run_id)
 
         # VALIDATE: Change queue value to WorkflowQueue object.
-        if queue is None:
-            queue: WorkflowQueue = WorkflowQueue()
-        elif isinstance(queue, list):
-            queue: WorkflowQueue = WorkflowQueue(queue=queue)
+        if queue is None or isinstance(queue, list):
+            queue: WorkflowQueue = WorkflowQueue.from_list(queue)
 
         # VALIDATE: Change release value to WorkflowRelease object.
         if isinstance(release, datetime):
             release: WorkflowRelease = WorkflowRelease.from_dt(release)
 
         logger.debug(
-            f"({run_id}) [RELEASE]: {self.name!r} : "
-            f"
+            f"({cut_id(run_id)}) [RELEASE]: {self.name!r} : Start release - "
+            f"{release.date:%Y-%m-%d %H:%M:%S}"
         )
 
-        # NOTE: Release
+        # NOTE: Release parameters that use to templating on the schedule
+        # config data.
         release_params: DictData = {
             "release": {
                 "logical_date": release.date,
@@ -456,14 +489,14 @@
             }
         }
 
-        #
+        # NOTE: Execute workflow with templating params from release mapping.
         rs: Result = self.execute(
             params=param2template(params, release_params),
             run_id=run_id,
         )
         logger.debug(
-            f"({run_id}) [RELEASE]: {self.name!r} : "
-            f"
+            f"({cut_id(run_id)}) [RELEASE]: {self.name!r} : End release - "
+            f"{release.date:%Y-%m-%d %H:%M:%S}"
         )
 
         rs.set_parent_run_id(run_id)
@@ -485,16 +518,13 @@
         queue.remove_running(release)
         heappush(queue.complete, release)
 
-        return Result(
+        return rs_release.catch(
             status=0,
             context={
                 "params": params,
-                "release": {
-
-                    "logical_date": release.date,
-                },
+                "release": {"status": "success", "logical_date": release.date},
+                "outputs": rs.context,
             },
-            run_id=run_id,
         )
 
     def queue_poking(
@@ -503,14 +533,20 @@
         end_date: datetime,
         queue: WorkflowQueue,
         log: type[Log],
+        *,
+        force_run: bool = False,
     ) -> WorkflowQueue:
         """Generate queue of datetime from the cron runner that initialize from
         the on field. with offset value.
 
-        :param offset:
-        :param end_date:
-        :param queue:
-        :param log:
+        :param offset: A offset in second unit for time travel.
+        :param end_date: An end datetime object.
+        :param queue: A workflow queue object.
+        :param log: A log class that want to making log object.
+        :param force_run: A flag that allow to release workflow if the log with
+            that release was pointed.
+
+        :rtype: WorkflowQueue
         """
         for on in self.on:
 
@@ -518,6 +554,7 @@
                 get_dt_now(tz=config.tz, offset=offset).replace(microsecond=0)
             )
 
+            # NOTE: Skip this runner date if it more than the end date.
             if runner.date > end_date:
                 continue
 
@@ -529,8 +566,9 @@
                 type="poking",
             )
 
-            while queue.check_queue(
+            while queue.check_queue(workflow_release) or (
                 log.is_pointed(name=self.name, release=workflow_release.date)
+                and not force_run
             ):
                 workflow_release = WorkflowRelease(
                     date=runner.next,
@@ -543,36 +581,50 @@
             if runner.date > end_date:
                 continue
 
+            # NOTE: Push the WorkflowRelease object to queue.
             queue.push_queue(workflow_release)
+
         return queue
 
     def poke(
         self,
         start_date: datetime | None = None,
         params: DictData | None = None,
+        *,
         run_id: str | None = None,
         periods: int = 1,
-        *,
         log: Log | None = None,
+        force_run: bool = False,
+        timeout: int = 1800,
     ) -> list[Result]:
-        """Poke workflow with
-
-
+        """Poke this workflow with start datetime value that passing to its
+        ``on`` field with threading executor pool for executing with all its
+        schedules that was set on the `on` value.
+
+        This method will observe its schedule that nearing to run with the
         ``self.release()`` method.
 
         :param start_date: A start datetime object.
         :param params: A parameters that want to pass to the release method.
         :param run_id: A workflow running ID for this poke.
-        :param periods: A periods
+        :param periods: A periods in minutes value that use to run this poking.
         :param log: A log object that want to use on this poking process.
+        :param force_run: A flag that allow to release workflow if the log with
+            that release was pointed.
+        :param timeout: A second value for timeout while waiting all futures
+            run completely.
 
         :rtype: list[Result]
+        :return: A list of all results that return from ``self.release`` method.
         """
+        log: type[Log] = log or FileLog
+        run_id: str = run_id or gen_id(self.name, unique=True)
+
         # NOTE: If this workflow does not set the on schedule, it will return
         # empty result.
         if len(self.on) == 0:
             logger.info(
-                f"({run_id}) [POKING]: {self.name!r} does not have any "
+                f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have any "
                 f"schedule to run."
             )
             return []
@@ -592,82 +644,94 @@
         start_date: datetime = current_date
         offset: float = 0
 
+        # NOTE: End date is use to stop generate queue with an input periods
+        # value.
         end_date: datetime = start_date + timedelta(minutes=periods)
 
-        log: type[Log] = log or FileLog
-        run_id: str = run_id or gen_id(self.name, unique=True)
         logger.info(
-            f"({run_id}) [POKING]: Start Poking: {self.name!r} from "
+            f"({cut_id(run_id)}) [POKING]: Start Poking: {self.name!r} from "
             f"{start_date:%Y-%m-%d %H:%M:%S} to {end_date:%Y-%m-%d %H:%M:%S}"
         )
 
-        params: DictData = params
-
+        params: DictData = {} if params is None else params
+        wf_queue: WorkflowQueue = WorkflowQueue()
         results: list[Result] = []
         futures: list[Future] = []
 
+        # NOTE: Make queue to the workflow queue object.
         self.queue_poking(
-            offset,
+            offset,
+            end_date=end_date,
+            queue=wf_queue,
+            log=log,
+            force_run=force_run,
         )
-
-        if len(workflow_queue.queue) == 0:
+        if not wf_queue.is_queued:
             logger.info(
-                f"({run_id}) [POKING]: {self.name!r} does not have
-                f"queue
+                f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have "
+                f"any queue."
             )
             return []
 
+        # NOTE: Start create the thread pool executor for running this poke
+        # process.
         with ThreadPoolExecutor(
             max_workers=config.max_poking_pool_worker,
-            thread_name_prefix="
+            thread_name_prefix="wf_poking_",
         ) as executor:
 
-            while
+            while wf_queue.is_queued:
+
+                # NOTE: Pop the latest WorkflowRelease object from queue.
+                release: WorkflowRelease = heappop(wf_queue.queue)
 
-                wf_release: WorkflowRelease = heappop(workflow_queue.queue)
                 if (
-                    wf_release.date - get_dt_now(tz=config.tz, offset=offset)
+                    release.date - get_dt_now(tz=config.tz, offset=offset)
                 ).total_seconds() > 60:
                     logger.debug(
-                        f"({run_id}) [POKING]:
-                        f"release has diff time more than 60 seconds "
+                        f"({cut_id(run_id)}) [POKING]: Wait because the latest "
+                        f"release has diff time more than 60 seconds ..."
                     )
-                    heappush(
+                    heappush(wf_queue.queue, release)
                     delay(60)
+
+                    # WARNING: I already call queue poking again because issue
+                    # about the every minute crontab.
                     self.queue_poking(
-                        offset,
+                        offset,
+                        end_date,
+                        queue=wf_queue,
+                        log=log,
+                        force_run=force_run,
                     )
                     continue
 
-                # NOTE: Push the
-
+                # NOTE: Push the latest WorkflowRelease to the running queue.
+                wf_queue.push_running(release)
 
                 futures.append(
                     executor.submit(
                         self.release,
-                        release=
+                        release=release,
                         params=params,
                         log=log,
-                        queue=
+                        queue=wf_queue,
                     )
                 )
 
                 self.queue_poking(
-                    offset,
+                    offset,
+                    end_date,
+                    queue=wf_queue,
+                    log=log,
+                    force_run=force_run,
                 )
 
             # WARNING: This poking method does not allow to use fail-fast
             # logic to catching parallel execution result.
-            for future in as_completed(futures):
+            for future in as_completed(futures, timeout=timeout):
                 results.append(future.result().set_parent_run_id(run_id))
 
-            while len(workflow_queue.running) > 0:  # pragma: no cov
-                logger.warning(
-                    f"({run_id}) [POKING]: Running does empty when poking "
-                    f"process was finishing."
-                )
-                delay(10)
-
         return results
 
     def execute_job(
@@ -685,6 +749,8 @@
         model. It different with ``self.execute`` because this method run only
         one job and return with context of this job data.
 
+        :raise WorkflowException: If execute with not exist job's ID.
+        :raise WorkflowException: If the job execution raise JobException.
         :raise NotImplementedError: If set raise_error argument to False.
 
         :param job_id: A job ID that want to execute.
@@ -694,8 +760,11 @@
             if it get exception from job execution.
 
         :rtype: Result
+        :return: Return the result object that receive the job execution result
+            context.
         """
         run_id: str = run_id or gen_id(self.name, unique=True)
+        rs: Result = Result(run_id=run_id)
 
         # VALIDATE: check a job ID that exists in this workflow or not.
         if job_id not in self.jobs:
@@ -704,10 +773,14 @@
                 f"workflow."
             )
 
-        logger.info(
+        logger.info(
+            f"({cut_id(run_id)}) [WORKFLOW]: Start execute job: {job_id!r}"
+        )
 
         # IMPORTANT:
-        #
+        #     This execution change all job running IDs to the current workflow
+        #     execution running ID (with passing run_id to the job execution
+        #     argument).
         #
         try:
             job: Job = self.jobs[job_id]
@@ -717,7 +790,8 @@
             )
         except JobException as err:
             logger.error(
-                f"({run_id}) [WORKFLOW]: {err.__class__.__name__}: {err}"
+                f"({cut_id(run_id)}) [WORKFLOW]: {err.__class__.__name__}: "
+                f"{err}"
             )
             if raise_error:
                 raise WorkflowException(
@@ -727,7 +801,7 @@
                 "Handle error from the job execution does not support yet."
             ) from None
 
-        return
+        return rs.catch(status=0, context=params)
 
     def execute(
         self,
@@ -761,7 +835,9 @@
         :rtype: Result
         """
         run_id: str = run_id or gen_id(self.name, unique=True)
-        logger.info(
+        logger.info(
+            f"({cut_id(run_id)}) [WORKFLOW]: Start Execute: {self.name!r} ..."
+        )
 
         # NOTE: I use this condition because this method allow passing empty
         # params and I do not want to create new dict object.
@@ -771,7 +847,7 @@
         # NOTE: It should not do anything if it does not have job.
         if not self.jobs:
             logger.warning(
-                f"({run_id}) [WORKFLOW]: This workflow: {self.name!r} "
+                f"({cut_id(run_id)}) [WORKFLOW]: This workflow: {self.name!r} "
                 f"does not have any jobs"
             )
             return rs.catch(status=0, context=params)
@@ -846,14 +922,14 @@
         not_timeout_flag: bool = True
         timeout: int = timeout or config.max_job_exec_timeout
         logger.debug(
-            f"({run_id}) [WORKFLOW]: Run {self.name} with threading
+            f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with threading."
        )
 
        # IMPORTANT: The job execution can run parallel and waiting by
        # needed.
        with ThreadPoolExecutor(
            max_workers=config.max_job_parallel,
-            thread_name_prefix="
+            thread_name_prefix="wf_exec_threading_",
        ) as executor:
            futures: list[Future] = []
 
@@ -863,7 +939,7 @@
             job_id: str = job_queue.get()
             job: Job = self.jobs[job_id]
 
-            if
+            if not job.check_needs(context["jobs"]):
                 job_queue.task_done()
                 job_queue.put(job_id)
                 time.sleep(0.25)
@@ -872,10 +948,13 @@
             # NOTE: Start workflow job execution with deep copy context data
             # before release.
             #
+            # Context:
+            # ---
             # {
             #   'params': <input-params>,
-            #   'jobs': {},
+            #   'jobs': { <job's-id>: ... },
             # }
+            #
             futures.append(
                 executor.submit(
                     self.execute_job,
@@ -895,8 +974,8 @@
 
             for future in as_completed(futures, timeout=thread_timeout):
                 if err := future.exception():
-                    logger.error(f"({run_id}) [WORKFLOW]: {err}")
-                    raise WorkflowException(
+                    logger.error(f"({cut_id(run_id)}) [WORKFLOW]: {err}")
+                    raise WorkflowException(str(err))
 
                 # NOTE: This getting result does not do anything.
                 future.result()
@@ -908,12 +987,10 @@
 
         # NOTE: Raise timeout error.
         logger.warning(
-            f"({run_id}) [WORKFLOW]: Execution
-            f"was timeout"
-        )
-        raise WorkflowException(
-            f"Execution of workflow: {self.name} was timeout"
+            f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
+            f"was timeout."
         )
+        raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
     def __exec_non_threading(
         self,
@@ -940,8 +1017,8 @@
         not_timeout_flag: bool = True
         timeout: int = timeout or config.max_job_exec_timeout
         logger.debug(
-            f"({run_id}) [WORKFLOW]: Run {self.name} with
-            f"
+            f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name} with "
+            f"non-threading."
         )
 
         while not job_queue.empty() and (
@@ -951,7 +1028,7 @@
             job: Job = self.jobs[job_id]
 
             # NOTE: Waiting dependency job run successful before release.
-            if
+            if not job.check_needs(context["jobs"]):
                 job_queue.task_done()
                 job_queue.put(job_id)
                 time.sleep(0.075)
@@ -980,11 +1057,10 @@
 
         # NOTE: Raise timeout error.
         logger.warning(
-            f"({run_id}) [WORKFLOW]: Execution
-
-        raise WorkflowException(
-            f"Execution of workflow: {self.name} was timeout"
+            f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
+            f"was timeout."
         )
+        raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
 
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
@@ -1035,15 +1111,15 @@ class WorkflowTaskData:
         next_time: datetime = runner.next
 
         logger.debug(
-            f"({run_id}) [CORE]: {self.workflow.name!r} : "
-            f"{next_time:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+            f"{runner.cron} : {next_time:%Y-%m-%d %H:%M:%S}"
         )
         heappush(queue[self.alias], next_time)
         start_sec: float = time.monotonic()
 
         if get_diff_sec(next_time, tz=runner.tz) > waiting_sec:
             logger.debug(
-                f"({run_id}) [WORKFLOW]: {self.workflow.name!r} : "
+                f"({cut_id(run_id)}) [WORKFLOW]: {self.workflow.name!r} : "
                 f"{runner.cron} "
                 f": Does not closely >> {next_time:%Y-%m-%d %H:%M:%S}"
             )
@@ -1056,8 +1132,8 @@
             return
 
         logger.debug(
-            f"({run_id}) [CORE]: {self.workflow.name!r} : "
-            f"Closely to run >> {next_time:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+            f"{runner.cron} : Closely to run >> {next_time:%Y-%m-%d %H:%M:%S}"
        )
 
        # NOTE: Release when the time is nearly to schedule time.
@@ -1065,8 +1141,8 @@
            sleep_interval + 5
        ):
            logger.debug(
-                f"({run_id}) [CORE]: {self.workflow.name!r} : "
-                f": Sleep until: {duration}"
+                f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+                f"{runner.cron} : Sleep until: {duration}"
            )
            time.sleep(15)
 
@@ -1085,8 +1161,8 @@
            params=param2template(self.params, release_params),
        )
        logger.debug(
-            f"({run_id}) [CORE]: {self.workflow.name!r} : "
-            f"End release - {next_time:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+            f"{runner.cron} : End release - {next_time:%Y-%m-%d %H:%M:%S}"
        )
 
        # NOTE: Set parent ID on this result.
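Taken together, the workflow.py changes above are mostly additive API surface. A hedged sketch of how the new pieces would be driven (the workflow name and import path are assumptions for illustration; the classes, methods, and keyword arguments come from the hunks above):

```python
from datetime import datetime

from ddeutil.workflow.workflow import Workflow, WorkflowQueue

# Build a queue from plain datetimes; the new from_list classmethod wraps
# them in WorkflowRelease objects (None gives an empty queue, and any other
# type raises TypeError).
queue = WorkflowQueue.from_list(
    [datetime(2024, 12, 28), datetime(2024, 12, 29)]
)

# Hypothetical workflow name; declaring more than 5 distinct `on` crontabs
# would now fail validation via __on_no_dup_and_reach_limit__.
wf: Workflow = Workflow.from_loader(name="wf-scheduling")

# force_run releases even when a log record is already pointed for the
# release date; timeout bounds the as_completed() wait (default 1800s).
results = wf.poke(periods=5, force_run=True, timeout=600)
```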
{ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.21
+Version: 0.0.22
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -196,6 +196,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
 | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+| `WORKFLOW_CORE_MAX_ON_PER_WORKFLOW` | Core | 5 | | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
ddeutil_workflow-0.0.22.dist-info/RECORD
ADDED
@@ -0,0 +1,22 @@
+ddeutil/workflow/__about__.py,sha256=hJavfsPLTnuXMwKFo9HZgsq6b7tJpWgyfttwaxzMujE,28
+ddeutil/workflow/__cron.py,sha256=_2P9nmGOwGdv5bLgf9TpML2HBgqLv_qRgiO1Rulo1PA,26693
+ddeutil/workflow/__init__.py,sha256=DCSN0foPFlFLN_Q4uoWa_EBBlKeMHXGpOdr-lWHISrQ,1422
+ddeutil/workflow/__types.py,sha256=Ia7f38kvL3NibwmRKi0wQ1ud_45Z-SojYGhNJwIqcu8,3713
+ddeutil/workflow/api.py,sha256=vUT2RVS9sF3hvY-IrzAEnahxwq4ZFYP0G3xfctHbNsw,4701
+ddeutil/workflow/cli.py,sha256=baHhvtI8snbHYHeThoX401Cd6SMB2boyyCbCtTrIl3E,3278
+ddeutil/workflow/conf.py,sha256=GsbuJDQfQoAGiR4keUEoB4lKfZxdkaiZ4N4FfIHc0xY,15814
+ddeutil/workflow/exceptions.py,sha256=NqnQJP52S59XIYMeXbTDbr4xH2UZ5EA3ejpU5Z4g6cQ,894
+ddeutil/workflow/job.py,sha256=liu8M_pUhAGHZ_Ez922jI94LCC3yioI-Tw5o71Zy88w,24216
+ddeutil/workflow/on.py,sha256=wxKfL2u-bBhPbDtZbhqE2lZoPVukHA1zq-qrg0ldic0,7469
+ddeutil/workflow/repeat.py,sha256=s0azh-f5JQeow7kpxM8GKlqgAmKL7oU6St3L4Ggx4cY,4925
+ddeutil/workflow/route.py,sha256=JALwOH6xKu5rnII7DgA1Lbp_E5ehCoBbOW_eKqB_Olk,6753
+ddeutil/workflow/scheduler.py,sha256=B2uXsqzmp32nIbya8EDePYyRhpwcxCMeoibPABCuMOA,18750
+ddeutil/workflow/stage.py,sha256=ADFqExFmD8Y00A86TSS05HpabvsLV7_dbLrzD31TkK8,26490
+ddeutil/workflow/utils.py,sha256=0GaHpRL1HuyES1NS7r56DFgloOVftYVAvAdVgIbPA_k,26001
+ddeutil/workflow/workflow.py,sha256=fzhKJx9s-RF95FQ0tAvAQ1nsL8dsp_py2Ea5TGnjsOk,41542
+ddeutil_workflow-0.0.22.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.22.dist-info/METADATA,sha256=J_VrfU8ZBPAa7OrhMV_c4sLbQ0g3Nc0MQSdYUgmxF6I,14017
+ddeutil_workflow-0.0.22.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ddeutil_workflow-0.0.22.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
+ddeutil_workflow-0.0.22.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.22.dist-info/RECORD,,
ddeutil_workflow-0.0.21.dist-info/RECORD
DELETED
@@ -1,22 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=at-1IOQn4CID6hNRKtCzRBtCjVNyizp3IXd27XWqHPQ,28
-ddeutil/workflow/__cron.py,sha256=_2P9nmGOwGdv5bLgf9TpML2HBgqLv_qRgiO1Rulo1PA,26693
-ddeutil/workflow/__init__.py,sha256=DCSN0foPFlFLN_Q4uoWa_EBBlKeMHXGpOdr-lWHISrQ,1422
-ddeutil/workflow/__types.py,sha256=Ia7f38kvL3NibwmRKi0wQ1ud_45Z-SojYGhNJwIqcu8,3713
-ddeutil/workflow/api.py,sha256=vUT2RVS9sF3hvY-IrzAEnahxwq4ZFYP0G3xfctHbNsw,4701
-ddeutil/workflow/cli.py,sha256=baHhvtI8snbHYHeThoX401Cd6SMB2boyyCbCtTrIl3E,3278
-ddeutil/workflow/conf.py,sha256=KSwEHlZ_2I-bWGNKllDvkxNLy1WdHjUGxGH45_A3K5w,15717
-ddeutil/workflow/exceptions.py,sha256=NqnQJP52S59XIYMeXbTDbr4xH2UZ5EA3ejpU5Z4g6cQ,894
-ddeutil/workflow/job.py,sha256=jj8f8SyFD6GQyTZnxhviiDGv2ELVybMmVDDoxORoy1A,23875
-ddeutil/workflow/on.py,sha256=3Typ9YS2303LTijGK4ytN38ZLC0Gyq55HtFd0lm97Ic,7391
-ddeutil/workflow/repeat.py,sha256=s0azh-f5JQeow7kpxM8GKlqgAmKL7oU6St3L4Ggx4cY,4925
-ddeutil/workflow/route.py,sha256=JALwOH6xKu5rnII7DgA1Lbp_E5ehCoBbOW_eKqB_Olk,6753
-ddeutil/workflow/scheduler.py,sha256=B2uXsqzmp32nIbya8EDePYyRhpwcxCMeoibPABCuMOA,18750
-ddeutil/workflow/stage.py,sha256=Tzq-ciMZXKNUZ4cH4djyPIZ8aqj_P1Qm5zYZqYF8bDM,26301
-ddeutil/workflow/utils.py,sha256=IUTj7c6Jsi1oNHP7inLpv1TYhAA44lEMU1n5nNa1-bk,25657
-ddeutil/workflow/workflow.py,sha256=w800yMcajcIxdhWXgmqtPYGiiU2ftwsjyqqqnh-1-7o,38405
-ddeutil_workflow-0.0.21.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.21.dist-info/METADATA,sha256=7JQpnRPdK6pOi2tzqK8_dquR5LHSlD0OP8agmVxCEUg,13800
-ddeutil_workflow-0.0.21.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-ddeutil_workflow-0.0.21.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
-ddeutil_workflow-0.0.21.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.21.dist-info/RECORD,,
{ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/LICENSE
File without changes
{ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/WHEEL
File without changes
{ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/entry_points.txt
File without changes
{ddeutil_workflow-0.0.21.dist-info → ddeutil_workflow-0.0.22.dist-info}/top_level.txt
File without changes