ddeutil-workflow 0.0.32__py3-none-any.whl → 0.0.33__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -2
- ddeutil/workflow/api/api.py +2 -2
- ddeutil/workflow/api/route.py +4 -3
- ddeutil/workflow/audit.py +261 -0
- ddeutil/workflow/conf.py +122 -265
- ddeutil/workflow/job.py +59 -52
- ddeutil/workflow/result.py +89 -37
- ddeutil/workflow/scheduler.py +7 -6
- ddeutil/workflow/stage.py +73 -54
- ddeutil/workflow/workflow.py +63 -64
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.33.dist-info}/METADATA +29 -25
- ddeutil_workflow-0.0.33.dist-info/RECORD +26 -0
- ddeutil_workflow-0.0.32.dist-info/RECORD +0 -25
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.33.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.33.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.33.dist-info}/top_level.txt +0 -0
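The headline change in 0.0.33, visible throughout stage.py and workflow.py below, is that execution no longer threads a bare `run_id` string through every call: stages, jobs, and workflows now share a single `Result` object that carries the run ID, a trace logger, and the final `Status`, while release logging moves to the new `audit` module. A minimal sketch of the new calling convention, using only names that appear in this diff (`Result`, `EmptyStage`, `handler_execute`); the stage values and run ID are illustrative:

```python
# Minimal sketch of the 0.0.33 calling convention; names come from this diff,
# the concrete stage values and run ID below are illustrative only.
from ddeutil.workflow.result import Result
from ddeutil.workflow.stage import EmptyStage

stage = EmptyStage(name="Echo hello", echo="hello world")

# A caller may build the Result up front so that the run ID, trace logger,
# and status are shared across every stage it drives; when omitted, the
# stage's handler_execute creates one itself.
rs: Result = stage.handler_execute(params={}, result=Result(run_id="demo-run"))
print(rs.status, rs.context)
```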
ddeutil/workflow/stage.py
CHANGED
@@ -45,7 +45,7 @@ from .__types import DictData, DictStr, TupleStr
 from .conf import config, get_logger
 from .exceptions import StageException
 from .hook import TagFunc, extract_hook
-from .result import Result
+from .result import Result, Status
 from .templates import not_in_template, param2template
 from .utils import (
     cut_id,
@@ -121,19 +121,26 @@ class BaseStage(BaseModel, ABC):
         return self

     @abstractmethod
-    def execute(
+    def execute(
+        self, params: DictData, *, result: Result | None = None
+    ) -> Result:
         """Execute abstraction method that action something by sub-model class.
         This is important method that make this class is able to be the stage.

         :param params: A parameter data that want to use in this execution.
-        :param
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
         raise NotImplementedError("Stage should implement ``execute`` method.")

     def handler_execute(
-        self,
+        self,
+        params: DictData,
+        *,
+        run_id: str | None = None,
+        result: Result | None = None,
     ) -> Result:
         """Handler result from the stage execution.

@@ -158,23 +165,25 @@ class BaseStage(BaseModel, ABC):
             from current stage ID before release the final result.

         :param params: A parameter data that want to use in this execution.
-        :param run_id: A running stage ID for this execution.
+        :param run_id: (str) A running stage ID for this execution.
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
-        if
-
+        if result is None:  # pragma: no cov
+            result: Result = Result(
+                run_id=(
+                    run_id or gen_id(self.name + (self.id or ""), unique=True)
+                ),
+            )

-        rs_raise: Result = Result(status=1, run_id=run_id)
         try:
             # NOTE: Start calling origin function with a passing args.
-            return self.execute(params,
+            return self.execute(params, result=result)
         except Exception as err:
             # NOTE: Start catching error from the stage execution.
-
-                f"({cut_id(run_id)}) [STAGE]: {err.__class__.__name__}: "
-                f"{err}"
-            )
+            result.trace.error(f"[STAGE]: {err.__class__.__name__}: {err}")
             if config.stage_raise_error:
                 # NOTE: If error that raise from stage execution course by
                 # itself, it will return that error with previous
@@ -190,8 +199,8 @@ class BaseStage(BaseModel, ABC):

             # NOTE: Catching exception error object to result with
             # error_message and error keys.
-            return
-                status=
+            return result.catch(
+                status=Status.FAILED,
                 context={
                     "error": err,
                     "error_message": f"{err.__class__.__name__}: {err}",
@@ -295,7 +304,9 @@ class EmptyStage(BaseStage):
         ge=0,
     )

-    def execute(
+    def execute(
+        self, params: DictData, *, result: Result | None = None
+    ) -> Result:
         """Execution method for the Empty stage that do only logging out to
         stdout. This method does not use the `handler_result` decorator because
         it does not get any error from logging function.
@@ -305,22 +316,21 @@

         :param params: A context data that want to add output result. But this
             stage does not pass any output.
-        :param
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
-
-            f"
+        result.trace.info(
+            f"[STAGE]: Empty-Execute: {self.name!r}: "
             f"( {param2template(self.echo, params=params) or '...'} )"
         )
         if self.sleep > 0:
             if self.sleep > 30:
-
-                    f"({cut_id(run_id)}) [STAGE]: ... sleep "
-                    f"({self.sleep} seconds)"
-                )
+                result.trace.info(f"[STAGE]: ... sleep ({self.sleep} seconds)")
             time.sleep(self.sleep)
-
+
+        return result.catch(status=Status.SUCCESS)


 class BashStage(BaseStage):
@@ -334,7 +344,7 @@ class BashStage(BaseStage):

    Data Validate:
        >>> stage = {
-        ...     "name": "Shell stage execution",
+        ...     "name": "The Shell stage execution",
         ...     "bash": 'echo "Hello $FOO"',
         ...     "env": {
         ...         "FOO": "BAR",
@@ -391,20 +401,25 @@
         # Note: Remove .sh file that use to run bash.
         Path(f"./{f_name}").unlink()

-    def execute(
+    def execute(
+        self, params: DictData, *, result: Result | None = None
+    ) -> Result:
         """Execute the Bash statement with the Python build-in ``subprocess``
         package.

         :param params: A parameter data that want to use in this execution.
-        :param
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
         bash: str = param2template(dedent(self.bash), params)

-
+        result.trace.info(f"[STAGE]: Shell-Execute: {self.name}")
         with self.create_sh_file(
-            bash=bash,
+            bash=bash,
+            env=param2template(self.env, params),
+            run_id=result.run_id,
         ) as sh:
             rs: CompletedProcess = subprocess.run(
                 sh, shell=False, capture_output=True, text=True
@@ -420,14 +435,13 @@
                 f"Subprocess: {err}\nRunning Statement:\n---\n"
                 f"```bash\n{bash}\n```"
             )
-        return
-            status=
+        return result.catch(
+            status=Status.SUCCESS,
             context={
                 "return_code": rs.returncode,
                 "stdout": rs.stdout.rstrip("\n") or None,
                 "stderr": rs.stderr.rstrip("\n") or None,
             },
-            run_id=run_id,
         )

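The BashStage hunks above only adjust the docstring example and route logging and output through the shared `Result` (`result.trace.info`, `result.catch(status=Status.SUCCESS, ...)`). A small usage sketch built from the `Data Validate` example in the docstring; it assumes pydantic v2's `model_validate` (the stage models are pydantic `BaseModel`s) and a POSIX shell on PATH:

```python
# Usage sketch built from the "Data Validate" example in the BashStage
# docstring above; assumes a POSIX shell is available on PATH.
from ddeutil.workflow.stage import BashStage

stage = BashStage.model_validate(
    {
        "name": "The Shell stage execution",
        "bash": 'echo "Hello $FOO"',
        "env": {"FOO": "BAR"},
    }
)

# handler_execute creates the Result when none is passed and catches errors.
rs = stage.handler_execute(params={})
print(rs.context.get("stdout"))  # expected: Hello BAR
```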
@@ -492,12 +506,15 @@ class PyStage(BaseStage):
         to.update({k: gb[k] for k in to if k in gb})
         return to

-    def execute(
+    def execute(
+        self, params: DictData, *, result: Result | None = None
+    ) -> Result:
         """Execute the Python statement that pass all globals and input params
         to globals argument on ``exec`` build-in function.

         :param params: A parameter that want to pass before run any statement.
-        :param
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
@@ -511,16 +528,14 @@
         lc: DictData = {}

         # NOTE: Start exec the run statement.
-
+        result.trace.info(f"[STAGE]: Py-Execute: {self.name}")

         # WARNING: The exec build-in function is very dangerous. So, it
         # should use the re module to validate exec-string before running.
         exec(run, _globals, lc)

-        return
-            status=
-            context={"locals": lc, "globals": _globals},
-            run_id=run_id,
+        return result.catch(
+            status=Status.SUCCESS, context={"locals": lc, "globals": _globals}
         )


@@ -552,7 +567,9 @@ class HookStage(BaseStage):
         alias="with",
     )

-    def execute(
+    def execute(
+        self, params: DictData, *, result: Result | None = None
+    ) -> Result:
         """Execute the Hook function that already in the hook registry.

         :raise ValueError: When the necessary arguments of hook function do not
@@ -562,7 +579,8 @@

         :param params: A parameter that want to pass before run any statement.
         :type params: DictData
-        :param
+        :param result: (Result) A result object for keeping context and status
+            data.
         :type: str | None

         :rtype: Result
@@ -571,7 +589,7 @@

         # VALIDATE: check input task caller parameters that exists before
         # calling.
-        args: DictData = param2template(self.args, params)
+        args: DictData = {"result": result} | param2template(self.args, params)
         ips = inspect.signature(t_func)
         if any(
             (k.removeprefix("_") not in args and k not in args)
@@ -587,10 +605,10 @@
             if k.removeprefix("_") in args:
                 args[k] = args.pop(k.removeprefix("_"))

-
-
-
-            )
+        if "result" not in ips.parameters:
+            args.pop("result")
+
+        result.trace.info(f"[STAGE]: Hook-Execute: {t_func.name}@{t_func.tag}")
         rs: DictData = t_func(**param2template(args, params))

         # VALIDATE:
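The HookStage hunks above inject the shared `Result` into the hook call (`args = {"result": result} | ...`) and pop it again when the target function's signature does not declare a `result` parameter, so existing hooks keep working unchanged. A hedged sketch of the two hook shapes this enables; the function names, arguments, and returned keys are illustrative, and registration with the package's hook/tag registry is omitted:

```python
# Hedged sketch of hook functions under the new HookStage behaviour shown
# above; names, arguments, and returned keys are illustrative only, and
# registration with the package's hook/tag registry is omitted.
from ddeutil.workflow.result import Result


def load_items(source: str, result: Result) -> dict:
    # Declaring "result" opts in: the hook logs with the same run ID and
    # trace as the stage that called it.
    result.trace.info(f"[HOOK]: loading items from {source!r}")
    return {"records": 3, "source": source}


def load_items_legacy(source: str) -> dict:
    # No "result" parameter: HookStage pops the injected key before calling,
    # so pre-0.0.33 hooks continue to work as-is.
    return {"records": 3, "source": source}
```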
@@ -600,7 +618,7 @@
             f"Return type: '{t_func.name}@{t_func.tag}' does not serialize "
             f"to result model, you change return type to `dict`."
         )
-        return
+        return result.catch(status=Status.SUCCESS, context=rs)


 class TriggerStage(BaseStage):
@@ -626,12 +644,15 @@
         description="A parameter that want to pass to workflow execution.",
     )

-    def execute(
+    def execute(
+        self, params: DictData, *, result: Result | None = None
+    ) -> Result:
         """Trigger another workflow execution. It will wait the trigger
         workflow running complete before catching its result.

         :param params: A parameter data that want to use in this execution.
-        :param
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
@@ -644,13 +665,11 @@
         # NOTE: Set running workflow ID from running stage ID to external
         # params on Loader object.
         wf: Workflow = Workflow.from_loader(name=_trigger)
-
-            f"({cut_id(run_id)}) [STAGE]: Trigger-Execute: {_trigger!r}"
-        )
+        result.trace.info(f"[STAGE]: Trigger-Execute: {_trigger!r}")
         return wf.execute(
             params=param2template(self.params, params),
-
-        )
+            result=result,
+        )


 # NOTE:
ddeutil/workflow/workflow.py
CHANGED
@@ -43,7 +43,8 @@ from typing_extensions import Self

 from .__cron import CronJob, CronRunner
 from .__types import DictData, TupleStr
-from .
+from .audit import Audit, get_audit
+from .conf import Loader, config, get_logger
 from .cron import On
 from .exceptions import JobException, WorkflowException
 from .job import Job
@@ -485,7 +486,7 @@ class Workflow(BaseModel):
         params: DictData,
         *,
         run_id: str | None = None,
-        log: type[
+        log: type[Audit] = None,
         queue: ReleaseQueue | None = None,
         override_log_name: str | None = None,
     ) -> Result:
@@ -515,7 +516,7 @@ class Workflow(BaseModel):

         :rtype: Result
         """
-        log: type[
+        log: type[Audit] = log or get_audit()
         name: str = override_log_name or self.name
         run_id: str = run_id or gen_id(name, unique=True)
         rs_release: Result = Result(run_id=run_id)
@@ -562,15 +563,14 @@ class Workflow(BaseModel):
         # NOTE: Saving execution result to destination of the input log object.
         logger.debug(f"({cut_id(run_id)}) [LOG]: Writing log: {name!r}.")
         (
-            log
-
-
-
-
-
-
-
-            }
+            log(
+                name=name,
+                release=release.date,
+                type=release.type,
+                context=rs.context,
+                parent_run_id=rs.parent_run_id,
+                run_id=rs.run_id,
+                execution_time=rs.alive_time(),
             ).save(excluded=None)
         )

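The four hunks above swap the former log class for the new audit module: `get_audit()` resolves the configured `Audit` implementation, and `Workflow.release` writes one record per release through it. A rough sketch of that write path, using only the keyword arguments visible in the hunk above; the concrete values, and passing `type` as a plain string, are assumptions for illustration:

```python
# Rough sketch of the audit write that Workflow.release performs in 0.0.33,
# mirroring the keyword arguments in the hunk above; the values (and passing
# "type" as a plain string) are illustrative assumptions.
from datetime import datetime

from ddeutil.workflow.audit import Audit, get_audit

audit_cls: type[Audit] = get_audit()  # the configured Audit model
audit_cls(
    name="wf-demo",
    release=datetime(2024, 1, 1, 1, 0),
    type="manual",
    context={"params": {}, "jobs": {}},
    parent_run_id=None,
    run_id="demo-run",
    execution_time=0.42,
).save(excluded=None)
```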
@@ -602,7 +602,7 @@ class Workflow(BaseModel):
         offset: float,
         end_date: datetime,
         queue: ReleaseQueue,
-        log: type[
+        log: type[Audit],
         *,
         force_run: bool = False,
     ) -> ReleaseQueue:
@@ -671,7 +671,7 @@ class Workflow(BaseModel):
         *,
         run_id: str | None = None,
         periods: int = 1,
-        log:
+        log: Audit | None = None,
         force_run: bool = False,
         timeout: int = 1800,
     ) -> list[Result]:
@@ -698,7 +698,7 @@ class Workflow(BaseModel):
         :rtype: list[Result]
         :return: A list of all results that return from ``self.release`` method.
         """
-        log: type[
+        log: type[Audit] = log or get_audit()
         run_id: str = run_id or gen_id(self.name, unique=True)

         # VALIDATE: Check the periods value should gather than 0.
@@ -820,6 +820,7 @@ class Workflow(BaseModel):
         *,
         run_id: str | None = None,
         raise_error: bool = True,
+        result: Result | None = None,
     ) -> Result:
         """Job execution with passing dynamic parameters from the main workflow
         execution to the target job object via job's ID.
@@ -837,13 +838,18 @@ class Workflow(BaseModel):
         :param run_id: A workflow running ID for this job execution.
         :param raise_error: A flag that raise error instead catching to result
             if it gets exception from job execution.
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         :return: Return the result object that receive the job execution result
             context.
         """
-
-
+        if result is None:  # pragma: no cov
+            run_id: str = run_id or gen_id(self.name, unique=True)
+            result: Result = Result(run_id=run_id)
+        else:
+            run_id: str = result.run_id

         # VALIDATE: check a job ID that exists in this workflow or not.
         if job_id not in self.jobs:
@@ -852,9 +858,7 @@ class Workflow(BaseModel):
                 f"workflow."
             )

-
-            f"({cut_id(run_id)}) [WORKFLOW]: Start execute job: {job_id!r}"
-        )
+        result.trace.info(f"[WORKFLOW]: Start execute job: {job_id!r}")

         # IMPORTANT:
         # This execution change all job running IDs to the current workflow
@@ -868,10 +872,7 @@ class Workflow(BaseModel):
                 to=params,
             )
         except JobException as err:
-
-                f"({cut_id(run_id)}) [WORKFLOW]: {err.__class__.__name__}: "
-                f"{err}"
-            )
+            result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
             if raise_error:
                 raise WorkflowException(
                     f"Get job execution error {job_id}: JobException: {err}"
@@ -880,7 +881,7 @@ class Workflow(BaseModel):
                 "Handle error from the job execution does not support yet."
             ) from None

-        return
+        return result.catch(status=0, context=params)

     def execute(
         self,
@@ -888,6 +889,7 @@ class Workflow(BaseModel):
         *,
         run_id: str | None = None,
         timeout: int = 0,
+        result: Result | None = None,
     ) -> Result:
         """Execute workflow with passing a dynamic parameters to all jobs that
         included in this workflow model with ``jobs`` field.
@@ -907,31 +909,32 @@ class Workflow(BaseModel):

         :param run_id: A workflow running ID for this job execution.
         :type run_id: str | None (default: None)
-        :param timeout: A workflow execution time out in second unit that use
+        :param timeout: (int) A workflow execution time out in second unit that use
             for limit time of execution and waiting job dependency. This value
             does not force stop the task that still running more than this limit
-            time.
-        :
+            time. (default: 0)
+        :param result: (Result) A result object for keeping context and status
+            data.

         :rtype: Result
         """
-        run_id: str = run_id or gen_id(self.name, unique=True)
-        logger.info(
-            f"({cut_id(run_id)}) [WORKFLOW]: Start Execute: {self.name!r} ..."
-        )
-
         # NOTE: I use this condition because this method allow passing empty
         # params and I do not want to create new dict object.
         ts: float = time.monotonic()
-
+        if result is None:  # pragma: no cov
+            result: Result = Result(
+                run_id=(run_id or gen_id(self.name, unique=True))
+            )
+
+        result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")

         # NOTE: It should not do anything if it does not have job.
         if not self.jobs:
-
-            f"
-            f"
+            result.trace.warning(
+                f"[WORKFLOW]: This workflow: {self.name!r} does not have any "
+                f"jobs"
             )
-            return
+            return result.catch(status=0, context=params)

         # NOTE: Create a job queue that keep the job that want to run after
         # its dependency condition.
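With the hunks above, `Workflow.execute` (and `execute_job`) accept an optional `Result`, so a parent run such as a `TriggerStage` can hand its own run context down instead of a bare `run_id`. A minimal sketch of driving a workflow with a pre-built `Result`; the workflow name and params are placeholders for whatever your configuration defines:

```python
# Minimal sketch of passing a pre-built Result into Workflow.execute; the
# workflow name and params are placeholders for your own configuration.
from ddeutil.workflow.result import Result
from ddeutil.workflow.workflow import Workflow

wf: Workflow = Workflow.from_loader(name="wf-example")
rs: Result = wf.execute(
    params={"run-date": "2024-01-01"},
    result=Result(run_id="demo-run"),
)
print(rs.status, rs.context)
```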
@@ -952,7 +955,7 @@ class Workflow(BaseModel):
         try:
             if config.max_job_parallel == 1:
                 self.__exec_non_threading(
-
+                    result=result,
                     context=context,
                     ts=ts,
                     job_queue=jq,
@@ -960,7 +963,7 @@ class Workflow(BaseModel):
                 )
             else:
                 self.__exec_threading(
-
+                    result=result,
                     context=context,
                     ts=ts,
                     job_queue=jq,
@@ -974,11 +977,11 @@ class Workflow(BaseModel):
                     "error_message": f"{err.__class__.__name__}: {err}",
                 },
             )
-        return
+        return result.catch(status=status, context=context)

     def __exec_threading(
         self,
-
+        result: Result,
         context: DictData,
         ts: float,
         job_queue: Queue,
@@ -991,6 +994,7 @@ class Workflow(BaseModel):
         If a job need dependency, it will check dependency job ID from
         context data before allow it run.

+        :param result: A result model.
         :param context: A context workflow data that want to downstream passing.
         :param ts: A start timestamp that use for checking execute time should
             time out.
@@ -1002,9 +1006,7 @@ class Workflow(BaseModel):
         """
         not_timeout_flag: bool = True
         timeout: int = timeout or config.max_job_exec_timeout
-
-            f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with threading."
-        )
+        result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with threading.")

         # IMPORTANT: The job execution can run parallel and waiting by
         # needed.
@@ -1041,6 +1043,7 @@ class Workflow(BaseModel):
                         self.execute_job,
                         job_id,
                         params=context,
+                        result=result,
                     ),
                 )

@@ -1055,7 +1058,7 @@ class Workflow(BaseModel):

             for future in as_completed(futures, timeout=thread_timeout):
                 if err := future.exception():
-
+                    result.trace.error(f"[WORKFLOW]: {err}")
                     raise WorkflowException(str(err))

                 # NOTE: This getting result does not do anything.
@@ -1067,15 +1070,14 @@ class Workflow(BaseModel):
                 future.cancel()

         # NOTE: Raise timeout error.
-
-            f"
-            f"was timeout."
+        result.trace.warning(
+            f"[WORKFLOW]: Execution: {self.name!r} was timeout."
         )
         raise WorkflowException(f"Execution: {self.name!r} was timeout.")

     def __exec_non_threading(
         self,
-
+        result: Result,
         context: DictData,
         ts: float,
         job_queue: Queue,
@@ -1088,6 +1090,7 @@ class Workflow(BaseModel):
         If a job need dependency, it will check dependency job ID from
         context data before allow it run.

+        :param result: A result model.
         :param context: A context workflow data that want to downstream passing.
         :param ts: A start timestamp that use for checking execute time should
             time out.
@@ -1097,10 +1100,7 @@ class Workflow(BaseModel):
         """
         not_timeout_flag: bool = True
         timeout: int = timeout or config.max_job_exec_timeout
-
-            f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with "
-            f"non-threading."
-        )
+        result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with non-threading.")

         while not job_queue.empty() and (
             not_timeout_flag := ((time.monotonic() - ts) < timeout)
@@ -1123,7 +1123,7 @@ class Workflow(BaseModel):
             # 'params': <input-params>,
             # 'jobs': {},
             # }
-            self.execute_job(job_id=job_id, params=context,
+            self.execute_job(job_id=job_id, params=context, result=result)

             # NOTE: Mark this job queue done.
             job_queue.task_done()
@@ -1137,9 +1137,8 @@ class Workflow(BaseModel):
             return context

         # NOTE: Raise timeout error.
-
-            f"
-            f"was timeout."
+        result.trace.warning(
+            f"[WORKFLOW]: Execution: {self.name!r} was timeout."
         )
         raise WorkflowException(f"Execution: {self.name!r} was timeout.")

@@ -1165,7 +1164,7 @@ class WorkflowTask:
         self,
         release: datetime | Release | None = None,
         run_id: str | None = None,
-        log: type[
+        log: type[Audit] = None,
         queue: ReleaseQueue | None = None,
     ) -> Result:
         """Release the workflow task data.
@@ -1177,7 +1176,7 @@ class WorkflowTask:

         :rtype: Result
         """
-        log: type[
+        log: type[Audit] = log or get_audit()

         if release is None:

@@ -1213,7 +1212,7 @@ class WorkflowTask:
         self,
         end_date: datetime,
         queue: ReleaseQueue,
-        log: type[
+        log: type[Audit],
         *,
         force_run: bool = False,
     ) -> ReleaseQueue:
@@ -1223,8 +1222,8 @@ class WorkflowTask:
         :param end_date: An end datetime object.
         :param queue: A workflow queue object.
         :param log: A log class that want to make log object.
-        :param force_run: A flag that allow to release workflow if the
-            that release was pointed.
+        :param force_run: (bool) A flag that allow to release workflow if the
+            log with that release was pointed.

         :rtype: ReleaseQueue
         """
@@ -1260,7 +1259,7 @@ class WorkflowTask:
         return queue

     def __repr__(self) -> str:
-        """Override
+        """Override the `__repr__` method."""
         return (
             f"{self.__class__.__name__}(alias={self.alias!r}, "
             f"workflow={self.workflow.name!r}, runner={self.runner!r}, "