ddeutil-workflow 0.0.49__py3-none-any.whl → 0.0.50__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -0
- ddeutil/workflow/conf.py +9 -9
- ddeutil/workflow/exceptions.py +3 -3
- ddeutil/workflow/job.py +122 -71
- ddeutil/workflow/logs.py +1 -1
- ddeutil/workflow/result.py +12 -4
- ddeutil/workflow/stages.py +315 -115
- ddeutil/workflow/utils.py +42 -38
- ddeutil/workflow/workflow.py +18 -25
- {ddeutil_workflow-0.0.49.dist-info → ddeutil_workflow-0.0.50.dist-info}/METADATA +3 -2
- {ddeutil_workflow-0.0.49.dist-info → ddeutil_workflow-0.0.50.dist-info}/RECORD +15 -15
- {ddeutil_workflow-0.0.49.dist-info → ddeutil_workflow-0.0.50.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.49.dist-info → ddeutil_workflow-0.0.50.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.49.dist-info → ddeutil_workflow-0.0.50.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.49"
+__version__: str = "0.0.50"
ddeutil/workflow/__init__.py
CHANGED
@@ -49,6 +49,7 @@ from .params import (
     StrParam,
 )
 from .result import (
+    CANCEL,
     FAILED,
     SKIP,
     SUCCESS,
@@ -101,6 +102,9 @@ from .utils import (
     get_diff_sec,
     get_dt_now,
     make_exec,
+    reach_next_minute,
+    replace_sec,
+    wait_to_next_minute,
 )
 from .workflow import (
     Release,
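The re-exported names become importable from the package root after this change, for example (this snippet is illustrative, not part of the package diff):

    # Added to the public API in 0.0.50: the CANCEL status plus the new
    # minute-alignment helpers from ddeutil.workflow.utils.
    from ddeutil.workflow import (
        CANCEL,
        reach_next_minute,
        replace_sec,
        wait_to_next_minute,
    )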
ddeutil/workflow/conf.py
CHANGED
@@ -53,7 +53,6 @@ class Config: # pragma: no cov
     The config value can change when you call that config property again.
     """
 
-    # NOTE: Core
     @property
     def conf_path(self) -> Path:
         """Config path that keep all workflow template YAML files.
@@ -73,9 +72,13 @@ class Config: # pragma: no cov
 
     @property
     def generate_id_simple_mode(self) -> bool:
+        """Flag for generate running ID with simple mode. That does not use
+        `md5` function after generate simple mode.
+
+        :rtype: bool
+        """
         return str2bool(env("CORE_GENERATE_ID_SIMPLE_MODE", "true"))
 
-    # NOTE: Register
     @property
     def registry_caller(self) -> list[str]:
         """Register Caller that is a list of importable string for the call
@@ -98,13 +101,16 @@ class Config: # pragma: no cov
         )
         return [r.strip() for r in regis_filter_str.split(",")]
 
-    # NOTE: Log
     @property
     def trace_path(self) -> Path:
         return Path(env("LOG_TRACE_PATH", "./logs"))
 
     @property
     def debug(self) -> bool:
+        """Debug flag for echo log that use DEBUG mode.
+
+        :rtype: bool
+        """
         return str2bool(env("LOG_DEBUG_MODE", "true"))
 
     @property
@@ -144,7 +150,6 @@ class Config: # pragma: no cov
     def log_datetime_format(self) -> str:
         return env("LOG_DATETIME_FORMAT", "%Y-%m-%d %H:%M:%S")
 
-    # NOTE: Stage
     @property
     def stage_raise_error(self) -> bool:
         return str2bool(env("CORE_STAGE_RAISE_ERROR", "false"))
@@ -153,11 +158,6 @@ class Config: # pragma: no cov
     def stage_default_id(self) -> bool:
         return str2bool(env("CORE_STAGE_DEFAULT_ID", "false"))
 
-    # NOTE: Job
-    @property
-    def job_raise_error(self) -> bool:
-        return str2bool(env("CORE_JOB_RAISE_ERROR", "true"))
-
     @property
     def max_cron_per_workflow(self) -> int:
         """The maximum on value that store in workflow model.
ddeutil/workflow/exceptions.py
CHANGED
@@ -39,6 +39,9 @@ class BaseWorkflowException(Exception):
 class UtilException(BaseWorkflowException): ...
 
 
+class ResultException(UtilException): ...
+
+
 class StageException(BaseWorkflowException): ...
 
 
@@ -48,9 +51,6 @@ class JobException(BaseWorkflowException): ...
 class WorkflowException(BaseWorkflowException): ...
 
 
-class WorkflowFailException(WorkflowException): ...
-
-
 class ParamValueException(WorkflowException): ...
 
 
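Because the new `ResultException` subclasses `UtilException` rather than `BaseWorkflowException` directly, handlers that already catch `UtilException` will also see result errors. A small illustrative sketch (not part of the diff):

    from ddeutil.workflow.exceptions import ResultException, UtilException

    try:
        raise ResultException("The key 'jobs' does not exists on context data.")
    except UtilException as e:  # caught through the parent class
        print(type(e).__name__)  # ResultException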
ddeutil/workflow/job.py
CHANGED
@@ -3,7 +3,6 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-# [x] Use dynamic config
 """Job Model that use for keeping stages and node that running its stages.
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define `runs-on` key that allow you to run this
@@ -33,7 +32,6 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import dynamic
 from .exceptions import (
     JobException,
     StageException,
@@ -51,11 +49,12 @@ MatrixFilter = list[dict[str, Union[str, int]]]
 __all__: TupleStr = (
     "Strategy",
     "Job",
-    "…
+    "Rule",
     "RunsOn",
-    "…
-    "…
-    "…
+    "RunsOnModel",
+    "OnLocal",
+    "OnSelfHosted",
+    "OnK8s",
     "make",
     "local_execute_strategy",
     "local_execute",
@@ -194,24 +193,25 @@ class Strategy(BaseModel):
         return make(self.matrix, self.include, self.exclude)
 
 
-class …
+class Rule(str, Enum):
     """Trigger rules enum object."""
 
-    …
-    …
-    …
-    …
-    …
-    …
-    …
+    ALL_SUCCESS: str = "all_success"
+    ALL_FAILED: str = "all_failed"
+    ALL_DONE: str = "all_done"
+    ONE_FAILED: str = "one_failed"
+    ONE_SUCCESS: str = "one_success"
+    NONE_FAILED: str = "none_failed"
+    NONE_SKIPPED: str = "none_skipped"
 
 
-class …
+class RunsOn(str, Enum):
     """Runs-On enum object."""
 
     LOCAL: str = "local"
     SELF_HOSTED: str = "self_hosted"
     K8S: str = "k8s"
+    AZ_BATCH: str = "azure_batch"
 
 
 class BaseRunsOn(BaseModel): # pragma: no cov
@@ -221,47 +221,45 @@ class BaseRunsOn(BaseModel): # pragma: no cov
 
     model_config = ConfigDict(use_enum_values=True)
 
-    type: Literal[…
+    type: Literal[RunsOn.LOCAL]
     args: DictData = Field(
         default_factory=dict,
         alias="with",
     )
 
 
-class …
+class OnLocal(BaseRunsOn): # pragma: no cov
     """Runs-on local."""
 
-    type: Literal[…
+    type: Literal[RunsOn.LOCAL] = Field(default=RunsOn.LOCAL)
 
 
 class SelfHostedArgs(BaseModel):
     host: str
 
 
-class …
+class OnSelfHosted(BaseRunsOn): # pragma: no cov
     """Runs-on self-hosted."""
 
-    type: Literal[RunsOnType.SELF_HOSTED] = Field(
-        default=RunsOnType.SELF_HOSTED
-    )
+    type: Literal[RunsOn.SELF_HOSTED] = Field(default=RunsOn.SELF_HOSTED)
     args: SelfHostedArgs = Field(alias="with")
 
 
-class …
+class OnK8s(BaseRunsOn): # pragma: no cov
     """Runs-on Kubernetes."""
 
-    type: Literal[…
+    type: Literal[RunsOn.K8S] = Field(default=RunsOn.K8S)
 
 
 def get_discriminator_runs_on(model: dict[str, Any]) -> str:
     return model.get("type", "local")
 
 
-…
+RunsOnModel = Annotated[
     Union[
-        Annotated[…
-        Annotated[…
-        Annotated[…
+        Annotated[OnK8s, Tag(RunsOn.K8S)],
+        Annotated[OnSelfHosted, Tag(RunsOn.SELF_HOSTED)],
+        Annotated[OnLocal, Tag(RunsOn.LOCAL)],
     ],
     Discriminator(get_discriminator_runs_on),
 ]
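A minimal sketch (assuming pydantic v2's `TypeAdapter`; the host URL is a made-up example) of how this callable discriminator resolves a `runs-on` mapping to one of the tagged models:

    from pydantic import TypeAdapter

    adapter = TypeAdapter(RunsOnModel)

    # "type" selects the tagged member; "with" is the alias of its args field.
    node = adapter.validate_python(
        {"type": "self_hosted", "with": {"host": "http://worker:8080"}}
    )
    print(type(node).__name__)  # OnSelfHosted

    # A mapping without "type" falls back to "local" in get_discriminator_runs_on.
    print(type(adapter.validate_python({})).__name__)  # OnLocal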
@@ -305,8 +303,8 @@ class Job(BaseModel):
         default=None,
         description="A job description that can be string of markdown content.",
     )
-    runs_on: …
-        default_factory=…
+    runs_on: RunsOnModel = Field(
+        default_factory=OnLocal,
         description="A target node for this job to use for execution.",
         alias="runs-on",
     )
@@ -319,8 +317,8 @@ class Job(BaseModel):
         default_factory=list,
         description="A list of Stage of this job.",
     )
-    trigger_rule: …
-        default=…
+    trigger_rule: Rule = Field(
+        default=Rule.ALL_SUCCESS,
         description=(
             "A trigger rule of tracking needed jobs if feature will use when "
             "the `raise_error` did not set from job and stage executions."
@@ -421,27 +419,27 @@ class Job(BaseModel):
             return WAIT
         elif all("skipped" in need_exist[job] for job in need_exist):
             return SKIP
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.ALL_DONE:
             return SUCCESS
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.ALL_SUCCESS:
             rs = all(
                 k not in need_exist[job]
                 for k in ("errors", "skipped")
                 for job in need_exist
             )
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.ALL_FAILED:
             rs = all("errors" in need_exist[job] for job in need_exist)
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.ONE_SUCCESS:
             rs = sum(
                 k not in need_exist[job]
                 for k in ("errors", "skipped")
                 for job in need_exist
             ) + 1 == len(self.needs)
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.ONE_FAILED:
             rs = sum("errors" in need_exist[job] for job in need_exist) == 1
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.NONE_SKIPPED:
             rs = all("skipped" not in need_exist[job] for job in need_exist)
-        elif self.trigger_rule == …
+        elif self.trigger_rule == Rule.NONE_FAILED:
             rs = all("errors" not in need_exist[job] for job in need_exist)
         else: # pragma: no cov
             raise NotImplementedError(
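For reference, a standalone sketch (not from the package) of how these checks read the needed-jobs context: a need counts as failed when its context carries an "errors" key, and as skipped when it carries "skipped":

    # Hypothetical context for two needed jobs: one finished cleanly, one failed.
    need_exist = {
        "extract": {"stages": {}},
        "load": {"errors": {"name": "JobException", "message": "stage failed"}},
    }

    # Rule.ALL_SUCCESS: no need may carry "errors" or "skipped".
    all_success = all(
        k not in need_exist[job]
        for k in ("errors", "skipped")
        for job in need_exist
    )  # False here

    # Rule.ONE_FAILED: exactly one need carries "errors".
    one_failed = sum("errors" in need_exist[job] for job in need_exist) == 1  # True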
@@ -480,8 +478,8 @@ class Job(BaseModel):
             if not isinstance(rs, bool):
                 raise TypeError("Return type of condition does not be boolean")
             return not rs
-        except Exception as err:
-            raise JobException(f"{…
+        except Exception as e:
+            raise JobException(f"{e.__class__.__name__}: {e}") from e
 
     def set_outputs(
         self,
@@ -555,6 +553,7 @@ class Job(BaseModel):
         parent_run_id: str | None = None,
         result: Result | None = None,
         event: Event | None = None,
+        raise_error: bool = True,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
@@ -567,6 +566,8 @@ class Job(BaseModel):
             data.
         :param event: (Event) An event manager that pass to the
             PoolThreadExecutor.
+        :param raise_error: (bool) A flag that all this method raise error to the
+            strategy execution. Default is `True`.
 
         :rtype: Result
         """
@@ -578,16 +579,18 @@ class Job(BaseModel):
             extras=self.extras,
         )
 
-        …
+        result.trace.info(f"[JOB]: Start execute job: {self.id!r}")
+        if self.runs_on.type == RunsOn.LOCAL:
             return local_execute(
                 job=self,
                 params=params,
                 result=result,
                 event=event,
+                raise_error=raise_error,
             )
-        elif self.runs_on.type == …
+        elif self.runs_on.type == RunsOn.SELF_HOSTED: # pragma: no cov
             pass
-        elif self.runs_on.type == …
+        elif self.runs_on.type == RunsOn.K8S: # pragma: no cov
             pass
 
         # pragma: no cov
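Since `raise_error` now threads from `Job.execute` down to the strategy loop (replacing the removed `job_raise_error` dynamic config), a caller can opt out of exceptions and inspect the result context instead. A hedged sketch, assuming an already-built `job: Job` and a `params` dict:

    from ddeutil.workflow import FAILED

    rs = job.execute(params, raise_error=False)
    if rs.status == FAILED:
        # Failed strategies are collected as "errors" dicts in the context
        # instead of raising JobException.
        print(rs.context)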
@@ -608,7 +611,7 @@ def local_execute_strategy(
     *,
     result: Result | None = None,
     event: Event | None = None,
-    raise_error: bool …
+    raise_error: bool = True,
 ) -> Result:
     """Local job strategy execution with passing dynamic parameters from the
     workflow execution to strategy matrix.
@@ -665,7 +668,7 @@ def local_execute_strategy(
             context={
                 strategy_id: {
                     "matrix": strategy,
-                    "stages": context.pop("stages", {}),
+                    "stages": filter_func(context.pop("stages", {})),
                     "errors": JobException(error_msg).to_dict(),
                 },
             },
@@ -689,21 +692,17 @@ def local_execute_strategy(
             context={
                 strategy_id: {
                     "matrix": strategy,
-                    "stages": context.pop("stages", {}),
+                    "stages": filter_func(context.pop("stages", {})),
                     "errors": JobException(error_msg).to_dict(),
                 },
             },
         )
 
-    except (StageException, UtilException) as err:
-        result.trace.error(f"[JOB]: {…
-        do_raise: bool = dynamic(
-            "job_raise_error", f=raise_error, extras=job.extras
-        )
-        if do_raise:
+    except (StageException, UtilException) as e:
+        result.trace.error(f"[JOB]: {e.__class__.__name__}: {e}")
+        if raise_error:
             raise JobException(
-                f"Stage execution error: {…
-                f"{err}"
+                f"Stage execution error: {e.__class__.__name__}: {e}"
             ) from None
 
     return result.catch(
@@ -711,8 +710,8 @@ def local_execute_strategy(
         context={
             strategy_id: {
                 "matrix": strategy,
-                "stages": context.pop("stages", {}),
-                "errors": …
+                "stages": filter_func(context.pop("stages", {})),
+                "errors": e.to_dict(),
             },
         },
     )
@@ -736,7 +735,7 @@ def local_execute(
     parent_run_id: str | None = None,
     result: Result | None = None,
     event: Event | None = None,
-    raise_error: bool …
+    raise_error: bool = True,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution or itself execution. It will generate matrix values at the first
@@ -753,7 +752,7 @@ def local_execute(
         data.
     :param event: (Event) An event manager that pass to the PoolThreadExecutor.
     :param raise_error: (bool) A flag that all this method raise error to the
-        strategy execution.
+        strategy execution. Default is `True`.
 
     :rtype: Result
     """
@@ -784,9 +783,9 @@ def local_execute(
             )
 
             local_execute_strategy(
-                job…
-                strategy…
-                params…
+                job,
+                strategy,
+                params,
                 result=result,
                 event=event,
                 raise_error=raise_error,
@@ -856,13 +855,12 @@ def local_execute(
         for future in done:
             try:
                 future.result()
-            except JobException as err:
+            except JobException as e:
                 status = FAILED
                 result.trace.error(
-                    f"[JOB]: {ls} Catch:\n\t{…
-                    f"\n\t{err}"
+                    f"[JOB]: {ls} Catch:\n\t{e.__class__.__name__}:\n\t{e}"
                 )
-                context.update({"errors": …
+                context.update({"errors": e.to_dict()})
 
     return result.catch(status=status, context=context)
 
|
@@ -875,7 +873,7 @@ def self_hosted_execute(
|
|
875
873
|
parent_run_id: str | None = None,
|
876
874
|
result: Result | None = None,
|
877
875
|
event: Event | None = None,
|
878
|
-
raise_error: bool
|
876
|
+
raise_error: bool = True,
|
879
877
|
) -> Result: # pragma: no cov
|
880
878
|
"""Self-Hosted job execution with passing dynamic parameters from the
|
881
879
|
workflow execution or itself execution. It will make request to the
|
@@ -929,10 +927,7 @@ def self_hosted_execute(
|
|
929
927
|
return result.catch(status=FAILED, context={"errors": to_dict(e)})
|
930
928
|
|
931
929
|
if resp.status_code != 200:
|
932
|
-
|
933
|
-
"job_raise_error", f=raise_error, extras=job.extras
|
934
|
-
)
|
935
|
-
if do_raise:
|
930
|
+
if raise_error:
|
936
931
|
raise JobException(
|
937
932
|
f"Job execution error from request to self-hosted: "
|
938
933
|
f"{job.runs_on.args.host!r}"
|
@@ -940,3 +935,59 @@ def self_hosted_execute(
|
|
940
935
|
|
941
936
|
return result.catch(status=FAILED)
|
942
937
|
return result.catch(status=SUCCESS)
|
938
|
+
|
939
|
+
|
940
|
+
def azure_batch_execute(
|
941
|
+
job: Job,
|
942
|
+
params: DictData,
|
943
|
+
*,
|
944
|
+
run_id: str | None = None,
|
945
|
+
parent_run_id: str | None = None,
|
946
|
+
result: Result | None = None,
|
947
|
+
event: Event | None = None,
|
948
|
+
raise_error: bool | None = None,
|
949
|
+
) -> Result: # pragma no cov
|
950
|
+
"""Azure Batch job execution that will run all job's stages on the Azure
|
951
|
+
Batch Node and extract the result file to be returning context result.
|
952
|
+
|
953
|
+
Steps:
|
954
|
+
- Create a Batch account and a Batch pool.
|
955
|
+
- Create a Batch job and add tasks to the job. Each task represents a
|
956
|
+
command to run on a compute node.
|
957
|
+
- Specify the command to run the Python script in the task. You can use
|
958
|
+
the cmd /c command to run the script with the Python interpreter.
|
959
|
+
- Upload the Python script and any required input files to Azure Storage
|
960
|
+
Account.
|
961
|
+
- Configure the task to download the input files from Azure Storage to
|
962
|
+
the compute node before running the script.
|
963
|
+
- Monitor the job and retrieve the output files from Azure Storage.
|
964
|
+
|
965
|
+
:param job:
|
966
|
+
:param params:
|
967
|
+
:param run_id:
|
968
|
+
:param parent_run_id:
|
969
|
+
:param result:
|
970
|
+
:param event:
|
971
|
+
:param raise_error:
|
972
|
+
:return:
|
973
|
+
"""
|
974
|
+
result: Result = Result.construct_with_rs_or_id(
|
975
|
+
result,
|
976
|
+
run_id=run_id,
|
977
|
+
parent_run_id=parent_run_id,
|
978
|
+
id_logic=(job.id or "not-set"),
|
979
|
+
extras=job.extras,
|
980
|
+
)
|
981
|
+
if event and event.is_set():
|
982
|
+
return result.catch(
|
983
|
+
status=FAILED,
|
984
|
+
context={
|
985
|
+
"errors": JobException(
|
986
|
+
"Job azure-batch execution was canceled from event that "
|
987
|
+
"had set before start execution."
|
988
|
+
).to_dict()
|
989
|
+
},
|
990
|
+
)
|
991
|
+
print(params)
|
992
|
+
print(raise_error)
|
993
|
+
return result.catch(status=SUCCESS)
|
ddeutil/workflow/logs.py
CHANGED
@@ -394,7 +394,7 @@ class FileTrace(BaseTrace): # pragma: no cov
         """
         cut_run_id: str = cut_id(self.run_id)
         if not self.parent_run_id:
-            return f"{cut_run_id}…
+            return f"{cut_run_id}"
 
         cut_parent_run_id: str = cut_id(self.parent_run_id)
         return f"{cut_parent_run_id} -> {cut_run_id}"
ddeutil/workflow/result.py
CHANGED
@@ -21,6 +21,7 @@ from typing_extensions import Self
 
 from .__types import DictData
 from .conf import dynamic
+from .exceptions import ResultException
 from .logs import Trace, get_dt_tznow, get_trace
 from .utils import default_gen_id, gen_id, get_dt_now
 
@@ -34,12 +35,14 @@ class Status(IntEnum):
     FAILED: int = 1
     WAIT: int = 2
     SKIP: int = 3
+    CANCEL: int = 4
 
 
 SUCCESS = Status.SUCCESS
 FAILED = Status.FAILED
 WAIT = Status.WAIT
 SKIP = Status.SKIP
+CANCEL = Status.CANCEL
 
 
 @dataclass(
@@ -63,7 +66,6 @@ class Result:
 
     status: Status = field(default=WAIT)
     context: DictData = field(default_factory=dict)
-    errors: DictData = field(default_factory=dict)
     run_id: Optional[str] = field(default_factory=default_gen_id)
     parent_run_id: Optional[str] = field(default=None, compare=False)
     ts: datetime = field(default_factory=get_dt_tznow, compare=False)
@@ -137,7 +139,7 @@ class Result:
         self,
         status: int | Status,
         context: DictData | None = None,
-        …
+        **kwargs,
     ) -> Self:
         """Catch the status and context to this Result object. This method will
         use between a child execution return a result, and it wants to pass
@@ -145,7 +147,6 @@ class Result:
 
         :param status: A status enum object.
         :param context: A context data that will update to the current context.
-        :param error: An error data that will update to the current errors.
 
         :rtype: Self
         """
@@ -153,7 +154,14 @@ class Result:
             Status(status) if isinstance(status, int) else status
         )
         self.__dict__["context"].update(context or {})
-        …
+        if kwargs:
+            for k in kwargs:
+                if k in self.__dict__["context"]:
+                    self.__dict__["context"][k].update(kwargs[k])
+                else:
+                    raise ResultException(
+                        f"The key {k!r} does not exists on context data."
+                    )
         return self
 
     def alive_time(self) -> float: # pragma: no cov
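A short sketch of the stricter `catch` behavior (illustrative, not part of the diff): extra keyword arguments now merge into existing context keys, and an unknown key raises the new `ResultException`:

    from ddeutil.workflow.result import FAILED, SUCCESS, Result, ResultException

    rs = Result(context={"jobs": {}})
    rs.catch(status=SUCCESS, jobs={"first-job": {"stages": {}}})
    print(rs.context["jobs"])  # {'first-job': {'stages': {}}}

    try:
        rs.catch(status=FAILED, outputs={})  # "outputs" is not a context key
    except ResultException as e:
        print(e)  # The key 'outputs' does not exists on context data.

This replaces the removed `errors` field: error payloads now travel inside `context` like any other key.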
|