ddeutil-workflow 0.0.48__py3-none-any.whl → 0.0.50__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +8 -1
- ddeutil/workflow/api/routes/logs.py +6 -5
- ddeutil/workflow/conf.py +40 -40
- ddeutil/workflow/exceptions.py +3 -3
- ddeutil/workflow/job.py +132 -76
- ddeutil/workflow/logs.py +145 -81
- ddeutil/workflow/result.py +20 -10
- ddeutil/workflow/reusables.py +3 -3
- ddeutil/workflow/scheduler.py +54 -44
- ddeutil/workflow/stages.py +514 -114
- ddeutil/workflow/utils.py +44 -40
- ddeutil/workflow/workflow.py +125 -112
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/METADATA +5 -6
- ddeutil_workflow-0.0.50.dist-info/RECORD +31 -0
- ddeutil_workflow-0.0.48.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/top_level.txt +0 -0
ddeutil/workflow/job.py
CHANGED
@@ -3,7 +3,6 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-# [x] Use dynamic config
 """Job Model that use for keeping stages and node that running its stages.
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define `runs-on` key that allow you to run this
@@ -33,7 +32,6 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import dynamic
 from .exceptions import (
     JobException,
     StageException,
@@ -51,11 +49,12 @@ MatrixFilter = list[dict[str, Union[str, int]]]
 __all__: TupleStr = (
     "Strategy",
     "Job",
-    "
+    "Rule",
     "RunsOn",
-    "
-    "
-    "
+    "RunsOnModel",
+    "OnLocal",
+    "OnSelfHosted",
+    "OnK8s",
     "make",
     "local_execute_strategy",
     "local_execute",
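The names above are exactly those now listed in `__all__`, so the renamed public surface of the module imports as:

```python
# Importable public names from ddeutil/workflow/job.py as of 0.0.50.
from ddeutil.workflow.job import (
    Rule,
    RunsOn,
    RunsOnModel,
    OnLocal,
    OnSelfHosted,
    OnK8s,
)
```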
@@ -194,24 +193,25 @@ class Strategy(BaseModel):
         return make(self.matrix, self.include, self.exclude)


-class
+class Rule(str, Enum):
     """Trigger rules enum object."""

-
-
-
-
-
-
-
+    ALL_SUCCESS: str = "all_success"
+    ALL_FAILED: str = "all_failed"
+    ALL_DONE: str = "all_done"
+    ONE_FAILED: str = "one_failed"
+    ONE_SUCCESS: str = "one_success"
+    NONE_FAILED: str = "none_failed"
+    NONE_SKIPPED: str = "none_skipped"


-class
+class RunsOn(str, Enum):
     """Runs-On enum object."""

     LOCAL: str = "local"
     SELF_HOSTED: str = "self_hosted"
     K8S: str = "k8s"
+    AZ_BATCH: str = "azure_batch"


 class BaseRunsOn(BaseModel):  # pragma: no cov
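Both new enums subclass `str`, so members compare equal to their raw values; that is what keeps the `==` checks later in this diff working even when pydantic stores plain strings via `use_enum_values=True`. A minimal sketch:

```python
from enum import Enum


class RunsOn(str, Enum):
    """Mirror of the Runs-On enum above."""

    LOCAL: str = "local"
    SELF_HOSTED: str = "self_hosted"
    K8S: str = "k8s"
    AZ_BATCH: str = "azure_batch"


# A str-subclassing enum member compares equal to its raw value, so code
# that receives either the member or the plain string behaves the same.
assert RunsOn.LOCAL == "local"
assert "azure_batch" == RunsOn.AZ_BATCH
```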
@@ -221,47 +221,45 @@ class BaseRunsOn(BaseModel):  # pragma: no cov

     model_config = ConfigDict(use_enum_values=True)

-    type: Literal[
+    type: Literal[RunsOn.LOCAL]
     args: DictData = Field(
         default_factory=dict,
         alias="with",
     )


-class
+class OnLocal(BaseRunsOn):  # pragma: no cov
     """Runs-on local."""

-    type: Literal[
+    type: Literal[RunsOn.LOCAL] = Field(default=RunsOn.LOCAL)


 class SelfHostedArgs(BaseModel):
     host: str


-class
+class OnSelfHosted(BaseRunsOn):  # pragma: no cov
     """Runs-on self-hosted."""

-    type: Literal[
-        default=RunsOnType.SELF_HOSTED
-    )
+    type: Literal[RunsOn.SELF_HOSTED] = Field(default=RunsOn.SELF_HOSTED)
     args: SelfHostedArgs = Field(alias="with")


-class
+class OnK8s(BaseRunsOn):  # pragma: no cov
     """Runs-on Kubernetes."""

-    type: Literal[
+    type: Literal[RunsOn.K8S] = Field(default=RunsOn.K8S)


 def get_discriminator_runs_on(model: dict[str, Any]) -> str:
     return model.get("type", "local")


-
+RunsOnModel = Annotated[
     Union[
-        Annotated[
-        Annotated[
-        Annotated[
+        Annotated[OnK8s, Tag(RunsOn.K8S)],
+        Annotated[OnSelfHosted, Tag(RunsOn.SELF_HOSTED)],
+        Annotated[OnLocal, Tag(RunsOn.LOCAL)],
     ],
     Discriminator(get_discriminator_runs_on),
 ]
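The new `RunsOnModel` alias is a pydantic tagged union: the callable discriminator reads the `type` key and routes validation to the matching subclass, defaulting to `local` when the key is absent. A self-contained sketch of the same pattern, simplified to two variants and using plain-string tags (the real module passes the `RunsOn` members, which are `str` subclasses):

```python
from enum import Enum
from typing import Annotated, Any, Literal, Union

from pydantic import BaseModel, ConfigDict, Discriminator, Field, Tag, TypeAdapter


class RunsOn(str, Enum):
    LOCAL = "local"
    SELF_HOSTED = "self_hosted"


class BaseRunsOn(BaseModel):
    model_config = ConfigDict(use_enum_values=True)

    type: RunsOn
    args: dict[str, Any] = Field(default_factory=dict, alias="with")


class OnLocal(BaseRunsOn):
    type: Literal[RunsOn.LOCAL] = Field(default=RunsOn.LOCAL)


class SelfHostedArgs(BaseModel):
    host: str


class OnSelfHosted(BaseRunsOn):
    type: Literal[RunsOn.SELF_HOSTED] = Field(default=RunsOn.SELF_HOSTED)
    args: SelfHostedArgs = Field(alias="with")


def get_discriminator_runs_on(model: dict[str, Any]) -> str:
    # Route on the `type` key; fall back to "local" when it is omitted.
    return model.get("type", "local")


RunsOnModel = Annotated[
    Union[
        Annotated[OnSelfHosted, Tag("self_hosted")],
        Annotated[OnLocal, Tag("local")],
    ],
    Discriminator(get_discriminator_runs_on),
]

adapter = TypeAdapter(RunsOnModel)
assert isinstance(adapter.validate_python({}), OnLocal)
assert isinstance(
    adapter.validate_python({"type": "self_hosted", "with": {"host": "10.0.0.1"}}),
    OnSelfHosted,
)
```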
@@ -305,8 +303,8 @@ class Job(BaseModel):
         default=None,
         description="A job description that can be string of markdown content.",
     )
-    runs_on:
-        default_factory=
+    runs_on: RunsOnModel = Field(
+        default_factory=OnLocal,
         description="A target node for this job to use for execution.",
         alias="runs-on",
     )
@@ -319,8 +317,8 @@ class Job(BaseModel):
         default_factory=list,
         description="A list of Stage of this job.",
     )
-    trigger_rule:
-        default=
+    trigger_rule: Rule = Field(
+        default=Rule.ALL_SUCCESS,
         description=(
             "A trigger rule of tracking needed jobs if feature will use when "
             "the `raise_error` did not set from job and stage executions."
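`trigger_rule` is now typed against the new `Rule` enum with `all_success` as the default. Because `Rule` subclasses `str`, raw strings from a job definition coerce into members during validation; a minimal stand-in model (hypothetical `MiniJob`, not the real `Job`) shows the coercion:

```python
from enum import Enum

from pydantic import BaseModel, Field


class Rule(str, Enum):
    ALL_SUCCESS = "all_success"
    ALL_DONE = "all_done"


class MiniJob(BaseModel):
    trigger_rule: Rule = Field(default=Rule.ALL_SUCCESS)


assert MiniJob().trigger_rule is Rule.ALL_SUCCESS
# A raw string, e.g. parsed from a YAML job definition, coerces to the member.
assert MiniJob(trigger_rule="all_done").trigger_rule is Rule.ALL_DONE
```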
@@ -421,27 +419,27 @@ class Job(BaseModel):
             return WAIT
         elif all("skipped" in need_exist[job] for job in need_exist):
             return SKIP
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.ALL_DONE:
             return SUCCESS
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.ALL_SUCCESS:
             rs = all(
                 k not in need_exist[job]
                 for k in ("errors", "skipped")
                 for job in need_exist
             )
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.ALL_FAILED:
             rs = all("errors" in need_exist[job] for job in need_exist)
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.ONE_SUCCESS:
             rs = sum(
                 k not in need_exist[job]
                 for k in ("errors", "skipped")
                 for job in need_exist
             ) + 1 == len(self.needs)
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.ONE_FAILED:
             rs = sum("errors" in need_exist[job] for job in need_exist) == 1
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.NONE_SKIPPED:
             rs = all("skipped" not in need_exist[job] for job in need_exist)
-        elif self.trigger_rule ==
+        elif self.trigger_rule == Rule.NONE_FAILED:
             rs = all("errors" not in need_exist[job] for job in need_exist)
         else:  # pragma: no cov
             raise NotImplementedError(
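The rule checks above are plain membership tests over the finished contexts of the needed jobs. For intuition, the expressions (copied verbatim from the hunk) evaluate like this for a sample `need_exist` mapping:

```python
# One needed job finished cleanly, the other recorded errors.
need_exist = {
    "extract": {"stages": {"first-stage": {"outputs": {}}}},
    "load": {"errors": {"name": "JobException", "message": "..."}},
}

# Rule.ALL_FAILED: every needed job carries an "errors" key -> False here.
assert not all("errors" in need_exist[job] for job in need_exist)
# Rule.NONE_FAILED: no needed job carries an "errors" key -> False here.
assert not all("errors" not in need_exist[job] for job in need_exist)
# Rule.ONE_FAILED: exactly one needed job carries an "errors" key -> True here.
assert sum("errors" in need_exist[job] for job in need_exist) == 1
```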
@@ -480,10 +478,16 @@ class Job(BaseModel):
             if not isinstance(rs, bool):
                 raise TypeError("Return type of condition does not be boolean")
             return not rs
-        except Exception as
-            raise JobException(f"{
+        except Exception as e:
+            raise JobException(f"{e.__class__.__name__}: {e}") from e

-    def set_outputs(
+    def set_outputs(
+        self,
+        output: DictData,
+        to: DictData,
+        *,
+        job_id: Optional[None] = None,
+    ) -> DictData:
         """Set an outputs from execution process to the received context. The
         result from execution will pass to value of `strategies` key.

@@ -511,22 +515,21 @@ class Job(BaseModel):

         :param output: An output context.
         :param to: A context data that want to add output result.
+        :param job_id: A job ID if the id field does not set.

         :rtype: DictData
         """
         if "jobs" not in to:
             to["jobs"] = {}

-        if self.id is None and
-            "job_default_id", extras=self.extras
-        ):
+        if self.id is None and job_id is None:
             raise JobException(
                 "This job do not set the ID before setting execution output."
             )

         # NOTE: If the job ID did not set, it will use index of jobs key
         #   instead.
-        _id: str = self.id or
+        _id: str = self.id or job_id

         errors: DictData = (
             {"errors": output.pop("errors", {})} if "errors" in output else {}
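Previously the fallback ID came from the `job_default_id` config key via `dynamic`; the caller now passes it explicitly. A hedged usage sketch, assuming the remaining `Job` fields keep their defaults:

```python
from ddeutil.workflow.job import Job

job = Job(stages=[])  # no `id` set on the model itself
context: dict = {}
job.set_outputs(
    {"stages": {"first-stage": {"outputs": {"x": 1}}}},
    to=context,
    job_id="first-job",
)
# `job_id` supplied the missing ID, so the output lands under
# context["jobs"]["first-job"]; omitting both IDs raises JobException.
print(context["jobs"]["first-job"])
```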
@@ -550,6 +553,7 @@ class Job(BaseModel):
         parent_run_id: str | None = None,
         result: Result | None = None,
         event: Event | None = None,
+        raise_error: bool = True,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
@@ -562,6 +566,8 @@ class Job(BaseModel):
             data.
         :param event: (Event) An event manager that pass to the
             PoolThreadExecutor.
+        :param raise_error: (bool) A flag that all this method raise error to the
+            strategy execution. Default is `True`.

         :rtype: Result
         """
@@ -573,16 +579,18 @@ class Job(BaseModel):
             extras=self.extras,
         )

-
+        result.trace.info(f"[JOB]: Start execute job: {self.id!r}")
+        if self.runs_on.type == RunsOn.LOCAL:
             return local_execute(
                 job=self,
                 params=params,
                 result=result,
                 event=event,
+                raise_error=raise_error,
             )
-        elif self.runs_on.type ==
+        elif self.runs_on.type == RunsOn.SELF_HOSTED:  # pragma: no cov
             pass
-        elif self.runs_on.type ==
+        elif self.runs_on.type == RunsOn.K8S:  # pragma: no cov
             pass

 # pragma: no cov
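`Job.execute` now logs the start of execution, dispatches on `runs_on.type`, and threads the new `raise_error` flag down to `local_execute`. A hedged call-through sketch (the stage payload is illustrative):

```python
from ddeutil.workflow.job import Job

job = Job.model_validate(
    {
        "id": "demo",
        "stages": [{"name": "Echo", "echo": "hello world"}],
    }
)
# The default runs-on is local, so this routes to `local_execute`; with
# raise_error=False, failures are collected into the Result context rather
# than raised as JobException.
rs = job.execute(params={}, raise_error=False)
print(rs.status)
```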
@@ -603,7 +611,7 @@ def local_execute_strategy(
     *,
     result: Result | None = None,
     event: Event | None = None,
-    raise_error: bool
+    raise_error: bool = True,
 ) -> Result:
     """Local job strategy execution with passing dynamic parameters from the
     workflow execution to strategy matrix.
@@ -660,7 +668,7 @@ def local_execute_strategy(
             context={
                 strategy_id: {
                     "matrix": strategy,
-                    "stages": context.pop("stages", {}),
+                    "stages": filter_func(context.pop("stages", {})),
                     "errors": JobException(error_msg).to_dict(),
                 },
             },
@@ -684,21 +692,17 @@ def local_execute_strategy(
             context={
                 strategy_id: {
                     "matrix": strategy,
-                    "stages": context.pop("stages", {}),
+                    "stages": filter_func(context.pop("stages", {})),
                     "errors": JobException(error_msg).to_dict(),
                 },
             },
         )

-    except (StageException, UtilException) as
-        result.trace.error(f"[JOB]: {
-
-            "job_raise_error", f=raise_error, extras=job.extras
-        )
-        if do_raise:
+    except (StageException, UtilException) as e:
+        result.trace.error(f"[JOB]: {e.__class__.__name__}: {e}")
+        if raise_error:
             raise JobException(
-                f"Stage execution error: {
-                f"{err}"
+                f"Stage execution error: {e.__class__.__name__}: {e}"
             ) from None

     return result.catch(
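With the `dynamic("job_raise_error", ...)` lookup removed, the flag alone decides between raising and recording. A sketch of the two behaviors, using a hypothetical failing bash stage:

```python
from ddeutil.workflow.exceptions import JobException
from ddeutil.workflow.job import Job, local_execute_strategy

job = Job.model_validate(
    {"id": "demo", "stages": [{"name": "Fail", "bash": "exit 1"}]}
)

try:
    # Default raise_error=True: the stage failure surfaces as JobException.
    local_execute_strategy(job, {}, {})
except JobException as exc:
    print(f"raised: {exc}")

# raise_error=False: the same failure is folded into the Result context
# under the strategy's "errors" key instead.
rs = local_execute_strategy(job, {}, {}, raise_error=False)
print(rs.context)
```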
@@ -706,8 +710,8 @@ def local_execute_strategy(
         context={
             strategy_id: {
                 "matrix": strategy,
-                "stages": context.pop("stages", {}),
-                "errors":
+                "stages": filter_func(context.pop("stages", {})),
+                "errors": e.to_dict(),
             },
         },
     )
@@ -731,7 +735,7 @@ def local_execute(
     parent_run_id: str | None = None,
     result: Result | None = None,
     event: Event | None = None,
-    raise_error: bool
+    raise_error: bool = True,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution or itself execution. It will generate matrix values at the first
@@ -748,7 +752,7 @@ def local_execute(
         data.
     :param event: (Event) An event manager that pass to the PoolThreadExecutor.
     :param raise_error: (bool) A flag that all this method raise error to the
-        strategy execution.
+        strategy execution. Default is `True`.

     :rtype: Result
     """
@@ -779,9 +783,9 @@ def local_execute(
             )

             local_execute_strategy(
-                job
-                strategy
-                params
+                job,
+                strategy,
+                params,
                 result=result,
                 event=event,
                 raise_error=raise_error,
@@ -851,13 +855,12 @@ def local_execute(
         for future in done:
             try:
                 future.result()
-            except JobException as
+            except JobException as e:
                 status = FAILED
                 result.trace.error(
-                    f"[JOB]: {ls} Catch:\n\t{
-                    f"\n\t{err}"
+                    f"[JOB]: {ls} Catch:\n\t{e.__class__.__name__}:\n\t{e}"
                 )
-                context.update({"errors":
+                context.update({"errors": e.to_dict()})

     return result.catch(status=status, context=context)

@@ -870,7 +873,7 @@ def self_hosted_execute(
    parent_run_id: str | None = None,
    result: Result | None = None,
    event: Event | None = None,
-    raise_error: bool
+    raise_error: bool = True,
 ) -> Result:  # pragma: no cov
     """Self-Hosted job execution with passing dynamic parameters from the
     workflow execution or itself execution. It will make request to the
@@ -924,10 +927,7 @@ def self_hosted_execute(
         return result.catch(status=FAILED, context={"errors": to_dict(e)})

     if resp.status_code != 200:
-
-            "job_raise_error", f=raise_error, extras=job.extras
-        )
-        if do_raise:
+        if raise_error:
             raise JobException(
                 f"Job execution error from request to self-hosted: "
                 f"{job.runs_on.args.host!r}"
@@ -935,3 +935,59 @@

         return result.catch(status=FAILED)
     return result.catch(status=SUCCESS)
+
+
+def azure_batch_execute(
+    job: Job,
+    params: DictData,
+    *,
+    run_id: str | None = None,
+    parent_run_id: str | None = None,
+    result: Result | None = None,
+    event: Event | None = None,
+    raise_error: bool | None = None,
+) -> Result:  # pragma no cov
+    """Azure Batch job execution that will run all job's stages on the Azure
+    Batch Node and extract the result file to be returning context result.
+
+    Steps:
+        - Create a Batch account and a Batch pool.
+        - Create a Batch job and add tasks to the job. Each task represents a
+          command to run on a compute node.
+        - Specify the command to run the Python script in the task. You can use
+          the cmd /c command to run the script with the Python interpreter.
+        - Upload the Python script and any required input files to Azure Storage
+          Account.
+        - Configure the task to download the input files from Azure Storage to
+          the compute node before running the script.
+        - Monitor the job and retrieve the output files from Azure Storage.
+
+    :param job:
+    :param params:
+    :param run_id:
+    :param parent_run_id:
+    :param result:
+    :param event:
+    :param raise_error:
+    :return:
+    """
+    result: Result = Result.construct_with_rs_or_id(
+        result,
+        run_id=run_id,
+        parent_run_id=parent_run_id,
+        id_logic=(job.id or "not-set"),
+        extras=job.extras,
+    )
+    if event and event.is_set():
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": JobException(
+                    "Job azure-batch execution was canceled from event that "
+                    "had set before start execution."
+                ).to_dict()
+            },
+        )
+    print(params)
+    print(raise_error)
+    return result.catch(status=SUCCESS)
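Note that `azure_batch_execute` is a scaffold: beyond the cancel-event guard it only prints its inputs and reports success, with the documented Batch steps still to be implemented. A call-through sketch:

```python
from ddeutil.workflow.job import Job, azure_batch_execute

job = Job(stages=[])
rs = azure_batch_execute(job, params={"name": "demo"}, run_id="01")
# Until the Azure Batch integration lands, any non-canceled call returns
# a SUCCESS result.
print(rs.status)
```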