ddeutil-workflow 0.0.84__py3-none-any.whl → 0.0.86__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/job.py CHANGED
@@ -56,8 +56,10 @@ from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
+from . import JobSkipError
 from .__types import DictData, DictStr, Matrix, StrOrNone
-from .errors import JobCancelError, JobError, to_dict
+from .conf import pass_env
+from .errors import JobCancelError, JobError, mark_errors, to_dict
 from .result import (
     CANCEL,
     FAILED,
@@ -73,7 +75,7 @@ from .result import (
 from .reusables import has_template, param2template
 from .stages import Stage
 from .traces import Trace, get_trace
-from .utils import cross_product, extract_id, filter_func, gen_id
+from .utils import cross_product, extract_id, filter_func, gen_id, get_dt_now
 
 MatrixFilter = list[dict[str, Union[str, int]]]
 
@@ -452,41 +454,22 @@ class Job(BaseModel):
     execution, dependency management, conditional execution, and multienvironment
     deployment.
 
-    Jobs are the primary execution units within workflows, providing:
-    - Stage lifecycle management
-    - Execution environment abstraction
-    - Matrix strategy support for parallel execution
-    - Dependency resolution via job needs
-    - Output coordination between stages
-
-    Attributes:
-        id (str, optional): Unique job identifier within workflow
-        desc (str, optional): Job description in Markdown format
-        runs_on (RunsOnModel): Execution environment configuration
-        condition (str, optional): Conditional execution expression
-        stages (list[Stage]): Ordered list of stages to execute
-        trigger_rule (Rule): Rule for handling job dependencies
-        needs (list[str]): List of prerequisite job IDs
-        strategy (Strategy): Matrix strategy for parameterized execution
-        extras (dict): Additional configuration parameters
-
     Example:
-        ```python
-        job = Job(
-            id="data-processing",
-            desc="Process daily data files",
-            runs_on=OnLocal(),
-            stages=[
-                EmptyStage(name="Start", echo="Processing started"),
-                PyStage(name="Process", run="process_data()"),
-                EmptyStage(name="Complete", echo="Processing finished")
-            ],
-            strategy=Strategy(
-                matrix={'env': ['dev', 'prod']},
-                max_parallel=2
-            )
-        )
-        ```
+        >>> from ddeutil.workflow.stages import EmptyStage, PyStage
+        >>> job = Job(
+        ...     id="data-processing",
+        ...     desc="Process daily data files",
+        ...     runs_on=OnLocal(),
+        ...     stages=[
+        ...         EmptyStage(name="Start", echo="Processing started"),
+        ...         PyStage(name="Process", run="process_data()"),
+        ...         EmptyStage(name="Complete", echo="Processing finished")
+        ...     ],
+        ...     strategy=Strategy(
+        ...         matrix={'env': ['dev', 'prod']},
+        ...         max_parallel=2
+        ...     )
+        ... )
     """
 
     id: StrOrNone = Field(
@@ -514,6 +497,15 @@ class Job(BaseModel):
         default_factory=list,
         description="A list of Stage model of this job.",
     )
+    retry: int = Field(
+        default=0,
+        ge=0,
+        lt=20,
+        description=(
+            "A retry number if job route execution got the error exclude skip "
+            "and cancel exception class."
+        ),
+    )
     trigger_rule: Rule = Field(
         default=Rule.ALL_SUCCESS,
         validate_default=True,
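The new `retry` field is validated by Pydantic to the half-open range `0 <= retry < 20`. A minimal sketch of how that bound behaves in isolation (the `RetryDemo` model below is a stand-in for illustration, not the real `Job` class):

```python
from pydantic import BaseModel, Field, ValidationError

class RetryDemo(BaseModel):
    # Same bounds as Job.retry in this release: ge=0 (inclusive), lt=20 (exclusive).
    retry: int = Field(default=0, ge=0, lt=20)

print(RetryDemo().retry)         # 0 -> retries disabled by default
print(RetryDemo(retry=3).retry)  # 3 -> up to three extra attempts
try:
    RetryDemo(retry=20)          # rejected because lt=20 is exclusive
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # 'less_than'
```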
@@ -751,7 +743,7 @@ class Job(BaseModel):
         # should use the `re` module to validate eval-string before
         # running.
         rs: bool = eval(
-            param2template(self.condition, params, extras=self.extras),
+            self.pass_template(self.condition, params),
             globals() | params,
             {},
         )
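The condition expression is now resolved through the new `pass_template` method, which (per the method body added later in this diff) applies `param2template` first and then `pass_env`, so environment-variable substitution happens after template rendering. A rough sketch of that composition, assuming only the two helpers shown in this diff's imports:

```python
from ddeutil.workflow.conf import pass_env
from ddeutil.workflow.reusables import param2template

def pass_template_sketch(value, params, extras=None):
    # 1) render ${{ ... }} template expressions against the params context,
    # 2) then substitute environment-variable references in the rendered value.
    return pass_env(param2template(value, params, extras=extras))
```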
@@ -802,8 +794,9 @@ class Job(BaseModel):
         extract from the result context if it exists. If it does not found, it
         will not set on the received context.
 
-        :raise JobError: If the job's ID does not set and the setting
-            default job ID flag does not set.
+        Raises:
+            JobError: If the job's ID does not set and the setting default job
+                ID flag does not set.
 
         Args:
             output: (DictData) A result data context that want to extract
@@ -831,9 +824,14 @@ class Job(BaseModel):
         status: dict[str, Status] = (
             {"status": output.pop("status")} if "status" in output else {}
         )
+        info: DictData = (
+            {"info": output.pop("info")} if "info" in output else {}
+        )
         kwargs: DictData = kwargs or {}
         if self.strategy.is_set():
-            to["jobs"][_id] = {"strategies": output} | errors | status | kwargs
+            to["jobs"][_id] = (
+                {"strategies": output} | errors | status | info | kwargs
+            )
         elif len(k := output.keys()) > 1:  # pragma: no cov
             raise JobError(
                 "Strategy output from execution return more than one ID while "
@@ -842,7 +840,7 @@ class Job(BaseModel):
         else:
             _output: DictData = {} if len(k) == 0 else output[list(k)[0]]
             _output.pop("matrix", {})
-            to["jobs"][_id] = _output | errors | status | kwargs
+            to["jobs"][_id] = _output | errors | status | info | kwargs
         return to
 
     def get_outputs(
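With the new `info` handling, `set_outputs` now forwards an `info` block alongside `errors` and `status`. The shape below is illustrative only: the keys come from this diff (see the `exec_start`/`exec_end`/`exec_latency` bookkeeping added to `execute` further down), while the strategy hash and all values are invented:

```python
# Illustrative context after set_outputs on a matrix job; values are fabricated.
context = {
    "jobs": {
        "data-processing": {
            "strategies": {"2150810470": {"env": "dev"}},  # per-strategy outputs
            "status": "SUCCESS",
            "info": {                                      # newly forwarded block
                "exec_start": "2025-01-01T00:00:00",
                "exec_end": "2025-01-01T00:00:05",
                "exec_latency": 5.000123,
            },
        },
    },
}
```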
@@ -854,10 +852,12 @@ class Job(BaseModel):
         """Get the outputs from jobs data. It will get this job ID or passing
         custom ID from the job outputs mapping.
 
-        :param output: (DictData) A job outputs data that want to extract
-        :param job_id: (StrOrNone) A job ID if the `id` field does not set.
+        Args:
+            output (DictData): A job outputs data that want to extract
+            job_id (StrOrNone): A job ID if the `id` field does not set.
 
-        :rtype: DictData
+        Returns:
+            DictData: An output data.
         """
         _id: str = self.id or job_id
         if self.strategy.is_set():
@@ -865,141 +865,291 @@ class Job(BaseModel):
         else:
             return output.get("jobs", {}).get(_id, {})
 
-    def execute(
+    def pass_template(self, value: Any, params: DictData) -> Any:
+        """Pass template and environment variable to any value that can
+        templating.
+
+        Args:
+            value (Any): An any value.
+            params (DictData): A parameter data that want to use in this
+                execution.
+
+        Returns:
+            Any: A templated value.
+        """
+        return pass_env(param2template(value, params, extras=self.extras))
+
+    def process(
         self,
         params: DictData,
+        run_id: str,
+        context: DictData,
         *,
-        run_id: StrOrNone = None,
+        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> Result:
-        """Job execution with passing dynamic parameters from the workflow
-        execution. It will generate matrix values at the first step and run
-        multithread on this metrics to the `stages` field of this job.
-
-        This method be execution routing for call dynamic execution function
-        with specific target `runs-on` value.
+        """Process routing method that will route the provider function depend
+        on runs-on value.
 
-        Args
-            params: (DictData) A parameter context that also pass from the
-                workflow execute method.
-            run_id: (str) An execution running ID.
-            event: (Event) An Event manager instance that use to cancel this
-                execution if it forces stopped by parent execution.
+        Args:
+            params (DictData): A parameter data that want to use in this
+                execution.
+            run_id (str): A running stage ID.
+            context (DictData): A context data that was passed from handler
+                method.
+            parent_run_id (str, default None): A parent running ID.
+            event (Event, default None): An event manager that use to track
+                parent process was not force stopped.
 
-        Returns
-            Result: Return Result object that create from execution context.
+        Returns:
+            Result: The execution result with status and context data.
         """
-        ts: float = time.monotonic()
-        parent_run_id, run_id = extract_id(
-            (self.id or "EMPTY"), run_id=run_id, extras=self.extras
-        )
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         trace.info(
-            f"[JOB]: Routing for "
+            f"[JOB]: Routing "
             f"{''.join(self.runs_on.type.value.split('_')).title()}: "
             f"{self.id!r}"
         )
-
+        rs: Optional[Result] = None
         if self.runs_on.type == LOCAL:
-            return local_execute(
+            rs = local_process(
                 self,
                 params,
+                context=context,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == SELF_HOSTED:  # pragma: no cov
             pass
         elif self.runs_on.type == AZ_BATCH:  # pragma: no cov
             from .plugins.providers.az import azure_batch_execute
 
-            return azure_batch_execute(
+            rs = azure_batch_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == DOCKER:  # pragma: no cov
-            return docker_execution(
+            rs = docker_process(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == CONTAINER:  # pragma: no cov
             from .plugins.providers.container import container_execute
 
-            return container_execute(
+            rs = container_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == AWS_BATCH:  # pragma: no cov
             from .plugins.providers.aws import aws_batch_execute
 
-            return aws_batch_execute(
+            rs = aws_batch_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == GCP_BATCH:  # pragma: no cov
             from .plugins.providers.gcs import gcp_batch_execute
 
-            return gcp_batch_execute(
+            rs = gcp_batch_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
 
-        trace.error(
-            f"[JOB]: Execution not support runs-on: {self.runs_on.type.value!r} "
-            f"yet."
-        )
-        return Result(
-            status=FAILED,
-            run_id=run_id,
-            parent_run_id=parent_run_id,
-            context={
-                "status": FAILED,
-                "errors": JobError(
-                    f"Execute runs-on type: {self.runs_on.type.value!r} does "
-                    f"not support yet."
-                ).to_dict(),
-            },
-            info={"execution_time": time.monotonic() - ts},
-            extras=self.extras,
+        if rs is None:
+            trace.error(
+                f"[JOB]: Execution not support runs-on: {self.runs_on.type.value!r} "
+                f"yet."
+            )
+            return Result(
+                status=FAILED,
+                run_id=run_id,
+                parent_run_id=parent_run_id,
+                context={
+                    "status": FAILED,
+                    "errors": JobError(
+                        f"Job runs-on type: {self.runs_on.type.value!r} does "
+                        f"not support yet."
+                    ).to_dict(),
+                },
+                extras=self.extras,
+            )
+
+        if rs.status == SKIP:
+            raise JobSkipError("Job got skipped status.")
+        elif rs.status == CANCEL:
+            raise JobCancelError("Job got canceled status.")
+        elif rs.status == FAILED:
+            raise JobError("Job process error")
+        return rs
+
+    def _execute(
+        self,
+        params: DictData,
+        context: DictData,
+        trace: Trace,
+        event: Optional[Event] = None,
+    ) -> Result:
+        """Wrapped the route execute method before returning to handler
+        execution.
+
+        This method call to make retry strategy for process routing
+        method.
+
+        Args:
+            params: A parameter data that want to use in this execution
+            context:
+            trace (Trace):
+            event (Event, default None):
+
+        Returns:
+            Result: The wrapped execution result.
+        """
+        current_retry: int = 0
+        maximum_retry: int = self.retry + 1
+        exception: Exception
+        catch(context, status=WAIT)
+        try:
+            return self.process(
+                params,
+                run_id=trace.run_id,
+                context=context,
+                parent_run_id=trace.parent_run_id,
+                event=event,
+            )
+        except (JobCancelError, JobSkipError):
+            trace.debug("[JOB]: process raise skip or cancel error.")
+            raise
+        except Exception as e:
+            if self.retry == 0:
+                raise
+
+            current_retry += 1
+            exception = e
+
+            trace.warning(
+                f"[JOB]: Retry count: {current_retry}/{maximum_retry} ... "
+                f"( {exception.__class__.__name__} )"
         )
+        while current_retry < maximum_retry:
+            try:
+                catch(
+                    context=context,
+                    status=WAIT,
+                    updated={"retry": current_retry},
+                )
+                return self.process(
+                    params,
+                    run_id=trace.run_id,
+                    context=context,
+                    parent_run_id=trace.parent_run_id,
+                    event=event,
+                )
+            except (JobCancelError, JobSkipError):
+                trace.debug("[JOB]: process raise skip or cancel error.")
+                raise
+            except Exception as e:
+                current_retry += 1
+                trace.warning(
+                    f"[JOB]: Retry count: {current_retry}/{maximum_retry} ... "
+                    f"( {e.__class__.__name__} )"
+                )
+                exception = e
+                time.sleep(1.2**current_retry)
 
+        trace.error(f"[JOB]: Reach the maximum of retry number: {self.retry}.")
+        raise exception
 
-def mark_errors(context: DictData, error: JobError) -> None:
-    """Make the errors context result with the refs value depends on the nested
-    execute func.
+    def execute(
+        self,
+        params: DictData,
+        *,
+        run_id: StrOrNone = None,
+        event: Optional[Event] = None,
+    ) -> Result:
+        """Job execution with passing dynamic parameters from the workflow
+        execution. It will generate matrix values at the first step and run
+        multithread on this metrics to the `stages` field of this job.
 
-    :param context: (DictData) A context data.
-    :param error: (JobError) A stage exception object.
-    """
-    if "errors" in context:
-        context["errors"][error.refs] = error.to_dict()
-    else:
-        context["errors"] = error.to_dict(with_refs=True)
+        This method be execution routing for call dynamic execution function
+        with specific target `runs-on` value.
+
+        Args
+            params: (DictData) A parameter context that also pass from the
+                workflow execute method.
+            run_id: (str) An execution running ID.
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
+
+        Returns
+            Result: Return Result object that create from execution context.
+        """
+        ts: float = time.monotonic()
+        parent_run_id, run_id = extract_id(
+            (self.id or "EMPTY"), run_id=run_id, extras=self.extras
+        )
+        context: DictData = {
+            "status": WAIT,
+            "info": {"exec_start": get_dt_now()},
+        }
+        trace: Trace = get_trace(
+            run_id, parent_run_id=parent_run_id, extras=self.extras
+        )
+        try:
+            trace.info(
+                f"[JOB]: Handler {self.runs_on.type.name}: "
+                f"{(self.id or 'EMPTY')!r}."
+            )
+            result: Result = self._execute(
+                params,
+                context=context,
+                trace=trace,
+                event=event,
+            )
+            return result
+        except JobError as e:  # pragma: no cov
+            if isinstance(e, JobSkipError):
+                trace.error(f"[JOB]: ⏭️ Skip: {e}")
+
+            st: Status = get_status_from_error(e)
+            return Result.from_trace(trace).catch(
+                status=st, context=catch(context, status=st)
+            )
+        finally:
+            context["info"].update(
+                {
+                    "exec_end": get_dt_now(),
+                    "exec_latency": round(time.monotonic() - ts, 6),
+                }
+            )
+            trace.debug("[JOB]: End Handler job execution.")
 
 
 def pop_stages(context: DictData) -> DictData:
+    """Pop a stages key from the context data. It will return empty dict if it
+    does not exist.
+    """
     return filter_func(context.pop("stages", {}))
 
 
-def local_execute_strategy(
+def local_process_strategy(
     job: Job,
     strategy: DictData,
     params: DictData,
-    run_id: str,
+    trace: Trace,
     context: DictData,
     *,
-    parent_run_id: Optional[str] = None,
     event: Optional[Event] = None,
 ) -> tuple[Status, DictData]:
     """Local strategy execution with passing dynamic parameters from the
@@ -1014,25 +1164,24 @@ def local_execute_strategy(
     For each stage that execution with this strategy metrix, it will use the
     `set_outputs` method for reconstruct result context data.
 
-    :param job: (Job) A job model that want to execute.
-    :param strategy: (DictData) A strategy metrix value. This value will pass
-        to the `matrix` key for templating in context data.
-    :param params: (DictData) A parameter data.
-    :param run_id: (str)
-    :param context: (DictData)
-    :param parent_run_id: (str | None)
-    :param event: (Event) An Event manager instance that use to cancel this
-        execution if it forces stopped by parent execution.
-
-    :raise JobError: If event was set.
-    :raise JobError: If stage execution raise any error as `StageError`.
-    :raise JobError: If the result from execution has `FAILED` status.
-
-    :rtype: tuple[Status, DictData]
+    Args:
+        job (Job): A job model that want to execute.
+        strategy (DictData): A strategy metrix value. This value will pass
+            to the `matrix` key for templating in context data.
+        params (DictData): A parameter data.
+        trace (Trace):
+        context (DictData):
+        event (Event): An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
+
+    Raises:
+        JobError: If event was set.
+        JobError: If stage execution raise any error as `StageError`.
+        JobError: If the result from execution has `FAILED` status.
+
+    Returns:
+        tuple[Status, DictData]: A pair of Status and DictData objects.
     """
-    trace: Trace = get_trace(
-        run_id, parent_run_id=parent_run_id, extras=job.extras
-    )
     if strategy:
         strategy_id: str = gen_id(strategy)
         trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
@@ -1071,7 +1220,7 @@ def local_execute_strategy(
             trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=current_context)
@@ -1133,11 +1282,12 @@ def local_execute_strategy(
     return status, context
 
 
-def local_execute(
+def local_process(
     job: Job,
     params: DictData,
+    run_id: str,
+    context: DictData,
     *,
-    run_id: StrOrNone = None,
     event: Optional[Event] = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
@@ -1155,21 +1305,23 @@ def local_execute(
         ]
     }
 
-    :param job: (Job) A job model.
-    :param params: (DictData) A parameter data.
-    :param run_id: (str) A job running ID.
-    :param event: (Event) An Event manager instance that use to cancel this
-        execution if it forces stopped by parent execution.
+    Args:
+        job (Job): A job model.
+        params (DictData): A parameter data.
+        run_id (str): A job running ID.
+        context (DictData):
+        event (Event, default None): An Event manager instance that use to
+            cancel this execution if it forces stopped by parent execution.
 
-    :rtype: Result
+    Returns:
+        Result: A job process result.
     """
-    ts: float = time.monotonic()
-    parent_run_id: StrOrNone = run_id
-    run_id: str = gen_id((job.id or "EMPTY"), unique=True)
+    parent_run_id, run_id = extract_id(
+        (job.id or "EMPTY"), run_id=run_id, extras=job.extras
+    )
     trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
-    context: DictData = {"status": WAIT}
     trace.info("[JOB]: Start Local executor.")
 
     if job.desc:
@@ -1182,7 +1334,6 @@ def local_execute(
         parent_run_id=parent_run_id,
         status=SKIP,
         context=catch(context, status=SKIP),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
 
@@ -1208,7 +1359,6 @@ def local_execute(
             status=FAILED,
             updated={"errors": to_dict(err)},
         ),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
     if workers >= 10:
@@ -1226,7 +1376,6 @@ def local_execute(
             status=FAILED,
             updated={"errors": JobError(err_msg).to_dict()},
         ),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
 
@@ -1252,20 +1401,18 @@ def local_execute(
                 ).to_dict()
             },
         ),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
 
     with ThreadPoolExecutor(workers, "jb_stg") as executor:
         futures: list[Future] = [
             executor.submit(
-                local_execute_strategy,
+                local_process_strategy,
                 job=job,
                 strategy=strategy,
                 params=params,
-                run_id=run_id,
+                trace=trace,
                 context=context,
-                parent_run_id=parent_run_id,
                 event=event,
             )
             for strategy in strategies
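Strategy execution keeps its fan-out shape: each matrix entry is submitted to a named `ThreadPoolExecutor`, now sharing the `trace` object instead of raw run IDs. A generic sketch of the same pattern, with `run_strategy` standing in for `local_process_strategy`:

```python
from concurrent.futures import Future, ThreadPoolExecutor, as_completed

def fan_out(run_strategy, strategies, trace, context, workers: int = 2):
    # One worker per matrix entry; every worker shares the trace and the
    # mutable context dict, mirroring the submit() call in local_process.
    with ThreadPoolExecutor(workers, "jb_stg") as executor:
        futures: list[Future] = [
            executor.submit(run_strategy, strategy=s, trace=trace, context=context)
            for s in strategies
        ]
        return [future.result() for future in as_completed(futures)]
```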
@@ -1314,11 +1461,10 @@ def local_execute(
     return Result.from_trace(trace).catch(
         status=status,
         context=catch(context, status=status, updated=errors),
-        info={"execution_time": time.monotonic() - ts},
     )
 
 
-def self_hosted_execute(
+def self_hosted_process(
     job: Job,
     params: DictData,
     *,
@@ -1329,13 +1475,15 @@ def self_hosted_execute(
     workflow execution or itself execution. It will make request to the
     self-hosted host url.
 
-    :param job: (Job) A job model that want to execute.
-    :param params: (DictData) A parameter data.
-    :param run_id: (str) A job running ID.
-    :param event: (Event) An Event manager instance that use to cancel this
-        execution if it forces stopped by parent execution.
+    Args:
+        job (Job): A job model that want to execute.
+        params (DictData): A parameter data.
+        run_id (str): A job running ID.
+        event (Event): An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
 
-    :rtype: Result
+    Returns:
+        Result: A Result object.
     """
     parent_run_id: StrOrNone = run_id
     run_id: str = gen_id((job.id or "EMPTY"), unique=True)
@@ -1402,11 +1550,7 @@ def self_hosted_execute(
     )
 
 
-# Azure Batch execution is now handled by the Azure Batch provider
-# See src/ddeutil/workflow/plugins/providers/az.py for implementation
-
-
-def docker_execution(
+def docker_process(
     job: Job,
     params: DictData,
     *,