ddeutil-workflow 0.0.83__py3-none-any.whl → 0.0.85__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/job.py CHANGED
@@ -56,8 +56,10 @@ from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
+from . import JobSkipError
 from .__types import DictData, DictStr, Matrix, StrOrNone
-from .errors import JobCancelError, JobError, to_dict
+from .conf import pass_env
+from .errors import JobCancelError, JobError, mark_errors, to_dict
 from .result import (
     CANCEL,
     FAILED,
@@ -452,41 +454,22 @@ class Job(BaseModel):
     execution, dependency management, conditional execution, and multienvironment
     deployment.
 
-    Jobs are the primary execution units within workflows, providing:
-    - Stage lifecycle management
-    - Execution environment abstraction
-    - Matrix strategy support for parallel execution
-    - Dependency resolution via job needs
-    - Output coordination between stages
-
-    Attributes:
-        id (str, optional): Unique job identifier within workflow
-        desc (str, optional): Job description in Markdown format
-        runs_on (RunsOnModel): Execution environment configuration
-        condition (str, optional): Conditional execution expression
-        stages (list[Stage]): Ordered list of stages to execute
-        trigger_rule (Rule): Rule for handling job dependencies
-        needs (list[str]): List of prerequisite job IDs
-        strategy (Strategy): Matrix strategy for parameterized execution
-        extras (dict): Additional configuration parameters
-
     Example:
-        ```python
-        job = Job(
-            id="data-processing",
-            desc="Process daily data files",
-            runs_on=OnLocal(),
-            stages=[
-                EmptyStage(name="Start", echo="Processing started"),
-                PyStage(name="Process", run="process_data()"),
-                EmptyStage(name="Complete", echo="Processing finished")
-            ],
-            strategy=Strategy(
-                matrix={'env': ['dev', 'prod']},
-                max_parallel=2
-            )
-        )
-        ```
+        >>> from ddeutil.workflow.stages import EmptyStage, PyStage
+        >>> job = Job(
+        ...     id="data-processing",
+        ...     desc="Process daily data files",
+        ...     runs_on=OnLocal(),
+        ...     stages=[
+        ...         EmptyStage(name="Start", echo="Processing started"),
+        ...         PyStage(name="Process", run="process_data()"),
+        ...         EmptyStage(name="Complete", echo="Processing finished")
+        ...     ],
+        ...     strategy=Strategy(
+        ...         matrix={'env': ['dev', 'prod']},
+        ...         max_parallel=2
+        ...     )
+        ... )
     """
 
     id: StrOrNone = Field(
@@ -514,6 +497,15 @@ class Job(BaseModel):
         default_factory=list,
         description="A list of Stage model of this job.",
     )
+    retry: int = Field(
+        default=0,
+        ge=0,
+        lt=20,
+        description=(
+            "The retry number if the job routing execution raises an "
+            "error, excluding the skip and cancel exception classes."
+        ),
+    )
     trigger_rule: Rule = Field(
         default=Rule.ALL_SUCCESS,
         validate_default=True,
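The new `retry` field bounds route-level retries at the job layer; the `Field` constraints allow 0 through 19. A minimal sketch of setting it, assuming the model's defaults for the remaining fields:

```python
from ddeutil.workflow.job import Job

# Hypothetical usage: retry the routed job execution up to 3 times on
# errors other than the skip/cancel exception classes.
job = Job(id="retryable-job", retry=3)
```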
@@ -751,7 +743,7 @@ class Job(BaseModel):
         # should use the `re` module to validate eval-string before
         # running.
         rs: bool = eval(
-            param2template(self.condition, params, extras=self.extras),
+            self.pass_template(self.condition, params),
             globals() | params,
             {},
         )
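For context, the surrounding code renders the condition template and then evaluates it with `eval`. A standalone sketch of that pattern, with plain `str.format` standing in for the library's templating and empty globals rather than `globals() | params`:

```python
# Render a condition template against params, then evaluate it as Python.
params = {"name": "daily"}
condition = "'{name}' == 'daily'"      # hypothetical pre-rendered template
rendered = condition.format(**params)  # stand-in for self.pass_template(...)
rs: bool = eval(rendered, {}, {})      # the job is skipped when this is False
assert rs is True
```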
@@ -802,8 +794,9 @@ class Job(BaseModel):
         extract from the result context if it exists. If it does not found, it
         will not set on the received context.
 
-        :raise JobError: If the job's ID does not set and the setting
-            default job ID flag does not set.
+        Raises:
+            JobError: If the job's ID is not set and the default job ID
+                setting flag is not set.
 
         Args:
             output: (DictData) A result data context that want to extract
@@ -854,8 +847,9 @@ class Job(BaseModel):
         """Get the outputs from jobs data. It will get this job ID or passing
         custom ID from the job outputs mapping.
 
-        :param output: (DictData) A job outputs data that want to extract
-        :param job_id: (StrOrNone) A job ID if the `id` field does not set.
+        Args:
+            output (DictData): A job outputs data to extract from.
+            job_id (StrOrNone): A job ID if the `id` field is not set.
 
         :rtype: DictData
         """
@@ -865,34 +859,31 @@ class Job(BaseModel):
         else:
             return output.get("jobs", {}).get(_id, {})
 
-    def execute(
+    def pass_template(self, value: Any, params: DictData) -> Any:
+        """Pass template parameters and environment variables into any
+        value that supports templating.
+
+        Args:
+            value (Any): Any value.
+            params (DictData): A parameter data to use in this execution.
+
+        Returns:
+            Any: A templated value.
+        """
+        return pass_env(param2template(value, params, extras=self.extras))
+
+    def process(
         self,
         params: DictData,
-        *,
-        run_id: StrOrNone = None,
+        run_id: str,
+        context: DictData,
+        parent_run_id: Optional[str] = None,
         event: Optional[Event] = None,
     ) -> Result:
-        """Job execution with passing dynamic parameters from the workflow
-        execution. It will generate matrix values at the first step and run
-        multithread on this metrics to the `stages` field of this job.
-
-        This method be execution routing for call dynamic execution function
-        with specific target `runs-on` value.
-
-        Args
-            params: (DictData) A parameter context that also pass from the
-                workflow execute method.
-            run_id: (str) An execution running ID.
-            event: (Event) An Event manager instance that use to cancel this
-                execution if it forces stopped by parent execution.
-
-        Returns
-            Result: Return Result object that create from execution context.
+        """Process routing method that routes to the provider function
+        depending on the runs-on value.
         """
-        ts: float = time.monotonic()
-        parent_run_id, run_id = extract_id(
-            (self.id or "EMPTY"), run_id=run_id, extras=self.extras
-        )
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
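The new `pass_template` helper is just a composition of parameter templating and environment-variable resolution. A self-contained sketch of that composition, where `render_params` is a stand-in for the library's `param2template` and the `Template`/`expandvars` mechanics are illustrative, not the real implementations:

```python
import os
from string import Template

def render_params(value: str, params: dict) -> str:
    # Stand-in for param2template: substitute workflow params into the value.
    return Template(value).safe_substitute(params)

def pass_env(value: str) -> str:
    # Stand-in for conf.pass_env: resolve ${VAR} environment references.
    return os.path.expandvars(value)

os.environ["DATA_DIR"] = "/tmp/data"
print(pass_env(render_params("$name reads ${DATA_DIR}", {"name": "daily"})))
# -> "daily reads /tmp/data"
```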
@@ -901,105 +892,240 @@ class Job(BaseModel):
             f"{''.join(self.runs_on.type.value.split('_')).title()}: "
             f"{self.id!r}"
         )
-
+        rs: Optional[Result] = None
         if self.runs_on.type == LOCAL:
-            return local_execute(
+            rs = local_process(
                 self,
                 params,
+                context=context,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == SELF_HOSTED:  # pragma: no cov
             pass
         elif self.runs_on.type == AZ_BATCH:  # pragma: no cov
             from .plugins.providers.az import azure_batch_execute
 
-            return azure_batch_execute(
+            rs = azure_batch_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == DOCKER:  # pragma: no cov
-            return docker_execution(
+            rs = docker_process(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == CONTAINER:  # pragma: no cov
             from .plugins.providers.container import container_execute
 
-            return container_execute(
+            rs = container_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == AWS_BATCH:  # pragma: no cov
             from .plugins.providers.aws import aws_batch_execute
 
-            return aws_batch_execute(
+            rs = aws_batch_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
         elif self.runs_on.type == GCP_BATCH:  # pragma: no cov
             from .plugins.providers.gcs import gcp_batch_execute
 
-            return gcp_batch_execute(
+            rs = gcp_batch_execute(
                 self,
                 params,
                 run_id=parent_run_id,
                 event=event,
-            ).make_info({"execution_time": time.monotonic() - ts})
+            )
+        if rs is None:
+            trace.error(
+                f"[JOB]: Execution does not support runs-on: "
+                f"{self.runs_on.type.value!r} yet."
+            )
+            return Result(
+                status=FAILED,
+                run_id=run_id,
+                parent_run_id=parent_run_id,
+                context={
+                    "status": FAILED,
+                    "errors": JobError(
+                        f"Job runs-on type: {self.runs_on.type.value!r} is "
+                        f"not supported yet."
+                    ).to_dict(),
+                },
+                extras=self.extras,
+            )
+        if rs.status in (CANCEL, SKIP):
+            trace.debug(
+                f"[JOB]: Job process routing got result status: {rs.status}"
+            )
+        elif rs.status == FAILED:
+            raise JobError("[JOB]: Job process error")
+        return rs
 
-        trace.error(
-            f"[JOB]: Execution not support runs-on: {self.runs_on.type.value!r} "
-            f"yet."
+    def _execute(
+        self,
+        params: DictData,
+        run_id: str,
+        context: DictData,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+    ) -> Result:
+        """Wrap the route process method before returning to the handler
+        execution.
+
+        This method applies the retry strategy to the process routing
+        method.
+
+        Args:
+            params: A parameter data to use in this execution.
+            run_id: A running ID.
+            context: A shared context data.
+            parent_run_id: A parent running ID.
+            event: An Event manager instance that is used to cancel this
+                execution if it is force-stopped by the parent execution.
+
+        Returns:
+            Result: The wrapped execution result.
+        """
+        current_retry: int = 0
+        exception: Exception
+        catch(context, status=WAIT)
+        trace: Trace = get_trace(
+            run_id, parent_run_id=parent_run_id, extras=self.extras
         )
-        return Result(
-            status=FAILED,
-            run_id=run_id,
-            parent_run_id=parent_run_id,
-            context={
-                "status": FAILED,
-                "errors": JobError(
-                    f"Execute runs-on type: {self.runs_on.type.value!r} does "
-                    f"not support yet."
-                ).to_dict(),
-            },
-            info={"execution_time": time.monotonic() - ts},
-            extras=self.extras,
+        try:
+            return self.process(
+                params,
+                run_id,
+                context=context,
+                parent_run_id=parent_run_id,
+                event=event,
+            )
+        except (JobCancelError, JobSkipError):
+            trace.debug("[JOB]: Process raised a skip or cancel error.")
+            raise
+        except Exception as e:
+            # NOTE: Keep the first failure before entering the retry loop.
+            current_retry += 1
+            exception = e
+            trace.debug("[JOB]: Failed at the first execution.")
+
+        if self.retry == 0:
+            raise exception
+
+        trace.warning(
+            f"[JOB]: Retry count: {current_retry} ... "
+            f"( {exception.__class__.__name__} )"
         )
+        while current_retry < (self.retry + 1):
+            try:
+                catch(
+                    context=context,
+                    status=WAIT,
+                    updated={"retry": current_retry},
+                )
+                return self.process(
+                    params,
+                    run_id,
+                    context=context,
+                    parent_run_id=parent_run_id,
+                    event=event,
+                )
+            except (JobCancelError, JobSkipError):
+                trace.debug("[JOB]: Process raised a skip or cancel error.")
+                raise
+            except Exception as e:
+                current_retry += 1
+                trace.warning(
+                    f"[JOB]: Retry count: {current_retry} ... "
+                    f"( {e.__class__.__name__} )"
+                )
+                exception = e
+                time.sleep(1.2**current_retry)
 
+        trace.error(f"[JOB]: Reached the maximum retry number: {self.retry}.")
+        raise exception
 
-def mark_errors(context: DictData, error: JobError) -> None:
-    """Make the errors context result with the refs value depends on the nested
-    execute func.
+    def execute(
+        self,
+        params: DictData,
+        *,
+        run_id: StrOrNone = None,
+        event: Optional[Event] = None,
+    ) -> Result:
+        """Job execution with passing dynamic parameters from the workflow
+        execution. It will generate matrix values at the first step and run
+        multithreaded over this matrix against the `stages` field of this job.
 
-    :param context: (DictData) A context data.
-    :param error: (JobError) A stage exception object.
-    """
-    if "errors" in context:
-        context["errors"][error.refs] = error.to_dict()
-    else:
-        context["errors"] = error.to_dict(with_refs=True)
+        This method routes execution to the dynamic execution function for
+        the specific target `runs-on` value.
+
+        Args:
+            params: (DictData) A parameter context that also passes from the
+                workflow execute method.
+            run_id: (str) An execution running ID.
+            event: (Event) An Event manager instance that is used to cancel
+                this execution if it is force-stopped by the parent execution.
+
+        Returns:
+            Result: A Result object created from the execution context.
+        """
+        ts: float = time.monotonic()
+        parent_run_id, run_id = extract_id(
+            (self.id or "EMPTY"), run_id=run_id, extras=self.extras
+        )
+        trace: Trace = get_trace(
+            run_id, parent_run_id=parent_run_id, extras=self.extras
+        )
+        context: DictData = {"status": WAIT}
+        try:
+            trace.info(
+                f"[JOB]: Handler {self.runs_on.type.name}: "
+                f"{(self.id or 'EMPTY')!r}."
+            )
+            result_caught: Result = self._execute(
+                params,
+                run_id=run_id,
+                context=context,
+                parent_run_id=parent_run_id,
+                event=event,
+            )
+            return result_caught.make_info(
+                {"execution_time": time.monotonic() - ts}
+            )
+        except JobError:  # pragma: no cov
+            return Result.from_trace(trace).catch(
+                status=FAILED,
+                context=catch(context, status=FAILED),
+                info={"execution_time": time.monotonic() - ts},
+            )
+        finally:
+            trace.debug("[JOB]: End Handler job execution.")
 
 
 def pop_stages(context: DictData) -> DictData:
+    """Pop the stages key from the context data. It returns an empty dict
+    if the key does not exist.
+    """
     return filter_func(context.pop("stages", {}))
 
 
-def local_execute_strategy(
+def local_process_strategy(
     job: Job,
     strategy: DictData,
     params: DictData,
-    run_id: str,
+    trace: Trace,
     context: DictData,
     *,
-    parent_run_id: Optional[str] = None,
     event: Optional[Event] = None,
 ) -> tuple[Status, DictData]:
     """Local strategy execution with passing dynamic parameters from the
@@ -1014,25 +1140,24 @@ def local_execute_strategy(
     For each stage that execution with this strategy metrix, it will use the
     `set_outputs` method for reconstruct result context data.
 
-    :param job: (Job) A job model that want to execute.
-    :param strategy: (DictData) A strategy metrix value. This value will pass
-        to the `matrix` key for templating in context data.
-    :param params: (DictData) A parameter data.
-    :param run_id: (str)
-    :param context: (DictData)
-    :param parent_run_id: (str | None)
-    :param event: (Event) An Event manager instance that use to cancel this
-        execution if it forces stopped by parent execution.
-
-    :raise JobError: If event was set.
-    :raise JobError: If stage execution raise any error as `StageError`.
-    :raise JobError: If the result from execution has `FAILED` status.
-
-    :rtype: tuple[Status, DictData]
+    Args:
+        job (Job): A job model to execute.
+        strategy (DictData): A strategy matrix value. This value will be
+            passed to the `matrix` key for templating in the context data.
+        params (DictData): A parameter data.
+        trace (Trace): A Trace object for logging.
+        context (DictData): A shared context data.
+        event (Event): An Event manager instance that is used to cancel this
+            execution if it is force-stopped by the parent execution.
+
+    Raises:
+        JobError: If the event was set.
+        JobError: If a stage execution raises any error as `StageError`.
+        JobError: If the execution result has `FAILED` status.
+
+    Returns:
+        tuple[Status, DictData]: A pair of Status and context data.
     """
-    trace: Trace = get_trace(
-        run_id, parent_run_id=parent_run_id, extras=job.extras
-    )
     if strategy:
         strategy_id: str = gen_id(strategy)
         trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
@@ -1071,7 +1196,7 @@ def local_execute_strategy(
             trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
             rs: Result = stage.execute(
                 params=current_context,
-                run_id=parent_run_id,
+                run_id=trace.parent_run_id,
                 event=event,
             )
             stage.set_outputs(rs.context, to=current_context)
@@ -1133,11 +1258,12 @@ def local_execute_strategy(
     return status, context
 
 
-def local_execute(
+def local_process(
     job: Job,
     params: DictData,
+    run_id: str,
+    context: DictData,
     *,
-    run_id: StrOrNone = None,
     event: Optional[Event] = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
@@ -1155,21 +1281,23 @@ def local_execute(
         ]
     }
 
-    :param job: (Job) A job model.
-    :param params: (DictData) A parameter data.
-    :param run_id: (str) A job running ID.
-    :param event: (Event) An Event manager instance that use to cancel this
-        execution if it forces stopped by parent execution.
+    Args:
+        job (Job): A job model.
+        params (DictData): A parameter data.
+        run_id (str): A job running ID.
+        context (DictData): A shared context data.
+        event (Event, default None): An Event manager instance that is used
+            to cancel this execution if it is force-stopped by the parent
+            execution.
 
-    :rtype: Result
+    Returns:
+        Result: A job process result.
     """
-    ts: float = time.monotonic()
-    parent_run_id: StrOrNone = run_id
-    run_id: str = gen_id((job.id or "EMPTY"), unique=True)
+    parent_run_id, run_id = extract_id(
+        (job.id or "EMPTY"), run_id=run_id, extras=job.extras
+    )
     trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
-    context: DictData = {"status": WAIT}
     trace.info("[JOB]: Start Local executor.")
 
     if job.desc:
@@ -1182,7 +1310,6 @@ def local_execute(
         parent_run_id=parent_run_id,
         status=SKIP,
         context=catch(context, status=SKIP),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
 
@@ -1208,7 +1335,6 @@ def local_execute(
             status=FAILED,
             updated={"errors": to_dict(err)},
         ),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
     if workers >= 10:
@@ -1226,7 +1352,6 @@ def local_execute(
             status=FAILED,
             updated={"errors": JobError(err_msg).to_dict()},
         ),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
 
@@ -1252,20 +1377,18 @@ def local_execute(
                 ).to_dict()
             },
         ),
-        info={"execution_time": time.monotonic() - ts},
         extras=job.extras,
     )
 
     with ThreadPoolExecutor(workers, "jb_stg") as executor:
         futures: list[Future] = [
             executor.submit(
-                local_execute_strategy,
+                local_process_strategy,
                 job=job,
                 strategy=strategy,
                 params=params,
-                run_id=run_id,
+                trace=trace,
                 context=context,
-                parent_run_id=parent_run_id,
                 event=event,
             )
             for strategy in strategies
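The hunk above fans each strategy out to a worker thread, now sharing a single `trace` instead of per-call run IDs. The submission pattern, reduced to a standalone sketch with a stand-in for `local_process_strategy`:

```python
from concurrent.futures import Future, ThreadPoolExecutor, as_completed

def process_strategy(strategy: dict) -> dict:
    # Stand-in for local_process_strategy.
    return {"strategy": strategy, "status": "SUCCESS"}

strategies = [{"env": "dev"}, {"env": "prod"}]
with ThreadPoolExecutor(2, "jb_stg") as executor:
    futures: list[Future] = [
        executor.submit(process_strategy, s) for s in strategies
    ]
    for future in as_completed(futures):
        print(future.result())
```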
@@ -1314,11 +1437,10 @@ def local_execute(
     return Result.from_trace(trace).catch(
         status=status,
         context=catch(context, status=status, updated=errors),
-        info={"execution_time": time.monotonic() - ts},
     )
 
 
-def self_hosted_execute(
+def self_hosted_process(
     job: Job,
     params: DictData,
     *,
@@ -1329,13 +1451,15 @@ def self_hosted_execute(
     workflow execution or itself execution. It will make request to the
     self-hosted host url.
 
-    :param job: (Job) A job model that want to execute.
-    :param params: (DictData) A parameter data.
-    :param run_id: (str) A job running ID.
-    :param event: (Event) An Event manager instance that use to cancel this
-        execution if it forces stopped by parent execution.
+    Args:
+        job (Job): A job model to execute.
+        params (DictData): A parameter data.
+        run_id (str): A job running ID.
+        event (Event): An Event manager instance that is used to cancel this
+            execution if it is force-stopped by the parent execution.
 
-    :rtype: Result
+    Returns:
+        Result: A Result object.
     """
     parent_run_id: StrOrNone = run_id
     run_id: str = gen_id((job.id or "EMPTY"), unique=True)
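The docstring says the self-hosted path posts the job to a remote host URL, but the request body is outside this diff. The following is only a guess at the general shape; the endpoint path, payload keys, and use of `requests` are all assumptions:

```python
import requests

def submit_self_hosted(host: str, job_id: str, params: dict) -> bool:
    # Hypothetical payload; the real schema is not shown in this diff.
    resp = requests.post(
        f"{host.rstrip('/')}/execute",
        json={"job_id": job_id, "params": params},
        timeout=30,
    )
    return resp.status_code == 200
```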
@@ -1402,11 +1526,7 @@ def self_hosted_execute(
     )
 
 
-# Azure Batch execution is now handled by the Azure Batch provider
-# See src/ddeutil/workflow/plugins/providers/az.py for implementation
-
-
-def docker_execution(
+def docker_process(
     job: Job,
     params: DictData,
     *,
ddeutil/workflow/plugins/providers/az.py CHANGED
@@ -89,9 +89,9 @@ try:
     from azure.core.exceptions import AzureError
     from azure.storage.blob import BlobServiceClient
 
-    AZURE_AVAILABLE = True
+    AZURE_AVAILABLE: bool = True
 except ImportError:
-    AZURE_AVAILABLE = False
+    AZURE_AVAILABLE: bool = False
 
 from pydantic import BaseModel, Field
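The annotation change above sits on the standard optional-dependency guard. The pattern in isolation, with a hypothetical helper showing how such a flag is usually consumed:

```python
try:
    from azure.storage.blob import BlobServiceClient  # noqa: F401

    AZURE_AVAILABLE: bool = True
except ImportError:
    AZURE_AVAILABLE: bool = False

def require_azure() -> None:
    # Hypothetical guard; the error message is an assumption, not from the diff.
    if not AZURE_AVAILABLE:
        raise ImportError("Azure provider requires the azure optional extras.")
```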