ddeutil-workflow 0.0.73__py3-none-any.whl → 0.0.75__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/job.py CHANGED
@@ -3,17 +3,33 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
- """Job model that use for store Stage models and node parameter that use for
7
- running these stages. The job model handle the lineage of stages and location of
8
- execution that mean you can define `runs-on` field with the Self-Hosted mode
9
- for execute on target machine instead of the current local machine.
10
-
11
- This module include Strategy model that use on the job `strategy` field for
12
- making matrix values before execution parallelism stage execution.
13
-
14
- The Job model does not implement `handler_execute` same as Stage model
15
- because the job should raise only `JobError` class from the execution
16
- method.
6
+ """Job Execution Module.
7
+
8
+ This module contains the Job model and related components for managing stage
9
+ execution, execution strategies, and job orchestration within workflows.
10
+
11
+ The Job model serves as a container for Stage models and handles the execution
12
+ lifecycle, dependency management, and output coordination. It supports various
13
+ execution environments through the runs-on configuration.
14
+
15
+ Key Features:
16
+ - Stage execution orchestration
17
+ - Matrix strategy for parameterized execution
18
+ - Multi-environment support (local, self-hosted, Docker, Azure Batch)
19
+ - Dependency management via job needs
20
+ - Conditional execution support
21
+ - Parallel execution capabilities
22
+
23
+ Classes:
24
+ Job: Main job execution container
25
+ Strategy: Matrix strategy for parameterized execution
26
+ Rule: Trigger rules for job execution
27
+ RunsOn: Execution environment enumeration
28
+ BaseRunsOn: Base class for execution environments
29
+
30
+ Note:
31
+ Jobs raise JobError on execution failures, providing consistent error
32
+ handling across the workflow system.
17
33
  """
18
34
  from __future__ import annotations
19
35
 
@@ -49,11 +65,13 @@ from .result import (
49
65
  WAIT,
50
66
  Result,
51
67
  Status,
68
+ catch,
52
69
  get_status_from_error,
53
70
  validate_statuses,
54
71
  )
55
72
  from .reusables import has_template, param2template
56
73
  from .stages import Stage
74
+ from .traces import Trace, get_trace
57
75
  from .utils import cross_product, filter_func, gen_id
58
76
 
59
77
  MatrixFilter = list[dict[str, Union[str, int]]]
@@ -123,27 +141,41 @@ def make(
123
141
 
124
142
 
125
143
  class Strategy(BaseModel):
126
- """Strategy model that will combine a matrix together for running the
127
- special job with combination of matrix data.
128
-
129
- This model does not be the part of job only because you can use it to
130
- any model object. The objective of this model is generating metrix result
131
- that comming from combination logic with any matrix values for running it
132
- with parallelism.
133
-
134
- [1, 2, 3] x [a, b] --> [1a], [1b], [2a], [2b], [3a], [3b]
135
-
136
- Data Validate:
137
- >>> strategy = {
138
- ... 'max-parallel': 1,
139
- ... 'fail-fast': False,
140
- ... 'matrix': {
141
- ... 'first': [1, 2, 3],
142
- ... 'second': ['foo', 'bar'],
143
- ... },
144
- ... 'include': [{'first': 4, 'second': 'foo'}],
145
- ... 'exclude': [{'first': 1, 'second': 'bar'}],
146
- ... }
144
+ """Matrix strategy model for parameterized job execution.
145
+
146
+ The Strategy model generates combinations of matrix values to enable
147
+ parallel execution of jobs with different parameter sets. It supports
148
+ cross-product generation, inclusion of specific combinations, and
149
+ exclusion of unwanted combinations.
150
+
151
+ This model can be used independently or as part of job configuration
152
+ to create multiple execution contexts from a single job definition.
153
+
154
+ Matrix Combination Logic:
155
+ [1, 2, 3] × [a, b] → [1a], [1b], [2a], [2b], [3a], [3b]
156
+
157
+ Attributes:
158
+ fail_fast (bool): Cancel remaining executions on first failure
159
+ max_parallel (int): Maximum concurrent executions (1-9)
160
+ matrix (dict): Base matrix values for cross-product generation
161
+ include (list): Additional specific combinations to include
162
+ exclude (list): Specific combinations to exclude from results
163
+
164
+ Example:
165
+ ```python
166
+ strategy = Strategy(
167
+ max_parallel=2,
168
+ fail_fast=True,
169
+ matrix={
170
+ 'python_version': ['3.9', '3.10', '3.11'],
171
+ 'os': ['ubuntu', 'windows']
172
+ },
173
+ include=[{'python_version': '3.12', 'os': 'ubuntu'}],
174
+ exclude=[{'python_version': '3.9', 'os': 'windows'}]
175
+ )
176
+
177
+ combinations = strategy.make() # Returns list of parameter dicts
178
+ ```
147
179
  """
148
180
 
149
181
  fail_fast: bool = Field(
@@ -182,7 +214,8 @@ class Strategy(BaseModel):
182
214
  def is_set(self) -> bool:
183
215
  """Return True if this strategy was set from yaml template.
184
216
 
185
- :rtype: bool
217
+ Returns:
218
+ bool: True if matrix has been configured, False otherwise.
186
219
  """
187
220
  return len(self.matrix) > 0
188
221
 
@@ -190,7 +223,8 @@ class Strategy(BaseModel):
190
223
  """Return List of product of matrix values that already filter with
191
224
  exclude and add include.
192
225
 
193
- :rtype: list[DictStr]
226
+ Returns:
227
+ list[DictStr]: List of parameter combinations from matrix strategy.
194
228
  """
195
229
  return make(self.matrix, self.include, self.exclude)
196
230
 
@@ -213,9 +247,17 @@ class RunsOn(str, Enum):
213
247
  LOCAL = "local"
214
248
  SELF_HOSTED = "self_hosted"
215
249
  AZ_BATCH = "azure_batch"
250
+ AWS_BATCH = "aws_batch"
251
+ CLOUD_BATCH = "cloud_batch"
216
252
  DOCKER = "docker"
217
253
 
218
254
 
255
+ LOCAL = RunsOn.LOCAL
256
+ SELF_HOSTED = RunsOn.SELF_HOSTED
257
+ AZ_BATCH = RunsOn.AZ_BATCH
258
+ DOCKER = RunsOn.DOCKER
259
+
260
+
219
261
  class BaseRunsOn(BaseModel): # pragma: no cov
220
262
  """Base Runs-On Model for generate runs-on types via inherit this model
221
263
  object and override execute method.
@@ -297,7 +339,7 @@ class OnDocker(BaseRunsOn): # pragma: no cov
297
339
 
298
340
  def get_discriminator_runs_on(model: dict[str, Any]) -> RunsOn:
299
341
  """Get discriminator of the RunsOn models."""
300
- t = model.get("type")
342
+ t: str = model.get("type")
301
343
  return RunsOn(t) if t else RunsOn.LOCAL
302
344
 
303
345
 
@@ -312,30 +354,48 @@ RunsOnModel = Annotated[
312
354
 
313
355
 
314
356
  class Job(BaseModel):
315
- """Job Pydantic model object (short descripte: a group of stages).
316
-
317
- This job model allow you to use for-loop that call matrix strategy. If
318
- you pass matrix mapping, and it is able to generate, you will see it running
319
- with loop of matrix values.
320
-
321
- Data Validate:
322
- >>> job = {
323
- ... "runs-on": {"type": "local"},
324
- ... "strategy": {
325
- ... "max-parallel": 1,
326
- ... "matrix": {
327
- ... "first": [1, 2, 3],
328
- ... "second": ['foo', 'bar'],
329
- ... },
330
- ... },
331
- ... "needs": [],
332
- ... "stages": [
333
- ... {
334
- ... "name": "Some stage",
335
- ... "run": "print('Hello World')",
336
- ... },
337
- ... ],
338
- ... }
357
+ """Job execution container for stage orchestration.
358
+
359
+ The Job model represents a logical unit of work containing multiple stages
360
+ that execute sequentially. Jobs support matrix strategies for parameterized
361
+ execution, dependency management, conditional execution, and multi-environment
362
+ deployment.
363
+
364
+ Jobs are the primary execution units within workflows, providing:
365
+ - Stage lifecycle management
366
+ - Execution environment abstraction
367
+ - Matrix strategy support for parallel execution
368
+ - Dependency resolution via job needs
369
+ - Output coordination between stages
370
+
371
+ Attributes:
372
+ id (str, optional): Unique job identifier within workflow
373
+ desc (str, optional): Job description in Markdown format
374
+ runs_on (RunsOnModel): Execution environment configuration
375
+ condition (str, optional): Conditional execution expression
376
+ stages (list[Stage]): Ordered list of stages to execute
377
+ trigger_rule (Rule): Rule for handling job dependencies
378
+ needs (list[str]): List of prerequisite job IDs
379
+ strategy (Strategy): Matrix strategy for parameterized execution
380
+ extras (dict): Additional configuration parameters
381
+
382
+ Example:
383
+ ```python
384
+ job = Job(
385
+ id="data-processing",
386
+ desc="Process daily data files",
387
+ runs_on=OnLocal(),
388
+ stages=[
389
+ EmptyStage(name="Start", echo="Processing started"),
390
+ PyStage(name="Process", run="process_data()"),
391
+ EmptyStage(name="Complete", echo="Processing finished")
392
+ ],
393
+ strategy=Strategy(
394
+ matrix={'env': ['dev', 'prod']},
395
+ max_parallel=2
396
+ )
397
+ )
398
+ ```
339
399
  """
340
400
 
341
401
  id: StrOrNone = Field(
@@ -694,7 +754,6 @@ class Job(BaseModel):
694
754
  params: DictData,
695
755
  *,
696
756
  run_id: StrOrNone = None,
697
- parent_run_id: StrOrNone = None,
698
757
  event: Optional[Event] = None,
699
758
  ) -> Result:
700
759
  """Job execution with passing dynamic parameters from the workflow
@@ -704,58 +763,65 @@ class Job(BaseModel):
704
763
  This method be execution routing for call dynamic execution function
705
764
  with specific target `runs-on` value.
706
765
 
707
- :param params: (DictData) A parameter data.
708
- :param run_id: (str) A job running ID.
709
- :param parent_run_id: (str) A parent running ID.
710
- :param event: (Event) An Event manager instance that use to cancel this
711
- execution if it forces stopped by parent execution.
766
+ Args:
767
+ params: (DictData) A parameter context that also pass from the
768
+ workflow execute method.
769
+ run_id: (str) An execution running ID.
770
+ event: (Event) An Event manager instance that use to cancel this
771
+ execution if it forces stopped by parent execution.
712
772
 
713
- :rtype: Result
773
+ Returns:
774
+ Result: Return Result object that create from execution context.
714
775
  """
715
- result: Result = Result.construct_with_rs_or_id(
716
- run_id=run_id,
717
- parent_run_id=parent_run_id,
718
- id_logic=(self.id or "EMPTY"),
719
- extras=self.extras,
776
+ ts: float = time.monotonic()
777
+ parent_run_id: str = run_id
778
+ run_id: str = gen_id((self.id or "EMPTY"), unique=True)
779
+ trace: Trace = get_trace(
780
+ run_id, parent_run_id=parent_run_id, extras=self.extras
720
781
  )
721
-
722
- result.trace.info(
782
+ trace.info(
723
783
  f"[JOB]: Routing for "
724
784
  f"{''.join(self.runs_on.type.value.split('_')).title()}: "
725
785
  f"{self.id!r}"
726
786
  )
727
787
 
728
- if self.runs_on.type == RunsOn.LOCAL:
788
+ if self.runs_on.type == LOCAL:
729
789
  return local_execute(
730
790
  self,
731
791
  params,
732
- run_id=run_id,
733
- parent_run_id=parent_run_id,
792
+ run_id=parent_run_id,
734
793
  event=event,
735
- )
736
- elif self.runs_on.type == RunsOn.SELF_HOSTED: # pragma: no cov
794
+ ).make_info({"execution_time": time.monotonic() - ts})
795
+ elif self.runs_on.type == SELF_HOSTED: # pragma: no cov
796
+ pass
797
+ elif self.runs_on.type == AZ_BATCH: # pragma: no cov
737
798
  pass
738
- elif self.runs_on.type == RunsOn.DOCKER: # pragma: no cov
739
- docker_execution(
799
+ elif self.runs_on.type == DOCKER: # pragma: no cov
800
+ return docker_execution(
740
801
  self,
741
802
  params,
742
803
  run_id=run_id,
743
804
  parent_run_id=parent_run_id,
744
805
  event=event,
745
- )
806
+ ).make_info({"execution_time": time.monotonic() - ts})
746
807
 
747
- result.trace.error(
808
+ trace.error(
748
809
  f"[JOB]: Execution not support runs-on: {self.runs_on.type.value!r} "
749
810
  f"yet."
750
811
  )
751
- return result.catch(
812
+ return Result(
752
813
  status=FAILED,
814
+ run_id=run_id,
815
+ parent_run_id=parent_run_id,
753
816
  context={
817
+ "status": FAILED,
754
818
  "errors": JobError(
755
819
  f"Execute runs-on type: {self.runs_on.type.value!r} does "
756
820
  f"not support yet."
757
821
  ).to_dict(),
758
822
  },
823
+ info={"execution_time": time.monotonic() - ts},
824
+ extras=self.extras,
759
825
  )
760
826
 
761
827
 
@@ -772,14 +838,20 @@ def mark_errors(context: DictData, error: JobError) -> None:
772
838
  context["errors"] = error.to_dict(with_refs=True)
773
839
 
774
840
 
841
+ def pop_stages(context: DictData) -> DictData:
842
+ return filter_func(context.pop("stages", {}))
843
+
844
+
775
845
  def local_execute_strategy(
776
846
  job: Job,
777
847
  strategy: DictData,
778
848
  params: DictData,
849
+ run_id: str,
850
+ context: DictData,
779
851
  *,
780
- result: Optional[Result] = None,
852
+ parent_run_id: Optional[str] = None,
781
853
  event: Optional[Event] = None,
782
- ) -> tuple[Status, Result]:
854
+ ) -> tuple[Status, DictData]:
783
855
  """Local strategy execution with passing dynamic parameters from the
784
856
  job execution and strategy matrix.
785
857
 
@@ -796,7 +868,9 @@ def local_execute_strategy(
796
868
  :param strategy: (DictData) A strategy metrix value. This value will pass
797
869
  to the `matrix` key for templating in context data.
798
870
  :param params: (DictData) A parameter data.
799
- :param result: (Result) A Result instance for return context and status.
871
+ :param run_id: (str) A running ID of this strategy execution that use for tracing.
872
+ :param context: (DictData) A shared context data that this execution updates with the strategy result.
873
+ :param parent_run_id: (str | None) A parent running ID that use for tracing.
800
874
  :param event: (Event) An Event manager instance that use to cancel this
801
875
  execution if it forces stopped by parent execution.
802
876
 
@@ -804,21 +878,20 @@ def local_execute_strategy(
804
878
  :raise JobError: If stage execution raise any error as `StageError`.
805
879
  :raise JobError: If the result from execution has `FAILED` status.
806
880
 
807
- :rtype: tuple[Status, Result]
881
+ :rtype: tuple[Status, DictData]
808
882
  """
809
- result: Result = result or Result(
810
- run_id=gen_id(job.id or "EMPTY", unique=True),
811
- extras=job.extras,
883
+ trace: Trace = get_trace(
884
+ run_id, parent_run_id=parent_run_id, extras=job.extras
812
885
  )
813
886
  if strategy:
814
887
  strategy_id: str = gen_id(strategy)
815
- result.trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
816
- result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
888
+ trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
889
+ trace.info(f"[JOB]: ... matrix: {strategy!r}")
817
890
  else:
818
891
  strategy_id: str = "EMPTY"
819
892
 
820
- context: DictData = copy.deepcopy(params)
821
- context.update({"matrix": strategy, "stages": {}})
893
+ current_context: DictData = copy.deepcopy(params)
894
+ current_context.update({"matrix": strategy, "stages": {}})
822
895
  total_stage: int = len(job.stages)
823
896
  skips: list[bool] = [False] * total_stage
824
897
  for i, stage in enumerate(job.stages, start=0):
@@ -831,27 +904,27 @@ def local_execute_strategy(
831
904
  "Strategy execution was canceled from the event before "
832
905
  "start stage execution."
833
906
  )
834
- result.catch(
907
+ catch(
908
+ context=context,
835
909
  status=CANCEL,
836
- context={
910
+ updated={
837
911
  strategy_id: {
838
912
  "status": CANCEL,
839
913
  "matrix": strategy,
840
- "stages": filter_func(context.pop("stages", {})),
914
+ "stages": pop_stages(current_context),
841
915
  "errors": JobCancelError(error_msg).to_dict(),
842
916
  },
843
917
  },
844
918
  )
845
919
  raise JobCancelError(error_msg, refs=strategy_id)
846
920
 
847
- result.trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
848
- rs: Result = stage.handler_execute(
849
- params=context,
850
- run_id=result.run_id,
851
- parent_run_id=result.parent_run_id,
921
+ trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
922
+ rs: Result = stage.execute(
923
+ params=current_context,
924
+ run_id=parent_run_id,
852
925
  event=event,
853
926
  )
854
- stage.set_outputs(rs.context, to=context)
927
+ stage.set_outputs(rs.context, to=current_context)
855
928
 
856
929
  if rs.status == SKIP:
857
930
  skips[i] = True
@@ -862,13 +935,14 @@ def local_execute_strategy(
862
935
  f"Strategy execution was break because its nested-stage, "
863
936
  f"{stage.iden!r}, failed."
864
937
  )
865
- result.catch(
938
+ catch(
939
+ context=context,
866
940
  status=FAILED,
867
- context={
941
+ updated={
868
942
  strategy_id: {
869
943
  "status": FAILED,
870
944
  "matrix": strategy,
871
- "stages": filter_func(context.pop("stages", {})),
945
+ "stages": pop_stages(current_context),
872
946
  "errors": JobError(error_msg).to_dict(),
873
947
  },
874
948
  },
@@ -880,13 +954,14 @@ def local_execute_strategy(
880
954
  "Strategy execution was canceled from the event after "
881
955
  "end stage execution."
882
956
  )
883
- result.catch(
957
+ catch(
958
+ context=context,
884
959
  status=CANCEL,
885
- context={
960
+ updated={
886
961
  strategy_id: {
887
962
  "status": CANCEL,
888
963
  "matrix": strategy,
889
- "stages": filter_func(context.pop("stages", {})),
964
+ "stages": pop_stages(current_context),
890
965
  "errors": JobCancelError(error_msg).to_dict(),
891
966
  },
892
967
  },
@@ -894,17 +969,18 @@ def local_execute_strategy(
894
969
  raise JobCancelError(error_msg, refs=strategy_id)
895
970
 
896
971
  status: Status = SKIP if sum(skips) == total_stage else SUCCESS
897
- result.catch(
972
+ catch(
973
+ context=context,
898
974
  status=status,
899
- context={
975
+ updated={
900
976
  strategy_id: {
901
977
  "status": status,
902
978
  "matrix": strategy,
903
- "stages": filter_func(context.pop("stages", {})),
979
+ "stages": pop_stages(current_context),
904
980
  },
905
981
  },
906
982
  )
907
- return status, result
983
+ return status, context
908
984
 
909
985
 
910
986
  def local_execute(
@@ -912,7 +988,6 @@ def local_execute(
912
988
  params: DictData,
913
989
  *,
914
990
  run_id: StrOrNone = None,
915
- parent_run_id: StrOrNone = None,
916
991
  event: Optional[Event] = None,
917
992
  ) -> Result:
918
993
  """Local job execution with passing dynamic parameters from the workflow
@@ -933,47 +1008,61 @@ def local_execute(
933
1008
  :param job: (Job) A job model.
934
1009
  :param params: (DictData) A parameter data.
935
1010
  :param run_id: (str) A job running ID.
936
- :param parent_run_id: (str) A parent workflow running ID.
937
1011
  :param event: (Event) An Event manager instance that use to cancel this
938
1012
  execution if it forces stopped by parent execution.
939
1013
 
940
1014
  :rtype: Result
941
1015
  """
942
- result: Result = Result.construct_with_rs_or_id(
943
- run_id=run_id,
944
- parent_run_id=parent_run_id,
945
- id_logic=(job.id or "EMPTY"),
946
- extras=job.extras,
1016
+ ts: float = time.monotonic()
1017
+ parent_run_id: StrOrNone = run_id
1018
+ run_id: str = gen_id((job.id or "EMPTY"), unique=True)
1019
+ trace: Trace = get_trace(
1020
+ run_id, parent_run_id=parent_run_id, extras=job.extras
947
1021
  )
948
-
949
- result.trace.info("[JOB]: Start Local executor.")
1022
+ context: DictData = {"status": WAIT}
1023
+ trace.info("[JOB]: Start Local executor.")
950
1024
 
951
1025
  if job.desc:
952
- result.trace.debug(f"[JOB]: Description:||{job.desc}||")
1026
+ trace.debug(f"[JOB]: Description:||{job.desc}||")
953
1027
 
954
1028
  if job.is_skipped(params=params):
955
- result.trace.info("[JOB]: Skip because job condition was valid.")
956
- return result.catch(status=SKIP)
1029
+ trace.info("[JOB]: Skip because job condition was valid.")
1030
+ return Result(
1031
+ run_id=run_id,
1032
+ parent_run_id=parent_run_id,
1033
+ status=SKIP,
1034
+ context=catch(context, status=SKIP),
1035
+ info={"execution_time": time.monotonic() - ts},
1036
+ extras=job.extras,
1037
+ )
957
1038
 
958
1039
  event: Event = event or Event()
959
1040
  ls: str = "Fail-Fast" if job.strategy.fail_fast else "All-Completed"
960
1041
  workers: int = job.strategy.max_parallel
961
1042
  strategies: list[DictStr] = job.strategy.make()
962
1043
  len_strategy: int = len(strategies)
963
- result.trace.info(
1044
+ trace.info(
964
1045
  f"[JOB]: ... Mode {ls}: {job.id!r} with {workers} "
965
1046
  f"worker{'s' if workers > 1 else ''}."
966
1047
  )
967
1048
 
968
1049
  if event and event.is_set():
969
- return result.catch(
1050
+ return Result(
1051
+ run_id=run_id,
1052
+ parent_run_id=parent_run_id,
970
1053
  status=CANCEL,
971
- context={
972
- "errors": JobCancelError(
973
- "Execution was canceled from the event before start "
974
- "local job execution."
975
- ).to_dict()
976
- },
1054
+ context=catch(
1055
+ context,
1056
+ status=CANCEL,
1057
+ updated={
1058
+ "errors": JobCancelError(
1059
+ "Execution was canceled from the event before start "
1060
+ "local job execution."
1061
+ ).to_dict()
1062
+ },
1063
+ ),
1064
+ info={"execution_time": time.monotonic() - ts},
1065
+ extras=job.extras,
977
1066
  )
978
1067
 
979
1068
  with ThreadPoolExecutor(workers, "jb_stg") as executor:
@@ -983,13 +1072,15 @@ def local_execute(
983
1072
  job=job,
984
1073
  strategy=strategy,
985
1074
  params=params,
986
- result=result,
1075
+ run_id=run_id,
1076
+ context=context,
1077
+ parent_run_id=parent_run_id,
987
1078
  event=event,
988
1079
  )
989
1080
  for strategy in strategies
990
1081
  ]
991
1082
 
992
- context: DictData = {}
1083
+ errors: DictData = {}
993
1084
  statuses: list[Status] = [WAIT] * len_strategy
994
1085
  fail_fast: bool = False
995
1086
 
@@ -998,14 +1089,14 @@ def local_execute(
998
1089
  else:
999
1090
  done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
1000
1091
  if len(list(done)) != len(futures):
1001
- result.trace.warning(
1092
+ trace.warning(
1002
1093
  "[JOB]: Set the event for stop pending job-execution."
1003
1094
  )
1004
1095
  event.set()
1005
1096
  for future in not_done:
1006
1097
  future.cancel()
1007
1098
 
1008
- time.sleep(0.025)
1099
+ time.sleep(0.01)
1009
1100
  nd: str = (
1010
1101
  (
1011
1102
  f", {len(not_done)} strateg"
@@ -1014,7 +1105,7 @@ def local_execute(
1014
1105
  if not_done
1015
1106
  else ""
1016
1107
  )
1017
- result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
1108
+ trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
1018
1109
  done: Iterator[Future] = as_completed(futures)
1019
1110
  fail_fast: bool = True
1020
1111
 
@@ -1023,10 +1114,10 @@ def local_execute(
1023
1114
  statuses[i], _ = future.result()
1024
1115
  except JobError as e:
1025
1116
  statuses[i] = get_status_from_error(e)
1026
- result.trace.error(
1117
+ trace.error(
1027
1118
  f"[JOB]: {ls} Handler:||{e.__class__.__name__}: {e}"
1028
1119
  )
1029
- mark_errors(context, e)
1120
+ mark_errors(errors, e)
1030
1121
  except CancelledError:
1031
1122
  pass
1032
1123
 
@@ -1037,7 +1128,14 @@ def local_execute(
1037
1128
  if fail_fast and status == CANCEL:
1038
1129
  status = FAILED
1039
1130
 
1040
- return result.catch(status=status, context=context)
1131
+ return Result(
1132
+ run_id=run_id,
1133
+ parent_run_id=parent_run_id,
1134
+ status=status,
1135
+ context=catch(context, status=status, updated=errors),
1136
+ info={"execution_time": time.monotonic() - ts},
1137
+ extras=job.extras,
1138
+ )
1041
1139
 
1042
1140
 
1043
1141
  def self_hosted_execute(
@@ -1045,7 +1143,6 @@ def self_hosted_execute(
1045
1143
  params: DictData,
1046
1144
  *,
1047
1145
  run_id: StrOrNone = None,
1048
- parent_run_id: StrOrNone = None,
1049
1146
  event: Optional[Event] = None,
1050
1147
  ) -> Result: # pragma: no cov
1051
1148
  """Self-Hosted job execution with passing dynamic parameters from the
@@ -1055,30 +1152,35 @@ def self_hosted_execute(
1055
1152
  :param job: (Job) A job model that want to execute.
1056
1153
  :param params: (DictData) A parameter data.
1057
1154
  :param run_id: (str) A job running ID.
1058
- :param parent_run_id: (str) A parent workflow running ID.
1059
1155
  :param event: (Event) An Event manager instance that use to cancel this
1060
1156
  execution if it forces stopped by parent execution.
1061
1157
 
1062
1158
  :rtype: Result
1063
1159
  """
1064
- result: Result = Result.construct_with_rs_or_id(
1065
- run_id=run_id,
1066
- parent_run_id=parent_run_id,
1067
- id_logic=(job.id or "EMPTY"),
1068
- extras=job.extras,
1160
+ parent_run_id: StrOrNone = run_id
1161
+ run_id: str = gen_id((job.id or "EMPTY"), unique=True)
1162
+ trace: Trace = get_trace(
1163
+ run_id, parent_run_id=parent_run_id, extras=job.extras
1069
1164
  )
1070
-
1071
- result.trace.info("[JOB]: Start self-hosted executor.")
1165
+ context: DictData = {"status": WAIT}
1166
+ trace.info("[JOB]: Start self-hosted executor.")
1072
1167
 
1073
1168
  if event and event.is_set():
1074
- return result.catch(
1169
+ return Result(
1170
+ run_id=run_id,
1171
+ parent_run_id=parent_run_id,
1075
1172
  status=CANCEL,
1076
- context={
1077
- "errors": JobCancelError(
1078
- "Execution was canceled from the event before start "
1079
- "self-hosted execution."
1080
- ).to_dict()
1081
- },
1173
+ context=catch(
1174
+ context,
1175
+ status=CANCEL,
1176
+ updated={
1177
+ "errors": JobCancelError(
1178
+ "Execution was canceled from the event before start "
1179
+ "self-hosted execution."
1180
+ ).to_dict()
1181
+ },
1182
+ ),
1183
+ extras=job.extras,
1082
1184
  )
1083
1185
 
1084
1186
  import requests
@@ -1090,11 +1192,20 @@ def self_hosted_execute(
1090
1192
  data={
1091
1193
  "job": job.model_dump(),
1092
1194
  "params": params,
1093
- "result": result.__dict__,
1195
+ "run_id": parent_run_id,
1196
+ "extras": job.extras,
1094
1197
  },
1095
1198
  )
1096
1199
  except requests.exceptions.RequestException as e:
1097
- return result.catch(status=FAILED, context={"errors": to_dict(e)})
1200
+ return Result(
1201
+ run_id=run_id,
1202
+ parent_run_id=parent_run_id,
1203
+ status=FAILED,
1204
+ context=catch(
1205
+ context, status=FAILED, updated={"errors": to_dict(e)}
1206
+ ),
1207
+ extras=job.extras,
1208
+ )
1098
1209
 
1099
1210
  if resp.status_code != 200:
1100
1211
  raise JobError(
@@ -1102,7 +1213,13 @@ def self_hosted_execute(
1102
1213
  f"{job.runs_on.args.host!r}"
1103
1214
  )
1104
1215
 
1105
- return result.catch(status=SUCCESS)
1216
+ return Result(
1217
+ run_id=run_id,
1218
+ parent_run_id=parent_run_id,
1219
+ status=SUCCESS,
1220
+ context=catch(context, status=SUCCESS),
1221
+ extras=job.extras,
1222
+ )
1106
1223
 
1107
1224
 
1108
1225
  def azure_batch_execute(
@@ -1110,7 +1227,6 @@ def azure_batch_execute(
1110
1227
  params: DictData,
1111
1228
  *,
1112
1229
  run_id: StrOrNone = None,
1113
- parent_run_id: StrOrNone = None,
1114
1230
  event: Optional[Event] = None,
1115
1231
  ) -> Result: # pragma: no cov
1116
1232
  """Azure Batch job execution that will run all job's stages on the Azure
@@ -1134,32 +1250,43 @@ def azure_batch_execute(
1134
1250
  :param job:
1135
1251
  :param params:
1136
1252
  :param run_id:
1137
- :param parent_run_id:
1138
1253
  :param event:
1139
1254
 
1140
1255
  :rtype: Result
1141
1256
  """
1142
- result: Result = Result.construct_with_rs_or_id(
1143
- run_id=run_id,
1144
- parent_run_id=parent_run_id,
1145
- id_logic=(job.id or "EMPTY"),
1146
- extras=job.extras,
1257
+ parent_run_id: StrOrNone = run_id
1258
+ run_id: str = gen_id((job.id or "EMPTY"), unique=True)
1259
+ trace: Trace = get_trace(
1260
+ run_id, parent_run_id=parent_run_id, extras=job.extras
1147
1261
  )
1148
-
1149
- result.trace.info("[JOB]: Start Azure Batch executor.")
1262
+ context: DictData = {"status": WAIT}
1263
+ trace.info("[JOB]: Start Azure Batch executor.")
1150
1264
 
1151
1265
  if event and event.is_set():
1152
- return result.catch(
1266
+ return Result(
1267
+ run_id=run_id,
1268
+ parent_run_id=parent_run_id,
1153
1269
  status=CANCEL,
1154
- context={
1155
- "errors": JobCancelError(
1156
- "Execution was canceled from the event before start "
1157
- "azure-batch execution."
1158
- ).to_dict()
1159
- },
1270
+ context=catch(
1271
+ context,
1272
+ status=CANCEL,
1273
+ updated={
1274
+ "errors": JobCancelError(
1275
+ "Execution was canceled from the event before start "
1276
+ "self-hosted execution."
1277
+ ).to_dict()
1278
+ },
1279
+ ),
1280
+ extras=job.extras,
1160
1281
  )
1161
1282
  print(params)
1162
- return result.catch(status=SUCCESS)
1283
+ return Result(
1284
+ run_id=run_id,
1285
+ parent_run_id=parent_run_id,
1286
+ status=SUCCESS,
1287
+ context=catch(context, status=SUCCESS),
1288
+ extras=job.extras,
1289
+ )
1163
1290
 
1164
1291
 
1165
1292
  def docker_execution(
@@ -1177,24 +1304,36 @@ def docker_execution(
1177
1304
  - Install this workflow package
1178
1305
  - Start push job to run to target Docker container.
1179
1306
  """
1180
- result: Result = Result.construct_with_rs_or_id(
1181
- run_id=run_id,
1182
- parent_run_id=parent_run_id,
1183
- id_logic=(job.id or "EMPTY"),
1184
- extras=job.extras,
1307
+ parent_run_id: StrOrNone = run_id
1308
+ run_id: str = gen_id((job.id or "EMPTY"), unique=True)
1309
+ trace: Trace = get_trace(
1310
+ run_id, parent_run_id=parent_run_id, extras=job.extras
1185
1311
  )
1186
-
1187
- result.trace.info("[JOB]: Start Docker executor.")
1312
+ context: DictData = {"status": WAIT}
1313
+ trace.info("[JOB]: Start Docker executor.")
1188
1314
 
1189
1315
  if event and event.is_set():
1190
- return result.catch(
1316
+ return Result(
1317
+ run_id=run_id,
1318
+ parent_run_id=parent_run_id,
1191
1319
  status=CANCEL,
1192
- context={
1193
- "errors": JobCancelError(
1194
- "Execution was canceled from the event before start "
1195
- "start docker execution."
1196
- ).to_dict()
1197
- },
1320
+ context=catch(
1321
+ context,
1322
+ status=CANCEL,
1323
+ updated={
1324
+ "errors": JobCancelError(
1325
+ "Execution was canceled from the event before start "
1326
+ "self-hosted execution."
1327
+ ).to_dict()
1328
+ },
1329
+ ),
1330
+ extras=job.extras,
1198
1331
  )
1199
1332
  print(params)
1200
- return result.catch(status=SUCCESS)
1333
+ return Result(
1334
+ run_id=run_id,
1335
+ parent_run_id=parent_run_id,
1336
+ status=SUCCESS,
1337
+ context=catch(context, status=SUCCESS),
1338
+ extras=job.extras,
1339
+ )