ddeutil-workflow 0.0.50__py3-none-any.whl → 0.0.52__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/job.py CHANGED
@@ -27,18 +27,18 @@ from threading import Event
  from typing import Annotated, Any, Literal, Optional, Union
 
  from ddeutil.core import freeze_args
- from pydantic import BaseModel, ConfigDict, Discriminator, Field, Tag
+ from pydantic import BaseModel, Discriminator, Field, SecretStr, Tag
  from pydantic.functional_validators import field_validator, model_validator
  from typing_extensions import Self
 
- from .__types import DictData, DictStr, Matrix, TupleStr
+ from .__types import DictData, DictStr, Matrix
  from .exceptions import (
  JobException,
  StageException,
  UtilException,
  to_dict,
  )
- from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
+ from .result import CANCEL, FAILED, SKIP, SUCCESS, WAIT, Result, Status
  from .reusables import has_template, param2template
  from .stages import Stage
  from .utils import cross_product, filter_func, gen_id
@@ -46,21 +46,6 @@ from .utils import cross_product, filter_func, gen_id
  MatrixFilter = list[dict[str, Union[str, int]]]
 
 
- __all__: TupleStr = (
- "Strategy",
- "Job",
- "Rule",
- "RunsOn",
- "RunsOnModel",
- "OnLocal",
- "OnSelfHosted",
- "OnK8s",
- "make",
- "local_execute_strategy",
- "local_execute",
- )
-
-
  @freeze_args
  @lru_cache
  def make(
@@ -120,7 +105,6 @@ def make(
 
  add.append(inc)
 
- # NOTE: Merge all matrix together.
  final.extend(add)
  return final
 
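The `make` helper touched above expands a strategy matrix into the list of concrete strategy combinations that the executor later loops over. As a plain-Python illustration of that expansion (the package itself goes through `make`/`cross_product`; the matrix values below are made up):

    from itertools import product

    matrix = {"python": ["3.10", "3.11"], "os": ["ubuntu", "windows"]}

    # Cross product of every matrix key, the same idea the strategy expansion uses.
    strategies = [dict(zip(matrix, values)) for values in product(*matrix.values())]
    # [{'python': '3.10', 'os': 'ubuntu'}, {'python': '3.10', 'os': 'windows'},
    #  {'python': '3.11', 'os': 'ubuntu'}, {'python': '3.11', 'os': 'windows'}]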
@@ -194,7 +178,7 @@ class Strategy(BaseModel):
 
 
  class Rule(str, Enum):
- """Trigger rules enum object."""
+ """Rule enum object for assign trigger option."""
 
  ALL_SUCCESS: str = "all_success"
  ALL_FAILED: str = "all_failed"
@@ -210,8 +194,8 @@ class RunsOn(str, Enum):
 
  LOCAL: str = "local"
  SELF_HOSTED: str = "self_hosted"
- K8S: str = "k8s"
  AZ_BATCH: str = "azure_batch"
+ DOCKER: str = "docker"
 
 
  class BaseRunsOn(BaseModel): # pragma: no cov
@@ -219,46 +203,87 @@ class BaseRunsOn(BaseModel): # pragma: no cov
  object and override execute method.
  """
 
- model_config = ConfigDict(use_enum_values=True)
-
- type: Literal[RunsOn.LOCAL]
+ type: RunsOn = Field(description="A runs-on type.")
  args: DictData = Field(
  default_factory=dict,
  alias="with",
+ description=(
+ "An argument that pass to the runs-on execution function. This "
+ "args will override by this child-model with specific args model."
+ ),
  )
 
 
  class OnLocal(BaseRunsOn): # pragma: no cov
  """Runs-on local."""
 
- type: Literal[RunsOn.LOCAL] = Field(default=RunsOn.LOCAL)
+ type: Literal[RunsOn.LOCAL] = Field(
+ default=RunsOn.LOCAL, validate_default=True
+ )
 
 
  class SelfHostedArgs(BaseModel):
- host: str
+ """Self-Hosted arguments."""
+
+ host: str = Field(description="A host URL of the target self-hosted.")
 
 
  class OnSelfHosted(BaseRunsOn): # pragma: no cov
  """Runs-on self-hosted."""
 
- type: Literal[RunsOn.SELF_HOSTED] = Field(default=RunsOn.SELF_HOSTED)
+ type: Literal[RunsOn.SELF_HOSTED] = Field(
+ default=RunsOn.SELF_HOSTED, validate_default=True
+ )
  args: SelfHostedArgs = Field(alias="with")
 
 
- class OnK8s(BaseRunsOn): # pragma: no cov
- """Runs-on Kubernetes."""
+ class AzBatchArgs(BaseModel):
+ batch_account_name: str
+ batch_account_key: SecretStr
+ batch_account_url: str
+ storage_account_name: str
+ storage_account_key: SecretStr
 
- type: Literal[RunsOn.K8S] = Field(default=RunsOn.K8S)
 
+ class OnAzBatch(BaseRunsOn): # pragma: no cov
 
- def get_discriminator_runs_on(model: dict[str, Any]) -> str:
- return model.get("type", "local")
+ type: Literal[RunsOn.AZ_BATCH] = Field(
+ default=RunsOn.AZ_BATCH, validate_default=True
+ )
+ args: AzBatchArgs = Field(alias="with")
+
+
+ class DockerArgs(BaseModel):
+ image: str = Field(
+ default="ubuntu-latest",
+ description=(
+ "An image that want to run like `ubuntu-22.04`, `windows-latest`, "
+ ", `ubuntu-24.04-arm`, or `macos-14`"
+ ),
+ )
+ env: DictData = Field(default_factory=dict)
+ volume: DictData = Field(default_factory=dict)
+
+
+ class OnDocker(BaseRunsOn): # pragma: no cov
+ """Runs-on Docker container."""
+
+ type: Literal[RunsOn.DOCKER] = Field(
+ default=RunsOn.DOCKER, validate_default=True
+ )
+ args: DockerArgs = Field(alias="with", default_factory=DockerArgs)
+
+
+ def get_discriminator_runs_on(model: dict[str, Any]) -> RunsOn:
+ """Get discriminator of the RunsOn models."""
+ t = model.get("type")
+ return RunsOn(t) if t else RunsOn.LOCAL
 
 
  RunsOnModel = Annotated[
  Union[
- Annotated[OnK8s, Tag(RunsOn.K8S)],
  Annotated[OnSelfHosted, Tag(RunsOn.SELF_HOSTED)],
+ Annotated[OnDocker, Tag(RunsOn.DOCKER)],
  Annotated[OnLocal, Tag(RunsOn.LOCAL)],
  ],
  Discriminator(get_discriminator_runs_on),
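The `runs-on` union above now discriminates on the `type` key, falls back to the local runner when that key is absent, and swaps the Kubernetes model for Docker and Azure Batch argument models. A minimal usage sketch (not taken from the package docs; it assumes pydantic v2's `TypeAdapter` and that these models remain importable from `ddeutil.workflow.job`):

    from pydantic import TypeAdapter

    from ddeutil.workflow.job import OnDocker, OnLocal, RunsOnModel

    adapter = TypeAdapter(RunsOnModel)

    # A `runs-on` mapping with an explicit type resolves to its tagged model.
    docker = adapter.validate_python(
        {"type": "docker", "with": {"image": "ubuntu-24.04", "env": {"KEY": "value"}}}
    )
    assert isinstance(docker, OnDocker)

    # No `type` key: get_discriminator_runs_on returns RunsOn.LOCAL.
    assert isinstance(adapter.validate_python({}), OnLocal)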
@@ -288,7 +313,6 @@ class Job(BaseModel):
  ... "name": "Some stage",
  ... "run": "print('Hello World')",
  ... },
- ... ...
  ... ],
  ... }
  """
@@ -319,6 +343,7 @@ class Job(BaseModel):
  )
  trigger_rule: Rule = Field(
  default=Rule.ALL_SUCCESS,
+ validate_default=True,
  description=(
  "A trigger rule of tracking needed jobs if feature will use when "
  "the `raise_error` did not set from job and stage executions."
@@ -327,7 +352,7 @@ class Job(BaseModel):
  )
  needs: list[str] = Field(
  default_factory=list,
- description="A list of the job ID that want to run before this job.",
+ description="A list of the job that want to run before this job model.",
  )
  strategy: Strategy = Field(
  default_factory=Strategy,
@@ -359,7 +384,7 @@ class Job(BaseModel):
  name: str = stage.iden
  if name in rs:
  raise ValueError(
- "Stage name in jobs object should not be duplicate."
+ f"Stage name, {name!r}, should not be duplicate."
  )
  rs.append(name)
  return value
@@ -372,7 +397,9 @@ class Job(BaseModel):
  """
  # VALIDATE: Validate job id should not dynamic with params template.
  if has_template(self.id):
- raise ValueError("Job ID should not has any template.")
+ raise ValueError(
+ f"Job ID, {self.id!r}, should not has any template."
+ )
 
  return self
 
@@ -390,7 +417,7 @@ class Job(BaseModel):
  if self.extras:
  stage.extras = self.extras
  return stage
- raise ValueError(f"Stage ID {stage_id} does not exists")
+ raise ValueError(f"Stage {stage_id!r} does not exists in this job.")
 
  def check_needs(
  self,
@@ -497,14 +524,14 @@ class Job(BaseModel):
  ... (i) output: {'strategy-01': bar, 'strategy-02': bar}
  ... (ii) to: {'jobs': {}}
 
- The result of the `to` variable will be;
+ The result of the `to` argument will be;
 
  ... (iii) to: {
  'jobs': {
  '<job-id>': {
  'strategies': {
  'strategy-01': bar,
- 'strategy-02': bar
+ 'strategy-02': bar,
  }
  }
  }
@@ -527,22 +554,27 @@ class Job(BaseModel):
  "This job do not set the ID before setting execution output."
  )
 
- # NOTE: If the job ID did not set, it will use index of jobs key
- # instead.
  _id: str = self.id or job_id
-
  errors: DictData = (
  {"errors": output.pop("errors", {})} if "errors" in output else {}
  )
+ skipping: dict[str, bool] = (
+ {"skipped": output.pop("skipped", False)}
+ if "skipped" in output
+ else {}
+ )
 
- if "SKIP" in output: # pragma: no cov
- to["jobs"][_id] = output["SKIP"]
- elif self.strategy.is_set():
- to["jobs"][_id] = {"strategies": output, **errors}
+ if self.strategy.is_set():
+ to["jobs"][_id] = {"strategies": output, **skipping, **errors}
+ elif len(k := output.keys()) > 1: # pragma: no cov
+ raise JobException(
+ "Strategy output from execution return more than one ID while "
+ "this job does not set strategy."
+ )
  else:
- _output = output.get(next(iter(output), "FIRST"), {})
+ _output: DictData = {} if len(k) == 0 else output[list(k)[0]]
  _output.pop("matrix", {})
- to["jobs"][_id] = {**_output, **skipping, **errors}
+ to["jobs"][_id] = {**_output, **skipping, **errors}
  return to
 
  def execute(
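`set_outputs` now carries a `skipped` flag through to the job context and rejects multi-key output when no strategy matrix is set. A hedged sketch of the reshaping, following the logic in the hunk above (the job definition and output values are invented for illustration):

    from ddeutil.workflow.job import Job

    # Hypothetical job; the stage mapping mirrors the docstring example above.
    job = Job(
        id="first-job",
        stages=[{"name": "Some stage", "run": "print('Hello World')"}],
    )

    # Without a strategy matrix the single strategy key is flattened away.
    ctx = job.set_outputs({"2150810470": {"matrix": {}, "stages": {}}}, to={"jobs": {}})
    # -> {"jobs": {"first-job": {"stages": {}}}}

    # A "skipped" marker in the output is preserved alongside the context.
    ctx = job.set_outputs({"skipped": True}, to={"jobs": {}})
    # -> {"jobs": {"first-job": {"skipped": True}}}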
@@ -559,6 +591,9 @@
  execution. It will generate matrix values at the first step and run
  multithread on this metrics to the `stages` field of this job.
 
+ This method be execution routing for call dynamic execution function
+ with specific target `runs-on` value.
+
  :param params: An input parameters that use on job execution.
  :param run_id: (str) A job running ID.
  :param parent_run_id: (str) A parent workflow running ID.
@@ -566,8 +601,10 @@
  data.
  :param event: (Event) An event manager that pass to the
  PoolThreadExecutor.
- :param raise_error: (bool) A flag that all this method raise error to the
- strategy execution. Default is `True`.
+ :param raise_error: (bool) A flag that all this method raise error to
+ the strategy execution. Default is `True`.
+
+ :raise NotImplementedError: If the `runs-on` value does not implement.
 
  :rtype: Result
  """
@@ -579,28 +616,36 @@
  extras=self.extras,
  )
 
- result.trace.info(f"[JOB]: Start execute job: {self.id!r}")
+ result.trace.info(
+ f"[JOB]: Execute: {self.id!r} on {self.runs_on.type.value!r}"
+ )
  if self.runs_on.type == RunsOn.LOCAL:
  return local_execute(
- job=self,
- params=params,
- result=result,
+ self,
+ params,
+ run_id=run_id,
+ parent_run_id=parent_run_id,
  event=event,
  raise_error=raise_error,
  )
  elif self.runs_on.type == RunsOn.SELF_HOSTED: # pragma: no cov
  pass
- elif self.runs_on.type == RunsOn.K8S: # pragma: no cov
- pass
+ elif self.runs_on.type == RunsOn.DOCKER: # pragma: no cov
+ docker_execution(
+ self,
+ params,
+ run_id=run_id,
+ parent_run_id=parent_run_id,
+ event=event,
+ raise_error=raise_error,
+ )
 
  # pragma: no cov
  result.trace.error(
- f"[JOB]: Job executor does not support for runs-on type: "
- f"{self.runs_on.type} yet"
+ f"[JOB]: Execution not support runs-on: {self.runs_on.type!r} yet."
  )
  raise NotImplementedError(
- f"The job runs-on other type: {self.runs_on.type} does not "
- f"support yet."
+ f"Execution runs-on type: {self.runs_on.type} does not support yet."
  )
 
 
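The routing above now dispatches on `runs_on.type` and forwards `run_id`/`parent_run_id` instead of a pre-built `Result`. A minimal call-site sketch (the job definition, its `echo` stage, and the empty params are hypothetical; with no runs-on given, the default local runner is assumed):

    from ddeutil.workflow.job import Job

    job = Job(
        id="demo-job",
        stages=[{"name": "Echo", "echo": "hello world"}],
    )

    # execute() takes the RunsOn.LOCAL branch here and calls local_execute();
    # a docker target would route to docker_execution() instead.
    rs = job.execute(params={})
    print(rs.status, rs.context)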
@@ -622,6 +667,8 @@ def local_execute_strategy(
 
  The result of this execution will return result with strategy ID
  that generated from the `gen_id` function with an input strategy value.
+ For each stage that execution with this strategy metrix, it will use the
+ `set_outputs` method for reconstruct result context data.
 
  :raise JobException: If it has any error from `StageException` or
  `UtilException`.
@@ -645,8 +692,10 @@
  context.update({"matrix": strategy, "stages": {}})
 
  if strategy:
- result.trace.info(f"[JOB]: Execute Strategy ID: {strategy_id}")
- result.trace.info(f"[JOB]: ... Matrix: {strategy_id}")
+ result.trace.info(f"[JOB]: Execute Strategy: {strategy_id!r}")
+ result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
+ else:
+ result.trace.info("[JOB]: Execute Empty-Strategy")
 
  for stage in job.stages:
 
@@ -654,7 +703,7 @@
  stage.extras = job.extras
 
  if stage.is_skipped(params=context):
- result.trace.info(f"[STAGE]: Skip stage: {stage.iden!r}")
+ result.trace.info(f"[JOB]: Skip Stage: {stage.iden!r}")
  stage.set_outputs(output={"skipped": True}, to=context)
  continue
 
@@ -664,7 +713,7 @@
  "strategy execution."
  )
  return result.catch(
- status=FAILED,
+ status=CANCEL,
  context={
  strategy_id: {
  "matrix": strategy,
@@ -675,6 +724,7 @@
  )
 
  try:
+ result.trace.info(f"[JOB]: Execute Stage: {stage.iden!r}")
  rs: Result = stage.handler_execute(
  params=context,
  run_id=result.run_id,
@@ -682,22 +732,6 @@
  event=event,
  )
  stage.set_outputs(rs.context, to=context)
- if rs.status == FAILED:
- error_msg: str = (
- f"Job strategy was break because it has a stage, "
- f"{stage.iden}, failed without raise error."
- )
- return result.catch(
- status=FAILED,
- context={
- strategy_id: {
- "matrix": strategy,
- "stages": filter_func(context.pop("stages", {})),
- "errors": JobException(error_msg).to_dict(),
- },
- },
- )
-
  except (StageException, UtilException) as e:
  result.trace.error(f"[JOB]: {e.__class__.__name__}: {e}")
  if raise_error:
@@ -716,6 +750,22 @@
  },
  )
 
+ if rs.status == FAILED:
+ error_msg: str = (
+ f"Job strategy was break because stage, {stage.iden}, "
+ f"failed without raise error."
+ )
+ return result.catch(
+ status=FAILED,
+ context={
+ strategy_id: {
+ "matrix": strategy,
+ "stages": filter_func(context.pop("stages", {})),
+ "errors": JobException(error_msg).to_dict(),
+ },
+ },
+ )
+
  return result.catch(
  status=SUCCESS,
  context={
@@ -733,7 +783,6 @@ def local_execute(
  *,
  run_id: str | None = None,
  parent_run_id: str | None = None,
- result: Result | None = None,
  event: Event | None = None,
  raise_error: bool = True,
  ) -> Result:
@@ -748,8 +797,6 @@
  :param params: (DictData) An input parameters that use on job execution.
  :param run_id: (str) A job running ID for this execution.
  :param parent_run_id: (str) A parent workflow running ID for this release.
- :param result: (Result) A result object for keeping context and status
- data.
  :param event: (Event) An event manager that pass to the PoolThreadExecutor.
  :param raise_error: (bool) A flag that all this method raise error to the
  strategy execution. Default is `True`.
@@ -757,12 +804,12 @@
  :rtype: Result
  """
  result: Result = Result.construct_with_rs_or_id(
- result,
  run_id=run_id,
  parent_run_id=parent_run_id,
  id_logic=(job.id or "not-set"),
  extras=job.extras,
  )
+
  event: Event = Event() if event is None else event
 
  # NOTE: Normal Job execution without parallel strategy matrix. It uses
@@ -773,7 +820,7 @@
 
  if event and event.is_set(): # pragma: no cov
  return result.catch(
- status=FAILED,
+ status=CANCEL,
  context={
  "errors": JobException(
  "Job strategy was canceled from event that had set "
@@ -791,7 +838,7 @@
  raise_error=raise_error,
  )
 
- return result.catch(status=result.status)
+ return result
 
  fail_fast_flag: bool = job.strategy.fail_fast
  ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
@@ -802,7 +849,7 @@
 
  if event and event.is_set(): # pragma: no cov
  return result.catch(
- status=FAILED,
+ status=CANCEL,
  context={
  "errors": JobException(
  "Job strategy was canceled from event that had set "
@@ -841,7 +888,7 @@
 
  if len(done) != len(futures):
  result.trace.warning(
- "[JOB]: Set the event for stop running stage."
+ "[JOB]: Set event for stop pending stage future."
  )
  event.set()
  for future in not_done:
@@ -850,7 +897,7 @@
  nd: str = (
  f", the strategies do not run is {not_done}" if not_done else ""
  )
- result.trace.debug(f"[JOB]: Strategy is set Fail Fast{nd}")
+ result.trace.debug(f"[JOB]: Strategy set Fail-Fast{nd}")
 
  for future in done:
  try:
@@ -871,7 +918,6 @@ def self_hosted_execute(
  *,
  run_id: str | None = None,
  parent_run_id: str | None = None,
- result: Result | None = None,
  event: Event | None = None,
  raise_error: bool = True,
  ) -> Result: # pragma: no cov
@@ -883,8 +929,6 @@
  :param params: (DictData) An input parameters that use on job execution.
  :param run_id: (str) A job running ID for this execution.
  :param parent_run_id: (str) A parent workflow running ID for this release.
- :param result: (Result) A result object for keeping context and status
- data.
  :param event: (Event) An event manager that pass to the PoolThreadExecutor.
  :param raise_error: (bool) A flag that all this method raise error to the
  strategy execution.
@@ -892,7 +936,6 @@
  :rtype: Result
  """
  result: Result = Result.construct_with_rs_or_id(
- result,
  run_id=run_id,
  parent_run_id=parent_run_id,
  id_logic=(job.id or "not-set"),
@@ -901,7 +944,7 @@
 
  if event and event.is_set():
  return result.catch(
- status=FAILED,
+ status=CANCEL,
  context={
  "errors": JobException(
  "Job self-hosted execution was canceled from event that "
@@ -943,7 +986,6 @@ def azure_batch_execute(
  *,
  run_id: str | None = None,
  parent_run_id: str | None = None,
- result: Result | None = None,
  event: Event | None = None,
  raise_error: bool | None = None,
  ) -> Result: # pragma no cov
@@ -962,17 +1004,19 @@
  the compute node before running the script.
  - Monitor the job and retrieve the output files from Azure Storage.
 
+ References:
+ - https://docs.azure.cn/en-us/batch/tutorial-parallel-python
+
  :param job:
  :param params:
  :param run_id:
  :param parent_run_id:
- :param result:
  :param event:
  :param raise_error:
- :return:
+
+ :rtype: Result
  """
  result: Result = Result.construct_with_rs_or_id(
- result,
  run_id=run_id,
  parent_run_id=parent_run_id,
  id_logic=(job.id or "not-set"),
@@ -980,7 +1024,7 @@
  )
  if event and event.is_set():
  return result.catch(
- status=FAILED,
+ status=CANCEL,
  context={
  "errors": JobException(
  "Job azure-batch execution was canceled from event that "
@@ -991,3 +1035,40 @@
  print(params)
  print(raise_error)
  return result.catch(status=SUCCESS)
+
+
+ def docker_execution(
+ job: Job,
+ params: DictData,
+ *,
+ run_id: str | None = None,
+ parent_run_id: str | None = None,
+ event: Event | None = None,
+ raise_error: bool | None = None,
+ ):
+ """Docker job execution.
+
+ Steps:
+ - Pull the image
+ - Install this workflow package
+ - Start push job to run to target Docker container.
+ """
+ result: Result = Result.construct_with_rs_or_id(
+ run_id=run_id,
+ parent_run_id=parent_run_id,
+ id_logic=(job.id or "not-set"),
+ extras=job.extras,
+ )
+ if event and event.is_set():
+ return result.catch(
+ status=CANCEL,
+ context={
+ "errors": JobException(
+ "Job Docker execution was canceled from event that "
+ "had set before start execution."
+ ).to_dict()
+ },
+ )
+ print(params)
+ print(raise_error)
+ return result.catch(status=SUCCESS)
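The new `docker_execution` is still a placeholder (it only prints its inputs and returns SUCCESS), so the container steps its docstring lists are not implemented yet. A rough sketch of what those steps could look like with the Docker SDK for Python (`pip install docker`); the image, command, and volume values are assumptions, not package behavior:

    import docker

    def run_job_in_container(image: str, env: dict, volumes: dict) -> int:
        """Pull an image and run a throwaway container for one job (sketch)."""
        client = docker.from_env()
        client.images.pull(image)

        # Install the workflow package and hand the job over to it inside the
        # container; the command here is illustrative only.
        container = client.containers.run(
            image,
            command="sh -c 'pip install ddeutil-workflow && echo run-job-here'",
            environment=env,
            volumes=volumes,
            detach=True,
        )
        exit_code = container.wait()["StatusCode"]
        print(container.logs().decode())
        container.remove()
        return exit_code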