ddeutil-workflow 0.0.41__py3-none-any.whl → 0.0.43__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__version__: str = "0.0.41"
+__version__: str = "0.0.43"
@@ -4,7 +4,7 @@
 # license information.
 # ------------------------------------------------------------------------------
 from .__cron import CronJob, CronRunner
-from .__types import Re
+from .__types import DictData, DictStr, Matrix, Re, TupleStr
 from .conf import (
     Config,
     Loader,
@@ -47,6 +47,10 @@ from .params import (
     StrParam,
 )
 from .result import (
+    FAILED,
+    SKIP,
+    SUCCESS,
+    WAIT,
     Result,
     Status,
 )
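
This hunk appears to be the package root `__init__.py`, so the status sentinels become importable from the package itself. A minimal sketch of the new surface, assuming the 0.0.43 wheel is installed:

```python
# Illustrative sketch, not part of the diff: the new root-level exports.
from ddeutil.workflow import FAILED, SKIP, SUCCESS, WAIT, Result, Status

# The Status.FAILED -> FAILED replacements later in this diff suggest the
# sentinels are aliases of the Status enum members.
print(SUCCESS, isinstance(SUCCESS, Status))
```
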
@@ -20,7 +20,7 @@ from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse

 from ..__about__ import __version__
-from ..conf import config, get_logger
+from ..conf import api_config, config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
@@ -96,24 +96,24 @@ async def health():


 # NOTE Add the jobs and logs routes by default.
-app.include_router(job, prefix=config.prefix_path)
-app.include_router(log, prefix=config.prefix_path)
+app.include_router(job, prefix=api_config.prefix_path)
+app.include_router(log, prefix=api_config.prefix_path)


 # NOTE: Enable the workflows route.
-if config.enable_route_workflow:
+if api_config.enable_route_workflow:
     from .routes import workflow

-    app.include_router(workflow, prefix=config.prefix_path)
+    app.include_router(workflow, prefix=api_config.prefix_path)


 # NOTE: Enable the schedules route.
-if config.enable_route_schedule:
+if api_config.enable_route_schedule:
     from ..logs import get_audit
     from ..scheduler import schedule_task
     from .routes import schedule

-    app.include_router(schedule, prefix=config.prefix_path)
+    app.include_router(schedule, prefix=api_config.prefix_path)

     @schedule.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
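
These call sites track the config split made in conf.py further down: API-serving options move off the core `Config` object onto a new `APIConfig` instance exported as `api_config`. A hedged sketch of the resulting lookups:

```python
# Illustrative sketch: API settings now live on api_config, while core
# settings such as audit_path stay on config (see the APIConfig class below).
from ddeutil.workflow.conf import api_config, config

print(api_config.prefix_path)  # "/api/v1" unless API_PREFIX_PATH is set
print(config.audit_path)       # core settings remain on the Config object
```
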
@@ -28,7 +28,7 @@ schedule_route = APIRouter(
 async def get_schedules(name: str):
     """Get schedule object."""
     try:
-        schedule: Schedule = Schedule.from_loader(name=name, externals={})
+        schedule: Schedule = Schedule.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
@@ -51,7 +51,7 @@ async def get_deploy_schedulers(request: Request):
 @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-        schedule = Schedule.from_loader(name)
+        schedule = Schedule.from_conf(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
         for workflow in schedule.workflows:
             getter.append(
@@ -94,7 +94,7 @@ async def add_deploy_scheduler(request: Request, name: str):

     # NOTE: Create a pair of workflow and on from schedule model.
     try:
-        schedule: Schedule = Schedule.from_loader(name)
+        schedule: Schedule = Schedule.from_conf(name)
     except ValueError as err:
         request.state.scheduler.remove(name)
         logger.exception(err)
@@ -107,7 +107,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         schedule.tasks(
             start_date_waiting,
             queue=request.state.workflow_queue,
-            externals={},
+            extras={},
         ),
     )
     return {
@@ -124,7 +124,7 @@ async def del_deploy_scheduler(request: Request, name: str):
     # NOTE: Remove current schedule name from the state.
     request.state.scheduler.remove(name)

-    schedule: Schedule = Schedule.from_loader(name)
+    schedule: Schedule = Schedule.from_conf(name)

     for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
         if task in request.state.workflow_tasks:
@@ -44,7 +44,7 @@ async def get_workflows() -> DictData:
 async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError as err:
         logger.exception(err)
         raise HTTPException(
@@ -69,7 +69,7 @@ class ExecutePayload(BaseModel):
 async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
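
Every loader-backed constructor in this release is renamed `from_loader` → `from_conf`, and the `externals` keyword becomes `extras`. A hedged migration sketch; the workflow name `wf-scheduling` is hypothetical:

```python
from ddeutil.workflow.workflow import Workflow

# 0.0.41:
#   workflow = Workflow.from_loader(name="wf-scheduling", externals={})
# 0.0.43: same behavior, renamed constructor and keyword argument.
workflow = Workflow.from_conf(name="wf-scheduling", extras={})
```
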
ddeutil/workflow/conf.py CHANGED
@@ -12,6 +12,7 @@ from collections.abc import Iterator
 from datetime import timedelta
 from functools import cached_property, lru_cache
 from pathlib import Path
+from typing import Optional, TypeVar
 from zoneinfo import ZoneInfo

 from ddeutil.core import str2bool
@@ -20,6 +21,7 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore

 from .__types import DictData, TupleStr

+T = TypeVar("T")
 PREFIX: str = "WORKFLOW"


@@ -29,12 +31,14 @@ def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov


 __all__: TupleStr = (
+    "api_config",
     "env",
     "get_logger",
     "Config",
     "SimLoad",
     "Loader",
     "config",
+    "dynamic",
 )


@@ -99,7 +103,7 @@ class Config(BaseConfig):  # pragma: no cov

         :rtype: list[str]
         """
-        regis_call_str: str = env("CORE_REGISTRY", ".")
+        regis_call_str: str = env("CORE_REGISTRY_CALLER", ".")
         return [r.strip() for r in regis_call_str.split(",")]

     @property
@@ -116,7 +120,7 @@
     # NOTE: Log
     @property
     def log_path(self) -> Path:
-        return Path(env("LOG_PATH", "./logs"))
+        return Path(env("LOG_TRACE_PATH", "./logs"))

     @property
     def debug(self) -> bool:
@@ -145,16 +149,15 @@

     @property
     def enable_write_log(self) -> bool:
-        return str2bool(env("LOG_ENABLE_WRITE", "false"))
+        return str2bool(env("LOG_TRACE_ENABLE_WRITE", "false"))

-    # NOTE: Audit Log
     @property
     def audit_path(self) -> Path:
-        return Path(env("AUDIT_PATH", "./audits"))
+        return Path(env("LOG_AUDIT_PATH", "./audits"))

     @property
     def enable_write_audit(self) -> bool:
-        return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
+        return str2bool(env("LOG_AUDIT_ENABLE_WRITE", "false"))

     @property
     def log_datetime_format(self) -> str:
@@ -178,29 +181,12 @@ class Config(BaseConfig): # pragma: no cov
     def job_default_id(self) -> bool:
         return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))

-    # NOTE: Workflow
-    @property
-    def max_job_parallel(self) -> int:
-        max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
-
-        # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
-        if max_job_parallel < 0:
-            raise ValueError(
-                f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
-                f"{max_job_parallel}."
-            )
-        return max_job_parallel
-
-    @property
-    def max_job_exec_timeout(self) -> int:
-        return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
-
-    @property
-    def max_poking_pool_worker(self) -> int:
-        return int(env("CORE_MAX_NUM_POKING", "4"))
-
     @property
     def max_on_per_workflow(self) -> int:
+        """The maximum on value that store in workflow model.
+
+        :rtype: int
+        """
         return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))

     @property
@@ -229,7 +215,9 @@ class Config(BaseConfig): # pragma: no cov
                 f"timedelta with {stop_boundary_delta_str}."
             ) from err

-    # NOTE: API
+
+class APIConfig:
+
     @property
     def prefix_path(self) -> str:
         return env("API_PREFIX_PATH", "/api/v1")
@@ -389,6 +377,29 @@ class Loader(SimLoad):


 config: Config = Config()
+api_config: APIConfig = APIConfig()
+
+
+def dynamic(
+    key: Optional[str] = None,
+    *,
+    f: Optional[T] = None,
+    extras: Optional[DictData] = None,
+) -> Optional[T]:
+    """Dynamic get config if extra value was passed at run-time.
+
+    :param key: (str) A config key that get from Config object.
+    :param f: An inner config function scope.
+    :param extras: An extra values that pass at run-time.
+    """
+    rsx: Optional[T] = extras[key] if extras and key in extras else None
+    rs: Optional[T] = f or getattr(config, key, None)
+    if rsx is not None and not isinstance(rsx, type(rs)):
+        raise TypeError(
+            f"Type of config {key!r} from extras: {rsx!r} does not valid "
+            f"as config {type(rs)}."
+        )
+    return rsx or rs


 @lru_cache
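
The new `dynamic` helper is the hook behind the per-call config overrides used throughout job.py below: a key is resolved from the run-time `extras` mapping first, then from the explicit argument `f`, then from the global `config`, with a type check guarding the override. A usage sketch based directly on the function body above:

```python
from ddeutil.workflow.conf import config, dynamic

# No extras given: falls back to the Config attribute of the same name.
assert dynamic("max_on_per_workflow") == config.max_on_per_workflow

# A truthy run-time override carried in extras wins over the config.
assert dynamic("max_on_per_workflow", extras={"max_on_per_workflow": 10}) == 10

# An override of the wrong type is rejected.
try:
    dynamic("max_on_per_workflow", extras={"max_on_per_workflow": "10"})
except TypeError as err:
    print(err)
```

Note that the final `rsx or rs` means a falsy override (`0`, `False`, `""`) falls through to the config value rather than winning.
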
ddeutil/workflow/cron.py CHANGED
@@ -72,17 +72,16 @@ class On(BaseModel):

     model_config = ConfigDict(arbitrary_types_allowed=True)

-    # NOTE: This is fields of the base schedule.
     extras: Annotated[
         DictData,
         Field(
             default_factory=dict,
-            description="An extras mapping parameters",
+            description="An extras mapping parameters.",
         ),
     ]
     cronjob: Annotated[
         CronJob,
-        Field(description="Cron job of this schedule"),
+        Field(description="A Cronjob object of this schedule."),
     ]
     tz: Annotated[
         str,
@@ -93,12 +92,12 @@ class On(BaseModel):
     ] = "Etc/UTC"

     @classmethod
-    def from_value(cls, value: DictStr, externals: DictData) -> Self:
+    def from_value(cls, value: DictStr, extras: DictData) -> Self:
         """Constructor from values that will generate crontab by function.

         :param value: A mapping value that will generate crontab before create
             schedule model.
-        :param externals: An extras external parameter that will keep in extras.
+        :param extras: An extras parameter that will keep in extras.
         """
         passing: DictStr = {}
         if "timezone" in value:
@@ -106,22 +105,22 @@ class On(BaseModel):
         passing["cronjob"] = interval2crontab(
             **{v: value[v] for v in value if v in ("interval", "day", "time")}
         )
-        return cls(extras=externals | passing.pop("extras", {}), **passing)
+        return cls(extras=extras | passing.pop("extras", {}), **passing)

     @classmethod
-    def from_loader(
+    def from_conf(
         cls,
         name: str,
-        externals: DictData | None = None,
+        extras: DictData | None = None,
     ) -> Self:
         """Constructor from the name of config that will use loader object for
         getting the data.

         :param name: A name of config that will get from loader.
-        :param externals: An extras external parameter that will keep in extras.
+        :param extras: An extra parameter that will keep in extras.
         """
-        externals: DictData = externals or {}
-        loader: Loader = Loader(name, externals=externals)
+        extras: DictData = extras or {}
+        loader: Loader = Loader(name, externals=extras)

         # NOTE: Validate the config type match with current connection model
         if loader.type != cls.__name__:
@@ -138,7 +137,7 @@ class On(BaseModel):
                         if v in ("interval", "day", "time")
                     }
                 ),
-                extras=externals | loader_data.pop("extras", {}),
+                extras=extras | loader_data.pop("extras", {}),
                 **loader_data,
             )
         )
@@ -149,7 +148,7 @@ class On(BaseModel):
         return cls.model_validate(
             obj=dict(
                 cronjob=loader_data.pop("cronjob"),
-                extras=externals | loader_data.pop("extras", {}),
+                extras=extras | loader_data.pop("extras", {}),
                 **loader_data,
             )
         )
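
`On` follows the same rename; note that the merged `extras` mapping is still forwarded to `Loader` through its unchanged `externals` keyword. A hedged sketch; the config name `every_5_minute` is hypothetical:

```python
from ddeutil.workflow.cron import On

# 0.0.41: On.from_loader("every_5_minute", externals={})
# 0.0.43: run-time extras are merged with any extras from the loaded config.
schedule = On.from_conf("every_5_minute", extras={})
print(schedule.cronjob)
```
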
@@ -9,10 +9,20 @@ annotate for handle error only.
 """
 from __future__ import annotations

-from typing import Any
+from typing import TypedDict

+ErrorData = TypedDict(
+    "ErrorData",
+    {
+        "class": Exception,
+        "name": str,
+        "message": str,
+    },
+)

-def to_dict(exception: Exception) -> dict[str, Any]:  # pragma: no cov
+
+def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
+    """Create dict data from exception instance."""
     return {
         "class": exception,
         "name": exception.__class__.__name__,
@@ -22,7 +32,7 @@ def to_dict(exception: Exception) -> ErrorData: # pragma: no cov

 class BaseWorkflowException(Exception):

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> ErrorData:
         return to_dict(self)


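
`ErrorData` only tightens the annotation of the mapping that `to_dict` already produced; the runtime shape is unchanged. A small sketch of the keys, assuming this module is `ddeutil.workflow.exceptions`:

```python
from ddeutil.workflow.exceptions import BaseWorkflowException, to_dict

try:
    raise BaseWorkflowException("stage failed")
except BaseWorkflowException as exc:
    data = to_dict(exc)  # same shape as exc.to_dict()
    assert data["class"] is exc
    assert data["name"] == "BaseWorkflowException"
    # Assumes "message" holds str(exc), per the ErrorData keys above.
    assert data["message"] == "stage failed"
```
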
ddeutil/workflow/job.py CHANGED
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+# [x] Use dynamic config
 """Job Model that use for keeping stages and node that running its stages.
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define `runs-on` key that allow you to run this
@@ -32,13 +33,13 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import config
+from .conf import dynamic
 from .exceptions import (
     JobException,
     StageException,
     UtilException,
 )
-from .result import Result, Status
+from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
 from .reusables import has_template, param2template
 from .stages import Stage
 from .utils import cross_product, filter_func, gen_id
@@ -50,7 +51,6 @@ __all__: TupleStr = (
     "Strategy",
     "Job",
     "TriggerRules",
-    "TriggerState",
     "RunsOn",
     "RunsOnLocal",
     "RunsOnSelfHosted",
@@ -205,16 +205,6 @@ class TriggerRules(str, Enum):
     none_skipped: str = "none_skipped"


-class TriggerState(str, Enum):
-    waiting: str = "waiting"
-    passed: str = "passed"
-    skipped: str = "skipped"
-    failed: str = "failed"
-
-    def is_waiting(self):
-        return self.value == "waiting"
-
-
 class RunsOnType(str, Enum):
     """Runs-On enum object."""

@@ -346,7 +336,7 @@ class Job(BaseModel):
     )
     extras: DictData = Field(
         default_factory=dict,
-        description="An extra override values.",
+        description="An extra override config values.",
     )

     @field_validator("desc", mode="after")
@@ -404,29 +394,33 @@
         raise ValueError(f"Stage ID {stage_id} does not exists")

     def check_needs(
-        self, jobs: dict[str, Any]
-    ) -> TriggerState:  # pragma: no cov
-        """Return True if job's need exists in an input list of job's ID.
+        self,
+        jobs: dict[str, Any],
+    ) -> Status:  # pragma: no cov
+        """Return Status enum for checking job's need trigger logic in an
+        input list of job's ID.
+
+        :param jobs: A mapping of job ID and result context.

-        :param jobs: A mapping of job model and its ID.
+        :raise NotImplementedError: If the job trigger rule out of scope.

-        :rtype: TriggerState
+        :rtype: Status
         """
         if not self.needs:
-            return TriggerState.passed
+            return SUCCESS

-        def make_return(result: bool) -> TriggerState:
-            return TriggerState.passed if result else TriggerState.failed
+        def make_return(result: bool) -> Status:
+            return SUCCESS if result else FAILED

         need_exist: dict[str, Any] = {
             need: jobs[need] for need in self.needs if need in jobs
         }
         if len(need_exist) != len(self.needs):
-            return TriggerState.waiting
+            return WAIT
         elif all("skipped" in need_exist[job] for job in need_exist):
-            return TriggerState.skipped
+            return SKIP
         elif self.trigger_rule == TriggerRules.all_done:
-            return TriggerState.passed
+            return SUCCESS
         elif self.trigger_rule == TriggerRules.all_success:
             rs = all(
                 k not in need_exist[job]
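
`check_needs` now reports through the same `Status` enum as the rest of the package, replacing the removed `TriggerState`. A hedged sketch of the calling pattern; the helper below is hypothetical:

```python
from typing import Any

from ddeutil.workflow.job import Job
from ddeutil.workflow.result import SKIP, SUCCESS, WAIT, Status


def should_run(job: Job, jobs: dict[str, Any]) -> bool:
    """Hypothetical helper: run a job only once its needs are satisfied."""
    status: Status = job.check_needs(jobs)
    if status == WAIT:
        return False  # some needed job has no result context yet
    if status == SKIP:
        return False  # every needed job was skipped
    return status == SUCCESS  # the trigger rule is satisfied
```
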
@@ -477,7 +471,9 @@
             # should use the `re` module to validate eval-string before
             # running.
             rs: bool = eval(
-                param2template(self.condition, params), globals() | params, {}
+                param2template(self.condition, params, extras=self.extras),
+                globals() | params,
+                {},
             )
             if not isinstance(rs, bool):
                 raise TypeError("Return type of condition does not be boolean")
@@ -519,7 +515,9 @@
         if "jobs" not in to:
             to["jobs"] = {}

-        if self.id is None and not config.job_default_id:
+        if self.id is None and not dynamic(
+            "job_default_id", extras=self.extras
+        ):
             raise JobException(
                 "This job do not set the ID before setting execution output."
             )
@@ -666,7 +664,7 @@ def local_execute_strategy(
                 "strategy execution."
             )
         return result.catch(
-            status=Status.FAILED,
+            status=FAILED,
             context={
                 strategy_id: {
                     "matrix": strategy,
@@ -706,8 +704,8 @@ def local_execute_strategy(
             )
     except (StageException, UtilException) as err:
         result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
-        do_raise: bool = (
-            config.job_raise_error if raise_error is None else raise_error
+        do_raise: bool = dynamic(
+            "job_raise_error", f=raise_error, extras=job.extras
         )
         if do_raise:
             raise JobException(
@@ -716,7 +714,7 @@
             ) from None

         return result.catch(
-            status=Status.FAILED,
+            status=FAILED,
             context={
                 strategy_id: {
                     "matrix": strategy,
@@ -727,7 +725,7 @@
         )

     return result.catch(
-        status=Status.SUCCESS,
+        status=SUCCESS,
         context={
             strategy_id: {
                 "matrix": strategy,
@@ -782,7 +780,7 @@ def local_execute(

     if event and event.is_set():  # pragma: no cov
         return result.catch(
-            status=Status.FAILED,
+            status=FAILED,
             context={
                 "errors": JobException(
                     "Job strategy was canceled from event that had set "
@@ -800,7 +798,7 @@
                 raise_error=raise_error,
             )

-        return result.catch(status=Status.SUCCESS)
+        return result.catch(status=SUCCESS)

     fail_fast_flag: bool = job.strategy.fail_fast
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
@@ -811,7 +809,7 @@

         if event and event.is_set():  # pragma: no cov
             return result.catch(
-                status=Status.FAILED,
+                status=FAILED,
                 context={
                     "errors": JobException(
                         "Job strategy was canceled from event that had set "
@@ -841,7 +839,7 @@
         ]

         context: DictData = {}
-        status: Status = Status.SUCCESS
+        status: Status = SUCCESS

         if not fail_fast_flag:
             done = as_completed(futures, timeout=1800)
@@ -867,7 +865,7 @@
             try:
                 future.result()
             except JobException as err:
-                status = Status.FAILED
+                status = FAILED
                 result.trace.error(
                     f"[JOB]: {ls} Catch:\n\t{err.__class__.__name__}:"
                     f"\n\t{err}"
@@ -895,7 +893,7 @@ def self_hosted_execute(
     )

     if event and event.is_set():
-        return result.catch(status=Status.FAILED)
+        return result.catch(status=FAILED)

     import requests

@@ -905,8 +903,8 @@
     )

     if resp.status_code != 200:
-        do_raise: bool = (
-            config.job_raise_error if raise_error is None else raise_error
+        do_raise: bool = dynamic(
+            "job_raise_error", f=raise_error, extras=job.extras
         )
         if do_raise:
             raise JobException(
@@ -914,5 +912,5 @@
                 f"{job.runs_on.args.host!r}"
             )

-        return result.catch(status=Status.FAILED)
-    return result.catch(status=Status.SUCCESS)
+        return result.catch(status=FAILED)
+    return result.catch(status=SUCCESS)
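
The two `do_raise` rewrites show `dynamic` with its `f` argument in practice: under the `rsx or rs` chaining in conf.py, a truthy `job_raise_error` in the job's `extras` wins, then the explicit `raise_error` argument, then the global config. A sketch of the resolution order:

```python
from ddeutil.workflow.conf import dynamic

# Neither extras nor an explicit argument: the global config value is used.
print(dynamic("job_raise_error"))

# The explicit argument (the old raise_error parameter) when truthy:
print(dynamic("job_raise_error", f=True))

# A truthy per-job override carried in extras wins over both:
print(dynamic("job_raise_error", f=False, extras={"job_raise_error": True}))
```
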