ddeutil-workflow 0.0.67__py3-none-any.whl → 0.0.69__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,374 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ import json
9
+ import logging
10
+ import os
11
+ from abc import ABC, abstractmethod
12
+ from collections.abc import Iterator
13
+ from datetime import datetime
14
+ from pathlib import Path
15
+ from typing import ClassVar, Optional, TypeVar, Union
16
+
17
+ from pydantic import BaseModel, Field
18
+ from pydantic.functional_validators import model_validator
19
+ from typing_extensions import Self
20
+
21
+ from .__types import DictData
22
+ from .conf import dynamic
23
+ from .traces import TraceModel, get_trace, set_logging
24
+
25
+ logger = logging.getLogger("ddeutil.workflow")
26
+
27
+
28
class BaseAudit(BaseModel, ABC):
    """Base Audit Pydantic model that holds only the shared audit fields.

    Concrete logging subclasses (file, SQLite, etc.) inherit from this model
    and implement the lookup (`is_pointed`, `find_audits`,
    `find_audit_with_release`) and persistence (`save`) hooks.
    """

    extras: DictData = Field(
        default_factory=dict,
        description="An extras parameter that want to override core config",
    )
    name: str = Field(description="A workflow name.")
    release: datetime = Field(description="A release datetime.")
    type: str = Field(description="A running type before logging.")
    context: DictData = Field(
        default_factory=dict,
        description="A context that receive from a workflow execution result.",
    )
    parent_run_id: Optional[str] = Field(
        default=None, description="A parent running ID."
    )
    run_id: str = Field(description="A running ID")
    execution_time: float = Field(default=0, description="An execution time.")

    @model_validator(mode="after")
    def __model_action(self) -> Self:
        """Run `do_before` when the `enable_write_audit` config flag is set,
        then initialize the (cached) logging configuration.

        :rtype: Self
        """
        if dynamic("enable_write_audit", extras=self.extras):
            self.do_before()

        # NOTE: Start setting log config in this line with cache.
        set_logging("ddeutil.workflow")
        return self

    @classmethod
    @abstractmethod
    def is_pointed(
        cls,
        name: str,
        release: datetime,
        *,
        extras: Optional[DictData] = None,
    ) -> bool:
        """Check whether an audit record already exists for this release.

        :param name: (str) A workflow name.
        :param release: (datetime) A release datetime.
        :param extras: An extra parameter that want to override core config.

        :rtype: bool
        """
        raise NotImplementedError(
            "Audit should implement `is_pointed` class-method"
        )

    @classmethod
    @abstractmethod
    def find_audits(
        cls,
        name: str,
        *,
        extras: Optional[DictData] = None,
    ) -> Iterator[Self]:
        """Iterate over all audit records stored for the given workflow name.

        :param name: (str) A workflow name.
        :param extras: An extra parameter that want to override core config.

        :rtype: Iterator[Self]
        """
        raise NotImplementedError(
            "Audit should implement `find_audits` class-method"
        )

    @classmethod
    @abstractmethod
    def find_audit_with_release(
        cls,
        name: str,
        release: Optional[datetime] = None,
        *,
        extras: Optional[DictData] = None,
    ) -> Self:
        """Find the audit record of a specific workflow release.

        :param name: (str) A workflow name.
        :param release: (datetime) A release datetime that want to search.
        :param extras: An extra parameter that want to override core config.

        :rtype: Self
        """
        raise NotImplementedError(
            "Audit should implement `find_audit_with_release` class-method"
        )

    def do_before(self) -> None:  # pragma: no cov
        """Do something before ending the initial log model step."""

    @abstractmethod
    def save(
        self, excluded: Optional[list[str]] = None
    ) -> None:  # pragma: no cov
        """Save this model logging to target logging store.

        :param excluded: An excluded list of key name that want to pass to the
            model_dump method.
        """
        raise NotImplementedError("Audit should implement `save` method.")
109
+
110
+
111
class NullAudit(BaseAudit):
    """Null Audit model that disables audit persistence entirely.

    Every release is reported as not pointed, and `save` performs no work.
    """

    def save(self, excluded: Optional[list[str]]) -> None:
        """Skip writing because the null audit has no backing store."""
        return None

    @classmethod
    def is_pointed(
        cls,
        name: str,
        release: datetime,
        *,
        extras: Optional[DictData] = None,
    ) -> bool:
        """Always report that no audit record exists for this release."""
        return False

    @classmethod
    def find_audits(
        cls,
        name: str,
        *,
        extras: Optional[DictData] = None,
    ) -> Iterator[Self]:
        """Searching is not supported on the null audit store."""
        raise NotImplementedError()

    @classmethod
    def find_audit_with_release(
        cls,
        name: str,
        release: Optional[datetime] = None,
        *,
        extras: Optional[DictData] = None,
    ) -> Self:
        """Searching is not supported on the null audit store."""
        raise NotImplementedError()
145
+
146
+
147
class FileAudit(BaseAudit):
    """File Audit Pydantic Model that use to saving log data from result of
    workflow execution. It inherits from BaseAudit model that implement the
    ``self.save`` method for file.
    """

    # NOTE: Relative path template of one release directory under the
    #   configured audit path.
    filename_fmt: ClassVar[str] = (
        "workflow={name}/release={release:%Y%m%d%H%M%S}"
    )

    def do_before(self) -> None:
        """Create directory of release before saving log file."""
        self.pointer().mkdir(parents=True, exist_ok=True)

    @classmethod
    def find_audits(
        cls, name: str, *, extras: Optional[DictData] = None
    ) -> Iterator[Self]:
        """Generate the audit data that found from logs path with specific a
        workflow name.

        :param name: A workflow name that want to search release logging data.
        :param extras: An extra parameter that want to override core config.

        :raise FileNotFoundError: If the workflow directory does not exist
            under the configured audit path.

        :rtype: Iterator[Self]
        """
        pointer: Path = (
            dynamic("audit_path", extras=extras) / f"workflow={name}"
        )
        if not pointer.exists():
            raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")

        for file in pointer.glob("./release=*/*.log"):
            with file.open(mode="r", encoding="utf-8") as f:
                yield cls.model_validate(obj=json.load(f))

    @classmethod
    def find_audit_with_release(
        cls,
        name: str,
        release: Optional[datetime] = None,
        *,
        extras: Optional[DictData] = None,
    ) -> Self:
        """Return the audit data that found from logs path with specific
        workflow name and release values. If a release does not pass to an input
        argument, it will return the latest release from the current log path.

        :param name: (str) A workflow name that want to search log.
        :param release: (datetime) A release datetime that want to search log.
        :param extras: An extra parameter that want to override core config.

        :raise FileNotFoundError: If the release directory does not exist or
            contains no log file.
        :raise NotImplementedError: If an input release does not pass to this
            method. Because this method does not implement latest log.

        :rtype: Self
        """
        if release is None:
            raise NotImplementedError("Find latest log does not implement yet.")

        pointer: Path = (
            dynamic("audit_path", extras=extras)
            / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
        )
        if not pointer.exists():
            # NOTE: Report the real configured path instead of a hard-coded
            #   `./logs` prefix that may not match the audit path.
            raise FileNotFoundError(
                f"Pointer: {pointer.absolute()} does not found."
            )

        # NOTE: Guard the empty directory case so the caller gets a clear
        #   FileNotFoundError instead of a bare ValueError from `max`.
        log_files: list[Path] = list(pointer.glob("./*.log"))
        if not log_files:
            raise FileNotFoundError(
                f"Pointer: {pointer.absolute()} does not contain any log file."
            )

        latest_file: Path = max(log_files, key=os.path.getctime)
        with latest_file.open(mode="r", encoding="utf-8") as f:
            return cls.model_validate(obj=json.load(f))

    @classmethod
    def is_pointed(
        cls,
        name: str,
        release: datetime,
        *,
        extras: Optional[DictData] = None,
    ) -> bool:
        """Check the release log already pointed or created at the destination
        log path.

        :param name: (str) A workflow name.
        :param release: (datetime) A release datetime.
        :param extras: An extra parameter that want to override core config.

        :rtype: bool
        :return: Return False if the release log was not pointed or created.
        """
        # NOTE: Return False if enable writing log flag does not set.
        if not dynamic("enable_write_audit", extras=extras):
            return False

        # NOTE: create pointer path that use the same logic of pointer method.
        pointer: Path = dynamic(
            "audit_path", extras=extras
        ) / cls.filename_fmt.format(name=name, release=release)

        return pointer.exists()

    def pointer(self) -> Path:
        """Return release directory path that was generated from model data.

        :rtype: Path
        """
        return dynamic(
            "audit_path", extras=self.extras
        ) / self.filename_fmt.format(name=self.name, release=self.release)

    def save(self, excluded: Optional[list[str]] = None) -> Self:
        """Save logging data that receive a context data from a workflow
        execution result.

        :param excluded: An excluded list of key name that want to pass in the
            model_dump method.

        :rtype: Self
        """
        trace: TraceModel = get_trace(
            self.run_id,
            parent_run_id=self.parent_run_id,
            extras=self.extras,
        )

        # NOTE: Check environ variable was set for real writing.
        if not dynamic("enable_write_audit", extras=self.extras):
            trace.debug("[AUDIT]: Skip writing log cause config was set")
            return self

        pointer: Path = self.pointer()
        # NOTE: Ensure the release directory exists even when `do_before` was
        #   skipped, because the enable flag may be toggled via extras after
        #   model initialization.
        pointer.mkdir(parents=True, exist_ok=True)

        log_file: Path = pointer / f"{self.parent_run_id or self.run_id}.log"
        log_file.write_text(
            json.dumps(
                self.model_dump(exclude=excluded),
                default=str,
                indent=2,
            ),
            encoding="utf-8",
        )
        return self
292
+
293
+
294
class SQLiteAudit(BaseAudit):  # pragma: no cov
    """SQLite Audit Pydantic Model.

    NOTE: The persistence layer is not implemented yet; `save` raises
    `NotImplementedError` after the write-enable check.
    """

    table_name: ClassVar[str] = "audits"
    # NOTE: Placeholder DDL. Fixed the missing comma before the
    #   `primary key` clause; the column types are still not valid SQLite
    #   type names and must be revisited when this model is implemented.
    schemas: ClassVar[
        str
    ] = """
        workflow str,
        release int,
        type str,
        context json,
        parent_run_id int,
        run_id int,
        update datetime,
        primary key ( run_id )
        """

    @classmethod
    def is_pointed(
        cls,
        name: str,
        release: datetime,
        *,
        extras: Optional[DictData] = None,
    ) -> bool: ...

    @classmethod
    def find_audits(
        cls,
        name: str,
        *,
        extras: Optional[DictData] = None,
    ) -> Iterator[Self]: ...

    @classmethod
    def find_audit_with_release(
        cls,
        name: str,
        release: Optional[datetime] = None,
        *,
        extras: Optional[DictData] = None,
    ) -> Self: ...

    def save(self, excluded: Optional[list[str]]) -> SQLiteAudit:
        """Save logging data that receive a context data from a workflow
        execution result.

        :param excluded: An excluded list of key name that want to pass to the
            model_dump method.

        :raise NotImplementedError: Always, when writing is enabled, because
            the SQLite backend is not implemented yet.
        """
        trace: TraceModel = get_trace(
            self.run_id,
            parent_run_id=self.parent_run_id,
            extras=self.extras,
        )

        # NOTE: Check environ variable was set for real writing.
        if not dynamic("enable_write_audit", extras=self.extras):
            trace.debug("[AUDIT]: Skip writing log cause config was set")
            return self

        raise NotImplementedError("SQLiteAudit does not implement yet.")
353
+
354
+
355
Audit = TypeVar("Audit", bound=BaseAudit)
AuditModel = Union[NullAudit, FileAudit, SQLiteAudit]


def get_audit(
    extras: Optional[DictData] = None,
) -> type[AuditModel]:  # pragma: no cov
    """Return the audit class to use, chosen from the configured audit path.

    A path that points at an existing file selects the SQLite backend;
    otherwise the file-per-release backend is used.

    :param extras: An extra parameter that want to override the core config.

    :rtype: type[AuditModel]
    """
    audit_path: Path = dynamic("audit_path", extras=extras)
    return SQLiteAudit if audit_path.is_file() else FileAudit
ddeutil/workflow/cli.py CHANGED
@@ -1,12 +1,17 @@
1
1
  import json
2
- from typing import Annotated, Any
2
+ from pathlib import Path
3
+ from platform import python_version
4
+ from typing import Annotated, Any, Optional
3
5
 
4
6
  import typer
5
7
  import uvicorn
6
8
 
7
9
  from .__about__ import __version__
10
+ from .__types import DictData
8
11
  from .api import app as fastapp
9
- from .api.logs import LOGGING_CONFIG
12
+ from .errors import JobError
13
+ from .job import Job
14
+ from .result import Result
10
15
 
11
16
  app = typer.Typer(
12
17
  pretty_exceptions_enable=True,
@@ -14,23 +19,27 @@ app = typer.Typer(
14
19
 
15
20
 
16
21
  @app.callback()
17
- def callback():
18
- """
19
- Awesome Portal Gun
22
+ def callback() -> None:
23
+ """Manage Workflow Orchestration CLI.
24
+
25
+ Use it with the interface workflow engine.
20
26
  """
21
- typer.echo("Start call from callback function")
22
27
 
23
28
 
24
29
  @app.command()
25
- def version():
30
+ def version() -> None:
26
31
  """Get the ddeutil-workflow package version."""
27
- typer.echo(__version__)
32
+ typer.echo(f"ddeutil-workflow=={__version__}")
33
+ typer.echo(f"python-version=={python_version()}")
28
34
 
29
35
 
30
- @app.command()
31
- def job(
36
+ @app.command(name="job")
37
+ def execute_job(
32
38
  params: Annotated[str, typer.Option(help="A job execute parameters")],
33
- ):
39
+ job: Annotated[str, typer.Option(help="A job model")],
40
+ parent_run_id: Annotated[str, typer.Option(help="A parent running ID")],
41
+ run_id: Annotated[Optional[str], typer.Option(help="A running ID")] = None,
42
+ ) -> None:
34
43
  """Job execution on the local.
35
44
 
36
45
  Example:
@@ -39,8 +48,32 @@ def job(
39
48
  try:
40
49
  params_dict: dict[str, Any] = json.loads(params)
41
50
  except json.JSONDecodeError as e:
42
- raise ValueError(f"params does not support format: {params!r}.") from e
51
+ raise ValueError(f"Params does not support format: {params!r}.") from e
52
+
53
+ try:
54
+ job_dict: dict[str, Any] = json.loads(job)
55
+ _job: Job = Job.model_validate(obj=job_dict)
56
+ except json.JSONDecodeError as e:
57
+ raise ValueError(f"Params does not support format: {params!r}.") from e
58
+
43
59
  typer.echo(f"Job params: {params_dict}")
60
+ rs: Result = Result(
61
+ run_id=run_id,
62
+ parent_run_id=parent_run_id,
63
+ )
64
+
65
+ context: DictData = {}
66
+ try:
67
+ _job.set_outputs(
68
+ _job.execute(
69
+ params=params_dict,
70
+ run_id=rs.run_id,
71
+ parent_run_id=rs.parent_run_id,
72
+ ).context,
73
+ to=context,
74
+ )
75
+ except JobError as err:
76
+ rs.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
44
77
 
45
78
 
46
79
  @app.command()
@@ -48,21 +81,53 @@ def api(
48
81
  host: Annotated[str, typer.Option(help="A host url.")] = "0.0.0.0",
49
82
  port: Annotated[int, typer.Option(help="A port url.")] = 80,
50
83
  debug: Annotated[bool, typer.Option(help="A debug mode flag")] = True,
51
- worker: Annotated[int, typer.Option(help="A worker number")] = None,
84
+ workers: Annotated[int, typer.Option(help="A worker number")] = None,
85
+ reload: Annotated[bool, typer.Option(help="A reload flag")] = False,
52
86
  ):
53
87
  """
54
88
  Provision API application from the FastAPI.
55
89
  """
90
+ from .api.log_conf import LOGGING_CONFIG
91
+
92
+ # LOGGING_CONFIG = {}
56
93
 
57
94
  uvicorn.run(
58
95
  fastapp,
59
96
  host=host,
60
97
  port=port,
61
98
  log_config=uvicorn.config.LOGGING_CONFIG | LOGGING_CONFIG,
62
- log_level=("DEBUG" if debug else "INFO"),
63
- workers=worker,
99
+ # NOTE: Logging level of uvicorn should be lowered case.
100
+ log_level=("debug" if debug else "info"),
101
+ workers=workers,
102
+ reload=reload,
64
103
  )
65
104
 
66
105
 
106
+ @app.command()
107
+ def make(
108
+ name: Annotated[Path, typer.Argument()],
109
+ ) -> None:
110
+ """
111
+ Create Workflow YAML template.
112
+
113
+ :param name:
114
+ """
115
+ typer.echo(f"Start create YAML template filename: {name.resolve()}")
116
+
117
+
118
+ workflow_app = typer.Typer()
119
+ app.add_typer(workflow_app, name="workflow", help="An Only Workflow CLI.")
120
+
121
+
122
+ @workflow_app.callback()
123
+ def workflow_callback():
124
+ """Manage Only Workflow CLI."""
125
+
126
+
127
+ @workflow_app.command(name="execute")
128
+ def workflow_execute():
129
+ """"""
130
+
131
+
67
132
  if __name__ == "__main__":
68
133
  app()
ddeutil/workflow/conf.py CHANGED
@@ -7,12 +7,11 @@ from __future__ import annotations
7
7
 
8
8
  import copy
9
9
  import os
10
- from abc import ABC, abstractmethod
11
10
  from collections.abc import Iterator
12
11
  from functools import cached_property
13
12
  from inspect import isclass
14
13
  from pathlib import Path
15
- from typing import Final, Optional, Protocol, TypeVar, Union
14
+ from typing import Final, Optional, TypeVar, Union
16
15
  from zoneinfo import ZoneInfo
17
16
 
18
17
  from ddeutil.core import str2bool
@@ -109,9 +108,9 @@ class Config: # pragma: no cov
109
108
  return env(
110
109
  "LOG_FORMAT",
111
110
  (
112
- "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
111
+ "%(asctime)s.%(msecs)03d (%(process)-5d, "
113
112
  "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
114
- "(%(filename)s:%(lineno)s)"
113
+ "(%(filename)s:%(lineno)s) (%(name)-10s)"
115
114
  ),
116
115
  )
117
116
 
@@ -145,44 +144,20 @@ class Config: # pragma: no cov
145
144
  def stage_default_id(self) -> bool:
146
145
  return str2bool(env("CORE_STAGE_DEFAULT_ID", "false"))
147
146
 
148
- @property
149
- def max_cron_per_workflow(self) -> int:
150
- """The maximum on value that store in workflow model.
151
-
152
- :rtype: int
153
- """
154
- return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))
155
-
156
- @property
157
- def max_queue_complete_hist(self) -> int:
158
- return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))
159
-
160
147
 
161
148
  class APIConfig:
162
149
  """API Config object."""
163
150
 
164
151
  @property
165
- def prefix_path(self) -> str:
166
- return env("API_PREFIX_PATH", "/api/v1")
167
-
168
-
169
- class BaseLoad(ABC): # pragma: no cov
170
- """Base Load object is the abstraction object for any Load object that
171
- should to inherit from this base class.
172
- """
173
-
174
- @classmethod
175
- @abstractmethod
176
- def find(cls, name: str, *args, **kwargs) -> DictData: ...
152
+ def version(self) -> str:
153
+ return env("API_VERSION", "1")
177
154
 
178
- @classmethod
179
- @abstractmethod
180
- def finds(
181
- cls, obj: object, *args, **kwargs
182
- ) -> Iterator[tuple[str, DictData]]: ...
155
+ @property
156
+ def prefix_path(self) -> str:
157
+ return env("API_PREFIX_PATH", f"/api/v{self.version}")
183
158
 
184
159
 
185
- class FileLoad(BaseLoad):
160
+ class YamlParser:
186
161
  """Base Load object that use to search config data by given some identity
187
162
  value like name of `Workflow` or `Crontab` templates.
188
163
 
@@ -424,24 +399,6 @@ def dynamic(
424
399
  return extra
425
400
 
426
401
 
427
- class Loader(Protocol): # pragma: no cov
428
- type: str
429
- path: Path
430
- data: DictData
431
- extras: DictData
432
- externals: DictData
433
-
434
- def __init__(self, *args, **kwargs) -> None: ...
435
-
436
- @classmethod
437
- def find(cls, name: str, *args, **kwargs) -> DictData: ...
438
-
439
- @classmethod
440
- def finds(
441
- cls, obj: object, *args, **kwargs
442
- ) -> Iterator[tuple[str, DictData]]: ...
443
-
444
-
445
402
  def pass_env(value: T) -> T: # pragma: no cov
446
403
  """Passing environment variable to an input value.
447
404
 
ddeutil/workflow/event.py CHANGED
@@ -3,8 +3,9 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
- """Event module include all event object for trigger the Workflow to release.
7
- Now, it has only `Crontab` and `CrontabYear` event models on this module because
6
+ """An Event module keep all triggerable object to the Workflow model. The simple
7
+ event trigger that use to run workflow is `Crontab` model.
8
+ Now, it has only `Crontab` and `CrontabYear` event models in this module because
8
9
  I think it is the core event for workflow orchestration.
9
10
  """
10
11
  from __future__ import annotations
@@ -22,7 +23,7 @@ from typing_extensions import Self
22
23
 
23
24
  from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner, Options
24
25
  from .__types import DictData, DictStr
25
- from .conf import FileLoad
26
+ from .conf import YamlParser
26
27
 
27
28
  Interval = Literal["daily", "weekly", "monthly"]
28
29
 
@@ -95,7 +96,7 @@ class Crontab(BaseModel):
95
96
  tz: Annotated[
96
97
  TimeZoneName,
97
98
  Field(
98
- description="A timezone string value",
99
+ description="A timezone string value.",
99
100
  alias="timezone",
100
101
  ),
101
102
  ] = "UTC"
@@ -138,7 +139,7 @@ class Crontab(BaseModel):
138
139
  :rtype: Self
139
140
  """
140
141
  extras: DictData = extras or {}
141
- loader: FileLoad = FileLoad(name, extras=extras)
142
+ loader: YamlParser = YamlParser(name, extras=extras)
142
143
 
143
144
  # NOTE: Validate the config type match with current connection model
144
145
  if loader.type != cls.__name__:
@@ -13,6 +13,7 @@ from dataclasses import field
13
13
  from datetime import datetime
14
14
  from enum import Enum
15
15
  from typing import Optional, Union
16
+ from zoneinfo import ZoneInfo
16
17
 
17
18
  from pydantic import ConfigDict
18
19
  from pydantic.dataclasses import dataclass
@@ -30,12 +31,20 @@ from . import (
30
31
  WorkflowError,
31
32
  )
32
33
  from .__types import DictData
34
+ from .audits import TraceModel, get_trace
33
35
  from .conf import dynamic
34
36
  from .errors import ResultError
35
- from .logs import TraceModel, get_dt_tznow, get_trace
36
37
  from .utils import default_gen_id, gen_id, get_dt_now
37
38
 
38
39
 
40
+ def get_dt_tznow(tz: Optional[ZoneInfo] = None) -> datetime: # pragma: no cov
41
+ """Return the current datetime object that passing the config timezone.
42
+
43
+ :rtype: datetime
44
+ """
45
+ return get_dt_now(tz=dynamic("tz", f=tz))
46
+
47
+
39
48
  class Status(str, Enum):
40
49
  """Status Int Enum object that use for tracking execution status to the
41
50
  Result dataclass object.