ddeutil-workflow 0.0.35__py3-none-any.whl → 0.0.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__version__: str = "0.0.35"
+__version__: str = "0.0.36"
@@ -37,9 +37,11 @@ from .exceptions import (
 )
 from .job import (
     Job,
+    RunsOn,
     Strategy,
 )
 from .logs import (
+    TraceData,
     TraceLog,
     get_dt_tznow,
     get_trace,
@@ -11,7 +11,11 @@ from datetime import datetime, timedelta
 from typing import TypedDict

 from dotenv import load_dotenv
-from fastapi import FastAPI
+from fastapi import FastAPI, Request
+from fastapi import status as st
+from fastapi.encoders import jsonable_encoder
+from fastapi.exceptions import RequestValidationError
+from fastapi.middleware.cors import CORSMiddleware
 from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse

@@ -20,7 +24,7 @@ from ..conf import config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
-from .routes import log
+from .routes import job, log

 load_dotenv()
 logger = get_logger("ddeutil.workflow")
@@ -61,24 +65,38 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:


 app = FastAPI(
-    titile="Workflow API",
+    titile="Workflow",
     description=(
-        "This is workflow FastAPI web application that use to manage manual "
-        "execute or schedule workflow via RestAPI."
+        "This is a workflow FastAPI application that use to manage manual "
+        "execute, logging, and schedule workflow via RestAPI."
     ),
     version=__version__,
     lifespan=lifespan,
     default_response_class=UJSONResponse,
 )
 app.add_middleware(GZipMiddleware, minimum_size=1000)
+origins: list[str] = [
+    "http://localhost",
+    "http://localhost:88",
+    "http://localhost:80",
+]
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=origins,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)


 @app.get("/")
 async def health():
-    return {"message": "Workflow API already start up"}
+    """Index view that not return any template without json status."""
+    return {"message": "Workflow already start up with healthy status."}


-# NOTE Add the logs route by default.
+# NOTE Add the jobs and logs routes by default.
+app.include_router(job, prefix=config.prefix_path)
 app.include_router(log, prefix=config.prefix_path)


@@ -111,12 +129,13 @@ if config.enable_route_schedule:
         stop=datetime.now(config.tz) + timedelta(minutes=1),
         queue=app.state.workflow_queue,
         threads=app.state.workflow_threads,
-        log=get_audit(),
+        audit=get_audit(),
     )

     @schedule.on_event("startup")
     @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
+        """Monitoring workflow thread that running in the background."""
         logger.debug("[MONITOR]: Start monitoring threading.")
         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
         for t_name in snapshot_threads:
@@ -126,3 +145,23 @@ if config.enable_route_schedule:
             # NOTE: remove the thread that running success.
             if not thread_release["thread"].is_alive():
                 app.state.workflow_threads.pop(t_name)
+
+
+@app.exception_handler(RequestValidationError)
+async def validation_exception_handler(
+    request: Request, exc: RequestValidationError
+):
+    return UJSONResponse(
+        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
+        content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
+    )
+
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run(
+        app,
+        host="0.0.0.0",
+        port=80,
+    )
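
These application-level hunks add CORS support and a catch-all `RequestValidationError` handler, so malformed request bodies now come back as UJSON 422 payloads that echo the validation errors and the offending body. A minimal sketch of exercising the reworked health route and the new handler with FastAPI's test client; the `/api/v1` prefix is an assumption standing in for whatever `config.prefix_path` resolves to:

    from fastapi.testclient import TestClient

    client = TestClient(app)  # `app` as built in the module above

    # The index route now reports a healthy-status message.
    assert client.get("/").json() == {
        "message": "Workflow already start up with healthy status."
    }

    # A malformed body trips the RequestValidationError handler and returns
    # 422 with the validation detail and the body echoed back.
    rs = client.post("/api/v1/job/execute/", json={"bad": "payload"})
    assert rs.status_code == 422
    assert "detail" in rs.json() and "body" in rs.json()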
@@ -21,17 +21,26 @@ logger = get_logger("ddeutil.workflow")
 def get_cronjob_delta(cron: str) -> float:
     """This function returns the time delta between now and the next cron
     execution time.
+
+    :rtype: float
     """
     now: datetime = datetime.now(tz=config.tz)
     cron = CronJob(cron)
     return (cron.schedule(now).next - now).total_seconds()


-def cron_valid(cron: str):
+def cron_valid(cron: str, raise_error: bool = True) -> bool:
+    """Check this crontab string value is valid with its cron syntax.
+
+    :rtype: bool
+    """
     try:
         CronJob(cron)
+        return True
     except Exception as err:
-        raise ValueError(f"Crontab value does not valid, {cron}") from err
+        if raise_error:
+            raise ValueError(f"Crontab value does not valid, {cron}") from err
+        return False


 async def run_func(
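
`cron_valid` now doubles as a predicate: with `raise_error=False` it reports an invalid crontab string instead of raising. A short sketch of both modes (the module path in the import is assumed from the `from .repeat import repeat_at` line above):

    from ddeutil.workflow.api.repeat import cron_valid  # path assumed

    assert cron_valid("*/5 * * * *") is True

    # Default behavior is unchanged: invalid values still raise.
    # cron_valid("61 * * * *")  # -> ValueError: Crontab value does not valid, ...

    # Predicate mode, handy for filtering user-supplied schedules
    # (61 is an out-of-range minute field):
    assert cron_valid("61 * * * *", raise_error=False) is False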
@@ -41,6 +50,7 @@ async def run_func(
     raise_exceptions: bool = False,
     **kwargs,
 ):
+    """Run function inside the repeat decorator functions."""
     try:
         if is_coroutine:
             await func(*args, **kwargs)
@@ -62,11 +72,11 @@ def repeat_at(
     """This function returns a decorator that makes a function execute
     periodically as per the cron expression provided.

-    :param cron: str
-        Cron-style string for periodic execution, eg. '0 0 * * *' every midnight
-    :param delay:
-    :param raise_exceptions: bool (default False)
-        Whether to raise exceptions or log them
+    :param cron: (str) A Cron-style string for periodic execution, e.g.
+        '0 0 * * *' every midnight
+    :param delay: (float) A delay seconds value.
+    :param raise_exceptions: (bool) A raise exception flag. Whether to raise
+        exceptions or log them if raise was set be false.
     :param max_repetitions: int (default None)
         Maximum number of times to repeat the function. If None, repeat
         indefinitely.
@@ -81,12 +91,12 @@ def repeat_at(

     @wraps(func)
     def wrapper(*_args, **_kwargs):
-        repititions: int = 0
+        repetitions: int = 0
         cron_valid(cron)

         async def loop(*args, **kwargs):
-            nonlocal repititions
-            while max_repetitions is None or repititions < max_repetitions:
+            nonlocal repetitions
+            while max_repetitions is None or repetitions < max_repetitions:
                 sleep_time = get_cronjob_delta(cron) + delay
                 await asyncio.sleep(sleep_time)
                 await run_func(
@@ -96,7 +106,7 @@ def repeat_at(
                     raise_exceptions=raise_exceptions,
                     **kwargs,
                 )
-            repititions += 1
+            repetitions += 1

         ensure_future(loop(*_args, **_kwargs))

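These hunks are docstring additions and a spelling fix (`repititions` → `repetitions`) that leave the decorator's contract unchanged. For orientation, a hedged usage sketch mirroring the `monitoring` function registered in the API module above (import path assumed as before):

    from ddeutil.workflow.api.repeat import repeat_at  # path assumed

    # Run `heartbeat` on every fifth minute, 10 seconds after the cron tick,
    # at most 12 times; with raise_exceptions left False, errors are logged
    # instead of propagated.
    @repeat_at(cron="*/5 * * * *", delay=10, max_repetitions=12)
    def heartbeat():
        print("still alive")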
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .job import job_route as job
 from .logs import log_route as log
 from .schedules import schedule_route as schedule
 from .workflows import workflow_route as workflow
@@ -0,0 +1,73 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from fastapi import APIRouter
+from fastapi.responses import UJSONResponse
+from pydantic import BaseModel
+
+from ...__types import DictData
+from ...conf import get_logger
+from ...exceptions import JobException
+from ...job import Job
+from ...result import Result
+
+logger = get_logger("ddeutil.workflow")
+
+
+job_route = APIRouter(
+    prefix="/job",
+    tags=["job"],
+    default_response_class=UJSONResponse,
+)
+
+
+class ResultPost(BaseModel):
+    context: DictData
+    run_id: str
+    parent_run_id: Optional[str] = None
+
+
+@job_route.post(path="/execute/")
+async def job_execute(
+    result: ResultPost,
+    job: Job,
+    params: dict[str, Any],
+):
+    """Execute job via API."""
+    rs: Result = Result(
+        context=result.context,
+        run_id=result.run_id,
+        parent_run_id=result.parent_run_id,
+    )
+    try:
+        job.set_outputs(
+            job.execute(
+                params=params,
+                run_id=rs.run_id,
+                parent_run_id=rs.parent_run_id,
+            ).context,
+            to=params,
+        )
+    except JobException as err:
+        rs.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+
+    return {
+        "message": "Start execute job via API.",
+        "result": {
+            "run_id": rs.run_id,
+            "parent_run_id": rs.parent_run_id,
+        },
+        "job": job.model_dump(
+            by_alias=True,
+            exclude_none=True,
+            exclude_unset=True,
+            exclude_defaults=True,
+        ),
+        "params": params,
+    }
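
Because `job_execute` declares three body parameters (`result`, `job`, `params`), FastAPI expects them embedded under their parameter names in one JSON document. A hedged sketch of a call with `httpx`; the host, the `/api/v1` prefix, and the minimal `Job` payload are illustrative assumptions, not the canonical schema:

    import httpx

    payload = {
        "result": {"context": {}, "run_id": "20240101000000000001"},
        # A minimal Job body for illustration; the real schema is defined
        # by ddeutil/workflow/job.py.
        "job": {"stages": [{"name": "Echo", "echo": "hello world"}]},
        "params": {"name": "demo"},
    }
    # "/api/v1" stands in for whatever config.prefix_path resolves to.
    rs = httpx.post("http://localhost:80/api/v1/job/execute/", json=payload)
    print(rs.json()["message"])  # -> "Start execute job via API."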
@@ -3,34 +3,62 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+"""This route include audit and trace log paths."""
 from __future__ import annotations

 from fastapi import APIRouter
 from fastapi.responses import UJSONResponse

-from ...conf import get_logger
+from ...audit import get_audit
 from ...logs import get_trace_obj

-logger = get_logger("ddeutil.workflow")
-
-
-# NOTE: Start create the schedule routes.
-#
 log_route = APIRouter(
     prefix="/logs",
-    tags=["logs"],
+    tags=["logs", "trace", "audit"],
     default_response_class=UJSONResponse,
 )


-@log_route.get(path="/")
-async def get_logs():
+@log_route.get(path="/trace/")
+async def get_traces():
+    """Get all trace logs."""
     return {
-        "message": "Getting logs",
-        "audits": list(get_trace_obj().find_logs()),
+        "message": "Getting trace logs",
+        "traces": list(get_trace_obj().find_logs()),
     }


-@log_route.get(path="/{run_id}")
-async def get_log_with_run_id(run_id: str):
+@log_route.get(path="/trace/{run_id}")
+async def get_trace_with_id(run_id: str):
+    """Get trace log with specific running ID."""
     return get_trace_obj().find_log_with_id(run_id)
+
+
+@log_route.get(path="/audit/")
+async def get_audits():
+    """Get all audit logs."""
+    return {
+        "message": "Getting audit logs",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(path="/audit/{workflow}/")
+async def get_audit_with_workflow(workflow: str):
+    """Get all audit logs."""
+    return {
+        "message": f"Getting audit logs with workflow name {workflow}",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(path="/audit/{workflow}/{release}")
+async def get_audit_with_workflow_release(workflow: str, release: str):
+    """Get all audit logs."""
+    return {
+        "message": (
+            f"Getting audit logs with workflow name {workflow} and release "
+            f"{release}"
+        ),
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
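
Note that all three new audit endpoints still call `find_audits(name="demo")` with a hardcoded name, so the `{workflow}` and `{release}` path parameters currently only affect the response message. A sketch of querying the new paths, with the `/api/v1` prefix again assumed in place of `config.prefix_path`:

    import httpx

    base = "http://localhost:80/api/v1/logs"
    for path in ("/trace/", "/audit/", "/audit/wf-demo/", "/audit/wf-demo/20240101"):
        rs = httpx.get(base + path)
        print(path, "->", rs.json().get("message"))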
@@ -26,6 +26,7 @@ schedule_route = APIRouter(

 @schedule_route.get(path="/{name}")
 async def get_schedules(name: str):
+    """Get schedule object."""
     try:
         schedule: Schedule = Schedule.from_loader(name=name, externals={})
     except ValueError:
ddeutil/workflow/audit.py CHANGED
@@ -112,7 +112,8 @@ class FileAudit(BaseAudit):
         :param release: A release datetime that want to search log.

         :raise FileNotFoundError:
-        :raise NotImplementedError:
+        :raise NotImplementedError: If an input release does not pass to this
+            method. Because this method does not implement latest log.

         :rtype: Self
         """
@@ -181,7 +182,9 @@ class FileAudit(BaseAudit):
             trace.debug("[LOG]: Skip writing log cause config was set")
             return self

-        log_file: Path = self.pointer() / f"{self.run_id}.log"
+        log_file: Path = (
+            self.pointer() / f"{self.parent_run_id or self.run_id}.log"
+        )
         log_file.write_text(
             json.dumps(
                 self.model_dump(exclude=excluded),
@@ -196,7 +199,7 @@ class FileAudit(BaseAudit):
 class SQLiteAudit(BaseAudit):  # pragma: no cov
     """SQLite Audit Pydantic Model."""

-    table_name: ClassVar[str] = "workflow_log"
+    table_name: ClassVar[str] = "audits"
     schemas: ClassVar[
         str
     ] = """
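
The second hunk keys the audit file by the parent run when one exists, so runs released under the same parent share a single `<parent_run_id>.log` file instead of producing one file per run ID. The naming rule in isolation (a sketch, not the package API):

    def audit_filename(run_id: str, parent_run_id: str | None = None) -> str:
        """Mirror FileAudit.save's file naming: prefer the parent run's ID."""
        return f"{parent_run_id or run_id}.log"

    assert audit_filename("B456", parent_run_id="A123") == "A123.log"
    assert audit_filename("B456") == "B456.log"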
ddeutil/workflow/job.py CHANGED
@@ -32,7 +32,7 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import config, get_logger
+from .conf import config
 from .exceptions import (
     JobException,
     StageException,
@@ -48,7 +48,6 @@ from .utils import (
     gen_id,
 )

-logger = get_logger("ddeutil.workflow")
 MatrixFilter = list[dict[str, Union[str, int]]]


@@ -59,7 +58,6 @@ __all__: TupleStr = (
     "RunsOn",
     "RunsOnLocal",
     "RunsOnSelfHosted",
-    "RunsOnDocker",
     "RunsOnK8s",
     "make",
 )
@@ -225,12 +223,11 @@ class RunsOnType(str, Enum):
     """Runs-On enum object."""

     LOCAL: str = "local"
-    DOCKER: str = "docker"
     SELF_HOSTED: str = "self_hosted"
     K8S: str = "k8s"


-class BaseRunsOn(BaseModel):
+class BaseRunsOn(BaseModel):  # pragma: no cov
     model_config = ConfigDict(use_enum_values=True)

     type: Literal[RunsOnType.LOCAL]
@@ -240,13 +237,13 @@ class BaseRunsOn(BaseModel):
     )


-class RunsOnLocal(BaseRunsOn):
+class RunsOnLocal(BaseRunsOn):  # pragma: no cov
     """Runs-on local."""

     type: Literal[RunsOnType.LOCAL] = Field(default=RunsOnType.LOCAL)


-class RunsOnSelfHosted(BaseRunsOn):
+class RunsOnSelfHosted(BaseRunsOn):  # pragma: no cov
     """Runs-on self-hosted."""

     type: Literal[RunsOnType.SELF_HOSTED] = Field(
@@ -254,13 +251,7 @@ class RunsOnSelfHosted(BaseRunsOn):
     )


-class RunsOnDocker(BaseRunsOn):
-    """Runs-on local Docker."""
-
-    type: Literal[RunsOnType.DOCKER] = Field(default=RunsOnType.DOCKER)
-
-
-class RunsOnK8s(BaseRunsOn):
+class RunsOnK8s(BaseRunsOn):  # pragma: no cov
     """Runs-on Kubernetes."""

     type: Literal[RunsOnType.K8S] = Field(default=RunsOnType.K8S)
@@ -270,7 +261,6 @@ RunsOn = Annotated[
     Union[
         RunsOnLocal,
         RunsOnSelfHosted,
-        RunsOnDocker,
         RunsOnK8s,
     ],
     Field(discriminator="type"),
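
With `RunsOnDocker` dropped from the enum, the union, and `__all__`, the `RunsOn` annotated union now discriminates on `type` across three variants. A sketch of how pydantic v2 resolves the discriminator, using `TypeAdapter` since `RunsOn` is an `Annotated` union rather than a model:

    from pydantic import TypeAdapter

    from ddeutil.workflow.job import RunsOn, RunsOnLocal

    adapter = TypeAdapter(RunsOn)

    # The "type" field selects the variant, as in the Job docstring below.
    assert isinstance(adapter.validate_python({"type": "local"}), RunsOnLocal)

    # "docker" is no longer a valid discriminator value in 0.0.36:
    # adapter.validate_python({"type": "docker"})  # -> ValidationError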
@@ -286,7 +276,7 @@ class Job(BaseModel):

     Data Validate:
     >>> job = {
-    ...     "runs-on": None,
+    ...     "runs-on": {"type": "local"},
     ...     "strategy": {
     ...         "max-parallel": 1,
     ...         "matrix": {