ddeutil-workflow 0.0.54__py3-none-any.whl → 0.0.55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- __version__: str = "0.0.54"
1
+ __version__: str = "0.0.55"
@@ -1 +1,170 @@
1
- from .api import app
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ import contextlib
9
+ from collections.abc import AsyncIterator
10
+ from datetime import datetime, timedelta
11
+ from typing import TypedDict
12
+
13
+ from dotenv import load_dotenv
14
+ from fastapi import FastAPI, Request
15
+ from fastapi import status as st
16
+ from fastapi.encoders import jsonable_encoder
17
+ from fastapi.exceptions import RequestValidationError
18
+ from fastapi.middleware.cors import CORSMiddleware
19
+ from fastapi.middleware.gzip import GZipMiddleware
20
+ from fastapi.responses import UJSONResponse
21
+
22
+ from ..__about__ import __version__
23
+ from ..conf import api_config, config
24
+ from ..logs import get_logger
25
+ from ..scheduler import ReleaseThread, ReleaseThreads
26
+ from ..workflow import ReleaseQueue, WorkflowTask
27
+ from .routes import job, log
28
+ from .utils import repeat_at
29
+
30
# Load environment variables from a local ``.env`` file before reading config.
load_dotenv()
# Reuse uvicorn's error logger so app logs share its handlers/formatting.
logger = get_logger("uvicorn.error")
32
+
33
+
34
class State(TypedDict):
    """TypeDict for State of FastAPI application.

    This is the shape of the mapping yielded from the ``lifespan`` context
    manager; FastAPI exposes it as ``request.state`` fields.
    """

    # Workflow names tracked by the scheduler (presumably appended by the
    # schedules route — TODO confirm against the route handlers).
    scheduler: list[str]
    # Release threads that are currently running in the background.
    workflow_threads: ReleaseThreads
    # Tasks that the scheduler listener passes to ``schedule_task``.
    workflow_tasks: list[WorkflowTask]
    # Per-workflow release queues keyed by workflow name.
    workflow_queue: dict[str, ReleaseQueue]
41
+
42
+
43
@contextlib.asynccontextmanager
async def lifespan(a: FastAPI) -> AsyncIterator[State]:
    """Lifespan function for the FastAPI application.

    Initializes empty scheduler containers on ``a.state`` at startup and
    yields them as the application state mapping.
    """
    # Start every container empty; background tasks fill them at runtime.
    a.state.scheduler = []
    a.state.workflow_threads = {}
    a.state.workflow_tasks = []
    a.state.workflow_queue = {}

    # NOTE: Scheduler value should be contained a key of workflow and
    #   list of datetime of queue and running.
    #
    #   ... {
    #   ...     '<workflow-name>': (
    #   ...         [<running-datetime>, ...], [<queue-datetime>, ...]
    #   ...     )
    #   ... }
    #
    state: State = {
        "scheduler": a.state.scheduler,
        "workflow_queue": a.state.workflow_queue,
        "workflow_threads": a.state.workflow_threads,
        "workflow_tasks": a.state.workflow_tasks,
    }
    yield state
66
+
67
+
68
app = FastAPI(
    # FIX: was ``titile=`` — FastAPI silently accepted the typo via its
    # ``**extras`` and the OpenAPI title stayed the default "FastAPI".
    title="Workflow",
    description=(
        "This is a workflow FastAPI application that use to manage manual "
        "execute, logging, and schedule workflow via RestAPI."
    ),
    version=__version__,
    lifespan=lifespan,
    default_response_class=UJSONResponse,
)
# Compress responses larger than 1000 bytes.
app.add_middleware(GZipMiddleware, minimum_size=1000)
# NOTE(review): browsers send "http://localhost" (no port) for port 80, so
#   the "http://localhost:80" entry likely never matches — confirm intent.
origins: list[str] = [
    "http://localhost",
    "http://localhost:88",
    "http://localhost:80",
]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
91
+
92
+
93
@app.get("/", response_class=UJSONResponse)
async def health():
    """Health-check index view; returns a JSON status message only."""
    payload = {"message": "Workflow already start up with healthy status."}
    return payload
97
+
98
+
99
# NOTE Add the jobs and logs routes by default.
app.include_router(job, prefix=api_config.prefix_path)
app.include_router(log, prefix=api_config.prefix_path)


# NOTE: Enable the workflows route.
if api_config.enable_route_workflow:
    # Imported lazily so the workflow route module only loads when enabled.
    from .routes import workflow

    app.include_router(workflow, prefix=api_config.prefix_path)
109
+
110
+
111
# NOTE: Enable the schedules route.
if api_config.enable_route_schedule:
    # Imported lazily so scheduler machinery only loads when enabled.
    from ..logs import get_audit
    from ..scheduler import schedule_task
    from .routes import schedule

    app.include_router(schedule, prefix=api_config.prefix_path)

    # NOTE(review): ``on_event`` is the legacy FastAPI startup hook
    #   (deprecated in favor of lifespan handlers) — confirm the installed
    #   FastAPI version still supports it on routers.
    @schedule.on_event("startup")
    @repeat_at(cron="* * * * *", delay=2)
    def scheduler_listener():
        """Schedule broker every minute at 02 second."""
        logger.debug(
            f"[SCHEDULER]: Start listening schedule from queue "
            f"{app.state.scheduler}"
        )
        # Only dispatch when tasks exist; run for at most one minute so the
        # next cron tick takes over.
        if app.state.workflow_tasks:
            schedule_task(
                app.state.workflow_tasks,
                stop=datetime.now(config.tz) + timedelta(minutes=1),
                queue=app.state.workflow_queue,
                threads=app.state.workflow_threads,
                audit=get_audit(),
            )

    @schedule.on_event("startup")
    @repeat_at(cron="*/5 * * * *", delay=10)
    def monitoring():
        """Monitoring workflow thread that running in the background."""
        logger.debug("[MONITOR]: Start monitoring threading.")
        # Snapshot the keys first so popping entries below does not mutate
        # the mapping while iterating it.
        snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
        for t_name in snapshot_threads:

            thread_release: ReleaseThread = app.state.workflow_threads[t_name]

            # NOTE: remove the thread that running success.
            if not thread_release["thread"].is_alive():
                app.state.workflow_threads.pop(t_name)
149
+
150
+
151
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(
    request: Request, exc: RequestValidationError
):
    """Return a 422 JSON response carrying the validation error details."""
    del request  # Unused; the handler signature requires it.
    detail = {"detail": exc.errors(), "body": exc.body}
    return UJSONResponse(
        content=jsonable_encoder(detail),
        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
    )
160
+
161
+
162
if __name__ == "__main__":
    import uvicorn

    # Run the development server directly.
    uvicorn.run(
        app,
        host="0.0.0.0",
        port=80,
        # FIX: uvicorn log levels are lowercase strings ("critical",
        # "error", "warning", "info", "debug", "trace"); the previous
        # uppercase "DEBUG" fails the level lookup at startup.
        log_level="debug",
    )
@@ -9,7 +9,7 @@ from typing import Any, Optional
9
9
 
10
10
  from fastapi import APIRouter
11
11
  from fastapi.responses import UJSONResponse
12
- from pydantic import BaseModel
12
+ from pydantic import BaseModel, Field
13
13
 
14
14
  from ...__types import DictData
15
15
  from ...exceptions import JobException
@@ -18,33 +18,37 @@ from ...logs import get_logger
18
18
  from ...result import Result
19
19
 
20
20
  logger = get_logger("uvicorn.error")
21
+ job_route = APIRouter(prefix="/job", tags=["job"])
21
22
 
22
23
 
23
- job_route = APIRouter(
24
- prefix="/job",
25
- tags=["job"],
26
- default_response_class=UJSONResponse,
27
- )
24
+ class ResultCreate(BaseModel):
25
+ """Create Result model for receive running IDs to create the Result
26
+ dataclass.
27
+ """
28
28
 
29
-
30
- class ResultPost(BaseModel):
31
- context: DictData
32
- run_id: str
33
- parent_run_id: Optional[str] = None
29
+ run_id: str = Field(description="A running ID.")
30
+ parent_run_id: Optional[str] = Field(
31
+ default=None, description="A parent running ID."
32
+ )
34
33
 
35
34
 
36
- @job_route.post(path="/execute/")
35
+ @job_route.post(path="/execute/", response_class=UJSONResponse)
37
36
  async def job_execute(
38
- result: ResultPost,
37
+ result: ResultCreate,
39
38
  job: Job,
40
39
  params: dict[str, Any],
40
+ extras: Optional[dict[str, Any]] = None,
41
41
  ):
42
- """Execute job via RestAPI."""
42
+ """Execute job via RestAPI with execute route path."""
43
43
  rs: Result = Result(
44
- context=result.context,
45
44
  run_id=result.run_id,
46
45
  parent_run_id=result.parent_run_id,
46
+ extras=extras or {},
47
47
  )
48
+
49
+ if extras:
50
+ job.extras = extras
51
+
48
52
  context: DictData = {}
49
53
  try:
50
54
  job.set_outputs(
@@ -59,14 +63,11 @@ async def job_execute(
59
63
  rs.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
60
64
 
61
65
  return {
62
- "message": "Start execute job via API.",
63
- "result": {
64
- "run_id": rs.run_id,
65
- "parent_run_id": rs.parent_run_id,
66
- },
66
+ "message": "Execute job via RestAPI.",
67
+ "result": {"run_id": rs.run_id, "parent_run_id": rs.parent_run_id},
67
68
  "job": job.model_dump(
68
69
  by_alias=True,
69
- exclude_none=True,
70
+ exclude_none=False,
70
71
  exclude_unset=True,
71
72
  exclude_defaults=True,
72
73
  ),
@@ -17,7 +17,6 @@ from ...logs import get_logger
17
17
  from ...scheduler import Schedule
18
18
 
19
19
  logger = get_logger("uvicorn.error")
20
-
21
20
  schedule_route = APIRouter(
22
21
  prefix="/schedules",
23
22
  tags=["schedules"],
@@ -108,7 +107,6 @@ async def add_deploy_scheduler(request: Request, name: str):
108
107
  schedule.tasks(
109
108
  start_date_waiting,
110
109
  queue=request.state.workflow_queue,
111
- extras={},
112
110
  ),
113
111
  )
114
112
  return {
@@ -21,7 +21,6 @@ from ...result import Result
21
21
  from ...workflow import Workflow
22
22
 
23
23
  logger = get_logger("uvicorn.error")
24
-
25
24
  workflow_route = APIRouter(
26
25
  prefix="/workflows",
27
26
  tags=["workflows"],
@@ -55,7 +54,7 @@ async def get_workflow_by_name(name: str) -> DictData:
55
54
  ) from None
56
55
  return workflow.model_dump(
57
56
  by_alias=True,
58
- exclude_none=True,
57
+ exclude_none=False,
59
58
  exclude_unset=True,
60
59
  exclude_defaults=True,
61
60
  )
@@ -98,7 +97,7 @@ async def get_workflow_audits(name: str):
98
97
  "audits": [
99
98
  audit.model_dump(
100
99
  by_alias=True,
101
- exclude_none=True,
100
+ exclude_none=False,
102
101
  exclude_unset=True,
103
102
  exclude_defaults=True,
104
103
  )
@@ -132,7 +131,7 @@ async def get_workflow_release_audit(name: str, release: str):
132
131
  "message": f"Getting workflow {name!r} audit in release {release}",
133
132
  "audit": audit.model_dump(
134
133
  by_alias=True,
135
- exclude_none=True,
134
+ exclude_none=False,
136
135
  exclude_unset=True,
137
136
  exclude_defaults=True,
138
137
  ),
ddeutil/workflow/job.py CHANGED
@@ -540,6 +540,11 @@ class Job(BaseModel):
540
540
  }
541
541
  }
542
542
 
543
+ The keys that will set to the received context is `strategies`,
544
+ `errors`, and `skipped` keys. The `errors` and `skipped` keys will
545
+ extract from the result context if it exists. If it does not found, it
546
+ will not set on the received context.
547
+
543
548
  :raise JobException: If the job's ID does not set and the setting
544
549
  default job ID flag does not set.
545
550
 
@@ -599,7 +604,7 @@ class Job(BaseModel):
599
604
 
600
605
  :param params: (DictData) A parameter data.
601
606
  :param run_id: (str) A job running ID.
602
- :param parent_run_id: (str) A parent workflow running ID.
607
+ :param parent_run_id: (str) A parent running ID.
603
608
  :param event: (Event) An Event manager instance that use to cancel this
604
609
  execution if it forces stopped by parent execution.
605
610
 
@@ -667,15 +672,15 @@ def local_execute_strategy(
667
672
  `set_outputs` method for reconstruct result context data.
668
673
 
669
674
  :param job: (Job) A job model that want to execute.
670
- :param strategy: A strategy metrix value that use on this execution.
671
- This value will pass to the `matrix` key for templating.
675
+ :param strategy: (DictData) A strategy metrix value. This value will pass
676
+ to the `matrix` key for templating in context data.
672
677
  :param params: (DictData) A parameter data.
673
678
  :param result: (Result) A Result instance for return context and status.
674
679
  :param event: (Event) An Event manager instance that use to cancel this
675
680
  execution if it forces stopped by parent execution.
676
681
 
677
- :raise JobException: If it has any error from `StageException` or
678
- `UtilException`.
682
+ :raise JobException: If stage execution raise any error as `StageException`
683
+ or `UtilException`.
679
684
 
680
685
  :rtype: Result
681
686
  """
@@ -683,17 +688,16 @@ def local_execute_strategy(
683
688
  run_id=gen_id(job.id or "not-set", unique=True),
684
689
  extras=job.extras,
685
690
  )
686
-
687
- strategy_id: str = gen_id(strategy)
688
- context: DictData = copy.deepcopy(params)
689
- context.update({"matrix": strategy, "stages": {}})
690
-
691
691
  if strategy:
692
+ strategy_id: str = gen_id(strategy)
692
693
  result.trace.info(f"[JOB]: Start Strategy: {strategy_id!r}")
693
694
  result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
694
695
  else:
695
- result.trace.info("[JOB]: Start Strategy: EMPTY")
696
+ strategy_id: str = "EMPTY"
697
+ result.trace.info("[JOB]: Start Strategy: 'EMPTY'")
696
698
 
699
+ context: DictData = copy.deepcopy(params)
700
+ context.update({"matrix": strategy, "stages": {}})
697
701
  for stage in job.stages:
698
702
 
699
703
  if job.extras:
@@ -707,7 +711,7 @@ def local_execute_strategy(
707
711
  if event and event.is_set():
708
712
  error_msg: str = (
709
713
  "Job strategy was canceled from event that had set before "
710
- "strategy execution."
714
+ "job strategy execution."
711
715
  )
712
716
  return result.catch(
713
717
  status=CANCEL,
@@ -820,7 +824,7 @@ def local_execute(
820
824
  context={
821
825
  "errors": JobException(
822
826
  "Job was canceled from event that had set before "
823
- "local execution."
827
+ "local job execution."
824
828
  ).to_dict()
825
829
  },
826
830
  )
@@ -98,6 +98,7 @@ class Result:
98
98
  return cls(
99
99
  run_id=(run_id or gen_id(id_logic or "", unique=True)),
100
100
  parent_run_id=parent_run_id,
101
+ ts=get_dt_now(dynamic("tz", extras=extras)),
101
102
  extras=(extras or {}),
102
103
  )
103
104
  elif parent_run_id:
@@ -535,9 +535,7 @@ def schedule_task(
535
535
  current_release: datetime = current_date.replace(
536
536
  second=0, microsecond=0
537
537
  )
538
- if (
539
- first_date := q.first_queue.date
540
- ) > current_release: # pragma: no cov
538
+ if (first_date := q.queue[0].date) > current_release: # pragma: no cov
541
539
  result.trace.debug(
542
540
  f"[WORKFLOW]: Skip schedule "
543
541
  f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"