ddeutil-workflow 0.0.23__py3-none-any.whl → 0.0.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__version__: str = "0.0.23"
+__version__: str = "0.0.25"
@@ -736,6 +736,12 @@ class CronRunner:
         self.is_year: bool = isinstance(cron, CronJobYear)
         self.reset_flag: bool = True
 
+    def __repr__(self) -> str:
+        return (
+            f"{self.__class__.__name__}(CronJob('{self.cron}'), "
+            f"{self.date:%Y-%m-%d %H:%M:%S}, tz='{self.tz}')"
+        )
+
     def reset(self) -> None:
         """Resets the iterator to start time."""
         self.date: datetime = self.__start_date
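The new `__repr__` gives `CronRunner` a readable representation for logs and debugging. A toy stand-in reproducing the same f-string pattern, since the runner's constructor signature is not shown in this diff:

```python
from datetime import datetime
from zoneinfo import ZoneInfo


class DemoRunner:
    """Stand-in reproducing the repr pattern from the hunk above."""

    def __init__(self, cron: str, date: datetime, tz: str) -> None:
        self.cron = cron
        self.date = date
        self.tz = tz

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}(CronJob('{self.cron}'), "
            f"{self.date:%Y-%m-%d %H:%M:%S}, tz='{self.tz}')"
        )


print(repr(DemoRunner("*/5 * * * *", datetime(2024, 1, 1, tzinfo=ZoneInfo("UTC")), "UTC")))
# DemoRunner(CronJob('*/5 * * * *'), 2024-01-01 00:00:00, tz='UTC')
```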
@@ -3,11 +3,17 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .__cron import CronRunner
 from .conf import (
     Config,
     FileLog,
     Loader,
 )
+from .cron import (
+    On,
+    YearOn,
+    interval2crontab,
+)
 from .exceptions import (
     JobException,
     ParamValueException,
@@ -19,11 +25,6 @@ from .job import (
     Job,
     Strategy,
 )
-from .on import (
-    On,
-    YearOn,
-    interval2crontab,
-)
 from .params import (
     ChoiceParam,
     DatetimeParam,
@@ -35,6 +36,7 @@ from .result import Result
 from .scheduler import (
     Schedule,
     WorkflowSchedule,
+    schedule_runner,
 )
 from .stage import (
     BashStage,
@@ -68,11 +70,10 @@ from .utils import (
     map_post_filter,
     not_in_template,
     param2template,
-    queue2str,
     str2template,
     tag,
 )
 from .workflow import (
     Workflow,
-    WorkflowTaskData,
+    WorkflowTask,
 )
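Taken together, these `__init__` hunks spell out the import migration for downstream code: `On`, `YearOn`, and `interval2crontab` move from `.on` to `.cron`, `CronRunner` and `schedule_runner` become public, `queue2str` disappears, and `WorkflowTaskData` is renamed `WorkflowTask`. A before/after sketch, assuming these names are re-exported from the package root as the hunks indicate:

```python
# 0.0.23
# from ddeutil.workflow.on import On, YearOn, interval2crontab
# from ddeutil.workflow import WorkflowTaskData

# 0.0.25 -- same names, new homes
from ddeutil.workflow import (
    CronRunner,        # newly exported from .__cron
    On,                # now lives in .cron instead of .on
    WorkflowTask,      # renamed from WorkflowTaskData
    YearOn,
    interval2crontab,
    schedule_runner,   # newly exported from .scheduler
)
```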
@@ -0,0 +1 @@
+from .api import app
@@ -11,7 +11,6 @@ import uuid
 from collections.abc import AsyncIterator
 from datetime import datetime, timedelta
 from queue import Empty, Queue
-from threading import Thread
 from typing import TypedDict
 
 from dotenv import load_dotenv
@@ -20,34 +19,39 @@ from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
-from .__about__ import __version__
-from .conf import config, get_logger
+from ..__about__ import __version__
+from ..conf import config, get_logger
+from ..scheduler import ReleaseThread, ReleaseThreads
+from ..workflow import WorkflowQueue, WorkflowTask
 from .repeat import repeat_at, repeat_every
-from .workflow import WorkflowTaskData
 
 load_dotenv()
 logger = get_logger("ddeutil.workflow")
 
 
 class State(TypedDict):
+    """TypedDict for the state of the FastAPI application."""
+
+    # NOTE: For upper queue route.
     upper_queue: Queue
     upper_result: dict[str, str]
+
+    # NOTE: For schedule listener.
     scheduler: list[str]
-    workflow_threads: dict[str, Thread]
-    workflow_tasks: list[WorkflowTaskData]
-    workflow_queue: dict[str, list[datetime]]
-    workflow_running: dict[str, list[datetime]]
+    workflow_threads: ReleaseThreads
+    workflow_tasks: list[WorkflowTask]
+    workflow_queue: dict[str, WorkflowQueue]
 
 
 @contextlib.asynccontextmanager
 async def lifespan(a: FastAPI) -> AsyncIterator[State]:
+    """Lifespan function for the FastAPI application."""
     a.state.upper_queue = Queue()
     a.state.upper_result = {}
     a.state.scheduler = []
     a.state.workflow_threads = {}
     a.state.workflow_tasks = []
     a.state.workflow_queue = {}
-    a.state.workflow_running = {}
 
     await asyncio.create_task(broker_upper_messages())
 
@@ -65,7 +69,6 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
         #
         "scheduler": a.state.scheduler,
         "workflow_queue": a.state.workflow_queue,
-        "workflow_running": a.state.workflow_running,
         "workflow_threads": a.state.workflow_threads,
         "workflow_tasks": a.state.workflow_tasks,
     }
@@ -114,12 +117,11 @@ async def get_result(request_id: str) -> dict[str, str]:
 
 
 @app.get("/")
-@app.get("/api")
 async def health():
     return {"message": "Workflow API already start up"}
 
 
-@app.post("/api")
+@app.post(f"{config.prefix_path}/upper")
 async def message_upper(payload: Payload):
     """Convert message from any case to the upper case."""
     request_id: str = str(uuid.uuid4())
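The upper-casing endpoint moves from the fixed `POST /api` to `f"{config.prefix_path}/upper"`. A hypothetical client call, assuming `prefix_path` is configured as `/api` and that `Payload` carries a `text` field (neither value is confirmed by this diff):

```python
import httpx  # arbitrary client choice for the sketch

# Both the "/api" prefix and the "text" field name are assumptions.
resp = httpx.post("http://localhost:8000/api/upper", json={"text": "hello"})
print(resp.json())
```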
@@ -129,27 +131,47 @@ async def message_upper(payload: Payload):
     return await get_result(request_id)
 
 
+# NOTE: Enable the workflow route.
 if config.enable_route_workflow:
-    from .route import workflow
+    from .route import workflow_route
+
+    app.include_router(workflow_route, prefix=config.prefix_path)
 
-    app.include_router(workflow)
 
+# NOTE: Enable the schedule route.
 if config.enable_route_schedule:
-    from .route import schedule
-    from .scheduler import workflow_task
+    from ..conf import FileLog
+    from ..scheduler import schedule_task
+    from .route import schedule_route
 
-    app.include_router(schedule)
+    app.include_router(schedule_route, prefix=config.prefix_path)
 
-    @schedule.on_event("startup")
+    @schedule_route.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
-    def schedule_broker_up():
+    def scheduler_listener():
+        """Schedule broker that runs every minute at second 02."""
         logger.debug(
            f"[SCHEDULER]: Start listening schedule from queue "
            f"{app.state.scheduler}"
        )
        if app.state.workflow_tasks:
-            workflow_task(
+            schedule_task(
                 app.state.workflow_tasks,
-                stop=datetime.now() + timedelta(minutes=1),
+                stop=datetime.now(config.tz) + timedelta(minutes=1),
+                queue=app.state.workflow_queue,
                 threads=app.state.workflow_threads,
+                log=FileLog,
             )
+
+    @schedule_route.on_event("startup")
+    @repeat_at(cron="*/5 * * * *")
+    def monitoring():
+        logger.debug("[MONITOR]: Start monitoring threading.")
+        snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
+        for t_name in snapshot_threads:
+
+            thread_release: ReleaseThread = app.state.workflow_threads[t_name]
+
+            # NOTE: Remove threads that finished running successfully.
+            if not thread_release["thread"].is_alive():
+                app.state.workflow_threads.pop(t_name)
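The new `monitoring` job prunes entries from `app.state.workflow_threads` once their worker thread exits, treating each value as a mapping with a `"thread"` key. A self-contained sketch of the same pruning pattern, assuming `ReleaseThread` is shaped like a `TypedDict` around `threading.Thread`:

```python
import threading
import time
from typing import TypedDict


class ReleaseThread(TypedDict):  # assumed shape, inferred from thread_release["thread"]
    thread: threading.Thread


def prune_dead(threads: dict[str, ReleaseThread]) -> None:
    """Drop entries whose worker thread is no longer alive."""
    for name in list(threads):  # snapshot the keys before mutating the dict
        if not threads[name]["thread"].is_alive():
            threads.pop(name)


t = threading.Thread(target=lambda: time.sleep(0.1))
t.start()
store: dict[str, ReleaseThread] = {"demo": {"thread": t}}
t.join()
prune_dead(store)
assert "demo" not in store
```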
@@ -12,8 +12,8 @@ from functools import wraps
 
 from starlette.concurrency import run_in_threadpool
 
-from .__cron import CronJob
-from .conf import config, get_logger
+from ..__cron import CronJob
+from ..conf import config, get_logger
 
 logger = get_logger("ddeutil.workflow")
 
@@ -6,6 +6,7 @@
 from __future__ import annotations
 
 import copy
+from dataclasses import asdict
 from datetime import datetime, timedelta
 from typing import Any
 
@@ -14,41 +15,43 @@ from fastapi import status as st
 from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
-from . import Workflow
-from .__types import DictData
-from .conf import Loader, config, get_logger
-from .result import Result
-from .scheduler import Schedule
+from ..__types import DictData
+from ..conf import Loader, config, get_logger
+from ..result import Result
+from ..scheduler import Schedule
+from ..workflow import Workflow
 
 logger = get_logger("ddeutil.workflow")
-workflow = APIRouter(
-    prefix="/api/workflow",
-    tags=["workflow"],
+
+workflow_route = APIRouter(
+    prefix="/workflows",
+    tags=["workflows"],
     default_response_class=UJSONResponse,
 )
-schedule = APIRouter(
-    prefix="/api/schedule",
-    tags=["schedule"],
+
+schedule_route = APIRouter(
+    prefix="/schedules",
+    tags=["schedules"],
     default_response_class=UJSONResponse,
 )
 
-ListDate = list[datetime]
 
-
-@workflow.get("/")
-async def get_workflows():
+@workflow_route.get(path="/")
+async def get_workflows() -> DictData:
     """Return all workflow workflows that exists in config path."""
-    workflows: DictData = Loader.finds(Workflow)
+    workflows: DictData = dict(Loader.finds(Workflow))
     return {
-        "message": f"getting all workflows: {workflows}",
+        "message": f"Getting all workflows: {len(workflows)}",
+        "count": len(workflows),
+        "workflows": workflows,
     }
 
 
-@workflow.get("/{name}")
-async def get_workflow(name: str) -> DictData:
+@workflow_route.get(path="/{name}")
+async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        wf: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_loader(name=name, externals={})
     except ValueError as err:
         logger.exception(err)
         raise HTTPException(
@@ -57,7 +60,7 @@ async def get_workflow(name: str) -> DictData:
             f"Workflow workflow name: {name!r} does not found in /conf path"
         ),
     ) from None
-    return wf.model_dump(
+    return workflow.model_dump(
         by_alias=True,
         exclude_none=True,
         exclude_unset=True,
@@ -69,11 +72,11 @@ class ExecutePayload(BaseModel):
     params: dict[str, Any]
 
 
-@workflow.post("/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
+@workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
 async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        wf: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_loader(name=name, externals={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
@@ -83,36 +86,45 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
     ) from None
 
     # NOTE: Start execute manually
-    rs: Result = wf.execute(params=payload.params)
+    try:
+        result: Result = workflow.execute(params=payload.params)
+    except Exception as err:
+        raise HTTPException(
+            status_code=st.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"{type(err)}: {err}",
+        ) from None
 
-    return dict(rs)
+    return asdict(result)
 
 
-@workflow.get("/{name}/logs")
+@workflow_route.get(path="/{name}/logs")
 async def get_workflow_logs(name: str):
-    return {"message": f"getting workflow {name!r} logs"}
+    return {"message": f"Getting workflow {name!r} logs"}
 
 
-@workflow.get("/{name}/logs/{release}")
+@workflow_route.get(path="/{name}/logs/{release}")
 async def get_workflow_release_log(name: str, release: str):
-    return {"message": f"getting workflow {name!r} log in release {release}"}
+    return {"message": f"Getting workflow {name!r} log in release {release}"}
 
 
-@workflow.delete("/{name}/logs/{release}", status_code=st.HTTP_204_NO_CONTENT)
+@workflow_route.delete(
+    path="/{name}/logs/{release}",
+    status_code=st.HTTP_204_NO_CONTENT,
+)
 async def del_workflow_release_log(name: str, release: str):
-    return {"message": f"deleted workflow {name!r} log in release {release}"}
+    return {"message": f"Deleted workflow {name!r} log in release {release}"}
 
 
-@schedule.get("/{name}")
-async def get_schedule(name: str):
+@schedule_route.get(path="/{name}")
+async def get_schedules(name: str):
     try:
-        sch: Schedule = Schedule.from_loader(name=name, externals={})
+        schedule: Schedule = Schedule.from_loader(name=name, externals={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
             detail=f"Schedule name: {name!r} does not found in /conf path",
         ) from None
-    return sch.model_dump(
+    return schedule.model_dump(
         by_alias=True,
         exclude_none=True,
         exclude_unset=True,
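The switch from `dict(rs)` to `asdict(result)` implies that `Result` is now a dataclass rather than a plain mapping. A minimal illustration of the serialization pattern (the `DemoResult` fields are invented; the real `Result` fields are not shown in this diff):

```python
from dataclasses import asdict, dataclass


@dataclass
class DemoResult:  # stand-in for ddeutil.workflow.result.Result
    status: int
    context: dict


print(asdict(DemoResult(status=0, context={"jobs": {}})))
# {'status': 0, 'context': {'jobs': {}}}
```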
@@ -120,32 +132,32 @@ async def get_schedule(name: str):
     )
 
 
-@schedule.get("/deploy")
+@schedule_route.get(path="/deploy")
 async def get_deploy_schedulers(request: Request):
     snapshot = copy.deepcopy(request.state.scheduler)
     return {"schedule": snapshot}
 
 
-@schedule.get("/deploy/{name}")
+@schedule_route.get(path="/deploy/{name}")
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
         sch = Schedule.from_loader(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
-        for wf in sch.workflows:
+        for workflow in sch.workflows:
             getter.append(
                 {
-                    wf.name: {
+                    workflow.name: {
                         "queue": copy.deepcopy(
-                            request.state.workflow_queue[wf.name]
+                            request.state.workflow_queue[workflow.name]
                         ),
                         "running": copy.deepcopy(
-                            request.state.workflow_running[wf.name]
+                            request.state.workflow_running[workflow.name]
                         ),
                     }
                 }
             )
         return {
-            "message": f"getting {name!r} to schedule listener.",
+            "message": f"Getting {name!r} to schedule listener.",
             "scheduler": getter,
         }
     raise HTTPException(
@@ -154,13 +166,13 @@ async def get_deploy_scheduler(request: Request, name: str):
     )
 
 
-@schedule.post("/deploy/{name}")
+@schedule_route.post(path="/deploy/{name}")
 async def add_deploy_scheduler(request: Request, name: str):
     """Adding schedule name to application state store."""
     if name in request.state.scheduler:
         raise HTTPException(
             status_code=st.HTTP_302_FOUND,
-            detail="This schedule already exists in scheduler list.",
+            detail=f"This schedule {name!r} already exists in scheduler list.",
         )
 
     request.state.scheduler.append(name)
@@ -172,42 +184,49 @@ async def add_deploy_scheduler(request: Request, name: str):
 
     # NOTE: Create pair of workflow and on from schedule model.
     try:
-        sch = Schedule.from_loader(name)
-    except ValueError as e:
+        schedule: Schedule = Schedule.from_loader(name)
+    except ValueError as err:
         request.state.scheduler.remove(name)
-        logger.exception(e)
+        logger.exception(err)
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
-            detail=str(e),
+            detail=str(err),
         ) from None
+
     request.state.workflow_tasks.extend(
-        sch.tasks(
+        schedule.tasks(
             start_date_waiting,
             queue=request.state.workflow_queue,
-            running=request.state.workflow_running,
+            externals={},
         ),
     )
-    return {"message": f"adding {name!r} to schedule listener."}
+    return {
+        "message": f"Adding {name!r} to schedule listener.",
+        "start_date": start_date_waiting,
+    }
 
 
-@schedule.delete("/deploy/{name}")
+@schedule_route.delete(path="/deploy/{name}")
 async def del_deploy_scheduler(request: Request, name: str):
+    """Delete workflow task on the schedule listener."""
     if name in request.state.scheduler:
+
+        # NOTE: Remove current schedule name from the state.
         request.state.scheduler.remove(name)
-        sche = Schedule.from_loader(name)
-        for workflow_task in sche.tasks(datetime.now(), {}, {}):
-            request.state.workflow_tasks.remove(workflow_task)
 
-        for wf in sche.workflows:
-            if wf in request.state.workflow_queue:
-                request.state.workflow_queue.pop(wf, {})
+        schedule: Schedule = Schedule.from_loader(name)
 
-            if wf in request.state.workflow_running:
-                request.state.workflow_running.pop(wf, {})
+        for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
+            if task in request.state.workflow_tasks:
+                request.state.workflow_tasks.remove(task)
 
-        return {"message": f"deleted {name!r} to schedule listener."}
+        for workflow in schedule.workflows:
+            if workflow.alias in request.state.workflow_queue:
+                request.state.workflow_queue.pop(workflow.alias)
+
+        return {"message": f"Deleted schedule {name!r} in listener."}
 
     raise HTTPException(
         status_code=st.HTTP_404_NOT_FOUND,
-        detail=f"Does not found {name!r} in schedule listener",
+        detail=f"Does not found schedule {name!r} in listener",
     )
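Net effect of the router changes: the hard-coded `/api/workflow` and `/api/schedule` prefixes become `/workflows` and `/schedules` mounted under the configurable `config.prefix_path`. A hypothetical URL mapping, assuming `prefix_path` is set to `/api` (its default is not shown in this diff):

```python
# 0.0.23 route                        -> 0.0.25 route (prefix_path == "/api" assumed)
# GET    /api/workflow/               -> GET    /api/workflows/
# GET    /api/workflow/{name}         -> GET    /api/workflows/{name}
# POST   /api/workflow/{name}/execute -> POST   /api/workflows/{name}/execute
# GET    /api/schedule/{name}         -> GET    /api/schedules/{name}
# POST   /api/schedule/deploy/{name}  -> POST   /api/schedules/deploy/{name}
```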
ddeutil/workflow/cli.py CHANGED
@@ -6,23 +6,16 @@
 from __future__ import annotations
 
 import json
+import sys
 from datetime import datetime
-from enum import Enum
 from typing import Annotated, Optional
 
 from ddeutil.core import str2list
-from typer import Argument, Option, Typer
+from typer import Argument, Typer, echo
 
-from .conf import config, get_logger
+from .conf import config
 
-logger = get_logger("ddeutil.workflow")
 cli: Typer = Typer()
-cli_log: Typer = Typer()
-cli.add_typer(
-    cli_log,
-    name="log",
-    help="Logging of workflow CLI",
-)
 
 
 @cli.command()
@@ -34,15 +27,28 @@ def run(
     params: Annotated[
         str,
         Argument(
-            help="A json string for parameters of this workflow execution."
+            help="A json string for parameters of this workflow execution.",
        ),
    ],
 ):
     """Run workflow workflow manually with an input custom parameters that able
     to receive with workflow params config.
     """
-    logger.info(f"Running workflow name: {workflow}")
-    logger.info(f"... with Parameters: {json.dumps(json.loads(params))}")
+    echo(f"Running workflow name: ({type(workflow)}) {workflow!r}")
+    echo(f"... with Parameters: ({type(params)}) {params!r}")
+
+    from .result import Result
+    from .workflow import Workflow
+
+    try:
+        wf: Workflow = Workflow.from_loader(name=workflow)
+        rs: Result = wf.execute(params=json.loads(params))
+    except Exception as err:
+        echo(str(err))
+        sys.exit(1)
+
+    echo(f"Result: {rs}")
+    sys.exit(0)
 
 
 @cli.command()
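The reworked `run` command now executes the workflow inline instead of only logging, and exits non-zero on failure. A programmatic equivalent that mirrors the calls in this hunk (the workflow name and parameters are illustrative):

```python
import json

from ddeutil.workflow.result import Result
from ddeutil.workflow.workflow import Workflow

# Hypothetical workflow name and params; the API calls match the hunk above.
wf: Workflow = Workflow.from_loader(name="my-pipeline")
rs: Result = wf.execute(params=json.loads('{"run-date": "2024-01-01"}'))
print(f"Result: {rs}")
```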
@@ -69,54 +75,26 @@ def schedule(
     module.
     """
     excluded: list[str] = str2list(excluded) if excluded else []
+    echo(f"... with Excluded Parameters: {excluded!r}")
     externals: str = externals or "{}"
+
+    # NOTE: Convert timezone on the stop date.
     if stop:
         stop: datetime = stop.astimezone(tz=config.tz)
 
-    from .scheduler import workflow_runner
-
-    # NOTE: Start running workflow scheduler application.
-    workflow_rs: list[str] = workflow_runner(
-        stop=stop, excluded=excluded, externals=json.loads(externals)
-    )
-    logger.info(f"Application run success: {workflow_rs}")
+    from .scheduler import schedule_runner
 
+    try:
+        # NOTE: Start running workflow scheduler application.
+        workflow_rs: list[str] = schedule_runner(
+            stop=stop, excluded=excluded, externals=json.loads(externals)
+        )
+        echo(f"Schedule with CLI run success with: {workflow_rs}")
+    except Exception as err:
+        echo(str(err))
+        sys.exit(1)
 
-@cli_log.command("workflow-get")
-def workflow_log_get(
-    name: Annotated[
-        str,
-        Argument(help="A workflow name that want to getting log"),
-    ],
-    limit: Annotated[
-        int,
-        Argument(help="A number of the limitation of logging"),
-    ] = 100,
-    desc: Annotated[
-        bool,
-        Option(
-            "--desc",
-            help="A descending flag that order by logging release datetime.",
-        ),
-    ] = True,
-):
-    logger.info(f"{name} : limit {limit} : desc: {desc}")
-    return [""]
-
-
-class LogMode(str, Enum):
-    get = "get"
-    delete = "delete"
-
-
-@cli_log.command("workflow-delete")
-def workflow_log_delete(
-    mode: Annotated[
-        LogMode,
-        Argument(case_sensitive=True),
-    ]
-):
-    logger.info(mode)
+    sys.exit(0)
 
 
 @cli.callback()
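A programmatic equivalent of the reworked `schedule` command, mirroring the `schedule_runner` call the hunk shows (the one-minute stop window and empty externals are illustrative):

```python
import json
from datetime import datetime, timedelta

from ddeutil.workflow.scheduler import schedule_runner

# Run the scheduler loop for roughly one minute, as the API listener also does.
results: list[str] = schedule_runner(
    stop=datetime.now() + timedelta(minutes=1),  # timezone handling omitted
    excluded=[],
    externals=json.loads("{}"),
)
print(f"Schedule with CLI run success with: {results}")
```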