ddeutil-workflow 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- __version__: str = "0.0.24"
+ __version__: str = "0.0.26"
@@ -0,0 +1 @@
+ from .api import app
@@ -11,7 +11,6 @@ import uuid
  from collections.abc import AsyncIterator
  from datetime import datetime, timedelta
  from queue import Empty, Queue
- from threading import Thread
  from typing import TypedDict

  from dotenv import load_dotenv
@@ -20,34 +19,39 @@ from fastapi.middleware.gzip import GZipMiddleware
  from fastapi.responses import UJSONResponse
  from pydantic import BaseModel

- from .__about__ import __version__
- from .conf import config, get_logger
+ from ..__about__ import __version__
+ from ..conf import config, get_logger
+ from ..scheduler import ReleaseThread, ReleaseThreads
+ from ..workflow import WorkflowQueue, WorkflowTask
  from .repeat import repeat_at, repeat_every
- from .workflow import WorkflowTask

  load_dotenv()
  logger = get_logger("ddeutil.workflow")


  class State(TypedDict):
+     """TypedDict for the state of the FastAPI application."""
+
+     # NOTE: For upper queue route.
      upper_queue: Queue
      upper_result: dict[str, str]
+
+     # NOTE: For schedule listener.
      scheduler: list[str]
-     workflow_threads: dict[str, Thread]
+     workflow_threads: ReleaseThreads
      workflow_tasks: list[WorkflowTask]
-     workflow_queue: dict[str, list[datetime]]
-     workflow_running: dict[str, list[datetime]]
+     workflow_queue: dict[str, WorkflowQueue]


  @contextlib.asynccontextmanager
  async def lifespan(a: FastAPI) -> AsyncIterator[State]:
+     """Lifespan function for the FastAPI application."""
      a.state.upper_queue = Queue()
      a.state.upper_result = {}
      a.state.scheduler = []
      a.state.workflow_threads = {}
      a.state.workflow_tasks = []
      a.state.workflow_queue = {}
-     a.state.workflow_running = {}

      await asyncio.create_task(broker_upper_messages())
@@ -65,7 +69,6 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
          #
          "scheduler": a.state.scheduler,
          "workflow_queue": a.state.workflow_queue,
-         "workflow_running": a.state.workflow_running,
          "workflow_threads": a.state.workflow_threads,
          "workflow_tasks": a.state.workflow_tasks,
      }
@@ -114,12 +117,11 @@ async def get_result(request_id: str) -> dict[str, str]:


  @app.get("/")
- @app.get("/api")
  async def health():
      return {"message": "Workflow API already start up"}


- @app.post("/api")
+ @app.post(f"{config.prefix_path}/upper")
  async def message_upper(payload: Payload):
      """Convert message from any case to the upper case."""
      request_id: str = str(uuid.uuid4())
@@ -129,27 +131,47 @@ async def message_upper(payload: Payload):
      return await get_result(request_id)


+ # NOTE: Enable the workflow route.
  if config.enable_route_workflow:
-     from .route import workflow
+     from .route import workflow_route
+
+     app.include_router(workflow_route, prefix=config.prefix_path)

-     app.include_router(workflow)

+ # NOTE: Enable the schedule route.
  if config.enable_route_schedule:
-     from .route import schedule
-     from .scheduler import workflow_task
+     from ..conf import FileLog
+     from ..scheduler import schedule_task
+     from .route import schedule_route

-     app.include_router(schedule)
+     app.include_router(schedule_route, prefix=config.prefix_path)

-     @schedule.on_event("startup")
+     @schedule_route.on_event("startup")
      @repeat_at(cron="* * * * *", delay=2)
-     def schedule_broker_up():
+     def scheduler_listener():
+         """Schedule broker every minute at 02 second."""
          logger.debug(
              f"[SCHEDULER]: Start listening schedule from queue "
              f"{app.state.scheduler}"
          )
          if app.state.workflow_tasks:
-             workflow_task(
+             schedule_task(
                  app.state.workflow_tasks,
-                 stop=datetime.now() + timedelta(minutes=1),
+                 stop=datetime.now(config.tz) + timedelta(minutes=1),
+                 queue=app.state.workflow_queue,
                  threads=app.state.workflow_threads,
+                 log=FileLog,
              )
+
+     @schedule_route.on_event("startup")
+     @repeat_at(cron="*/5 * * * *")
+     def monitoring():
+         logger.debug("[MONITOR]: Start monitoring threading.")
+         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
+         for t_name in snapshot_threads:
+
+             thread_release: ReleaseThread = app.state.workflow_threads[t_name]
+
+             # NOTE: remove the thread that running success.
+             if not thread_release["thread"].is_alive():
+                 app.state.workflow_threads.pop(t_name)
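With the API module now a subpackage and its routers mounted under `config.prefix_path` (default `/api/v1`, see the conf.py hunk below), the serving entrypoint is unchanged while route URLs move. A minimal sketch, assuming a local server started with `uvicorn ddeutil.workflow.api:app`; `httpx` and the host/port are illustrative assumptions, not part of this diff:

```python
import httpx  # assumption: any HTTP client works here

BASE = "http://127.0.0.1:8000"

# The health route stays at the root path.
print(httpx.get(f"{BASE}/").json())  # {"message": "Workflow API already start up"}

# Workflow and schedule routers now sit under the configurable prefix.
print(httpx.get(f"{BASE}/api/v1/workflows/").json())
```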
@@ -12,8 +12,8 @@ from functools import wraps

  from starlette.concurrency import run_in_threadpool

- from .__cron import CronJob
- from .conf import config, get_logger
+ from ..__cron import CronJob
+ from ..conf import config, get_logger

  logger = get_logger("ddeutil.workflow")

@@ -6,6 +6,7 @@
  from __future__ import annotations

  import copy
+ from dataclasses import asdict
  from datetime import datetime, timedelta
  from typing import Any

@@ -14,41 +15,43 @@ from fastapi import status as st
  from fastapi.responses import UJSONResponse
  from pydantic import BaseModel

- from . import Workflow
- from .__types import DictData
- from .conf import Loader, config, get_logger
- from .result import Result
- from .scheduler import Schedule
+ from ..__types import DictData
+ from ..conf import Loader, config, get_logger
+ from ..result import Result
+ from ..scheduler import Schedule
+ from ..workflow import Workflow

  logger = get_logger("ddeutil.workflow")
- workflow = APIRouter(
-     prefix="/api/workflow",
-     tags=["workflow"],
+
+ workflow_route = APIRouter(
+     prefix="/workflows",
+     tags=["workflows"],
      default_response_class=UJSONResponse,
  )
- schedule = APIRouter(
-     prefix="/api/schedule",
-     tags=["schedule"],
+
+ schedule_route = APIRouter(
+     prefix="/schedules",
+     tags=["schedules"],
      default_response_class=UJSONResponse,
  )

- ListDate = list[datetime]

-
- @workflow.get("/")
- async def get_workflows():
+ @workflow_route.get(path="/")
+ async def get_workflows() -> DictData:
      """Return all workflows that exist in the config path."""
-     workflows: DictData = Loader.finds(Workflow)
+     workflows: DictData = dict(Loader.finds(Workflow))
      return {
-         "message": f"getting all workflows: {workflows}",
+         "message": f"Getting all workflows: {len(workflows)}",
+         "count": len(workflows),
+         "workflows": workflows,
      }


- @workflow.get("/{name}")
- async def get_workflow(name: str) -> DictData:
+ @workflow_route.get(path="/{name}")
+ async def get_workflow_by_name(name: str) -> DictData:
      """Return the workflow model that matches an input workflow name."""
      try:
-         wf: Workflow = Workflow.from_loader(name=name, externals={})
+         workflow: Workflow = Workflow.from_loader(name=name, externals={})
      except ValueError as err:
          logger.exception(err)
          raise HTTPException(
@@ -57,7 +60,7 @@ async def get_workflow(name: str) -> DictData:
              f"Workflow workflow name: {name!r} does not found in /conf path"
          ),
      ) from None
-     return wf.model_dump(
+     return workflow.model_dump(
          by_alias=True,
          exclude_none=True,
          exclude_unset=True,
@@ -69,11 +72,11 @@ class ExecutePayload(BaseModel):
      params: dict[str, Any]


- @workflow.post("/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
+ @workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
  async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
      """Execute a workflow that matches an input workflow name."""
      try:
-         wf: Workflow = Workflow.from_loader(name=name, externals={})
+         workflow: Workflow = Workflow.from_loader(name=name, externals={})
      except ValueError:
          raise HTTPException(
              status_code=st.HTTP_404_NOT_FOUND,
@@ -83,36 +86,45 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
      ) from None

      # NOTE: Start execute manually
-     rs: Result = wf.execute(params=payload.params)
+     try:
+         result: Result = workflow.execute(params=payload.params)
+     except Exception as err:
+         raise HTTPException(
+             status_code=st.HTTP_500_INTERNAL_SERVER_ERROR,
+             detail=f"{type(err)}: {err}",
+         ) from None

-     return dict(rs)
+     return asdict(result)


- @workflow.get("/{name}/logs")
+ @workflow_route.get(path="/{name}/logs")
  async def get_workflow_logs(name: str):
-     return {"message": f"getting workflow {name!r} logs"}
+     return {"message": f"Getting workflow {name!r} logs"}


- @workflow.get("/{name}/logs/{release}")
+ @workflow_route.get(path="/{name}/logs/{release}")
  async def get_workflow_release_log(name: str, release: str):
-     return {"message": f"getting workflow {name!r} log in release {release}"}
+     return {"message": f"Getting workflow {name!r} log in release {release}"}


- @workflow.delete("/{name}/logs/{release}", status_code=st.HTTP_204_NO_CONTENT)
+ @workflow_route.delete(
+     path="/{name}/logs/{release}",
+     status_code=st.HTTP_204_NO_CONTENT,
+ )
  async def del_workflow_release_log(name: str, release: str):
-     return {"message": f"deleted workflow {name!r} log in release {release}"}
+     return {"message": f"Deleted workflow {name!r} log in release {release}"}


- @schedule.get("/{name}")
- async def get_schedule(name: str):
+ @schedule_route.get(path="/{name}")
+ async def get_schedules(name: str):
      try:
-         sch: Schedule = Schedule.from_loader(name=name, externals={})
+         schedule: Schedule = Schedule.from_loader(name=name, externals={})
      except ValueError:
          raise HTTPException(
              status_code=st.HTTP_404_NOT_FOUND,
              detail=f"Schedule name: {name!r} does not found in /conf path",
          ) from None
-     return sch.model_dump(
+     return schedule.model_dump(
          by_alias=True,
          exclude_none=True,
          exclude_unset=True,
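A sketch of calling the reworked execute endpoint, which now maps loader failures to 404, execution failures to 500, and serializes the dataclass `Result` with `asdict()`. The workflow name comes from the README example further below; the running server, port, and params payload are assumptions:

```python
import httpx  # illustrative client, not a package dependency

resp = httpx.post(
    "http://127.0.0.1:8000/api/v1/workflows/run-py-local/execute",
    json={"params": {"source": "demo"}},  # hypothetical parameter values
)
resp.raise_for_status()  # 404: unknown workflow name, 500: execution error
result = resp.json()     # asdict(Result), e.g. status and context fields
```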
@@ -120,32 +132,32 @@ async def get_schedule(name: str):
      )


- @schedule.get("/deploy")
+ @schedule_route.get(path="/deploy")
  async def get_deploy_schedulers(request: Request):
      snapshot = copy.deepcopy(request.state.scheduler)
      return {"schedule": snapshot}


- @schedule.get("/deploy/{name}")
+ @schedule_route.get(path="/deploy/{name}")
  async def get_deploy_scheduler(request: Request, name: str):
      if name in request.state.scheduler:
          sch = Schedule.from_loader(name)
          getter: list[dict[str, dict[str, list[datetime]]]] = []
-         for wf in sch.workflows:
+         for workflow in sch.workflows:
              getter.append(
                  {
-                     wf.name: {
+                     workflow.name: {
                          "queue": copy.deepcopy(
-                             request.state.workflow_queue[wf.name]
+                             request.state.workflow_queue[workflow.name]
                          ),
                          "running": copy.deepcopy(
-                             request.state.workflow_running[wf.name]
+                             request.state.workflow_running[workflow.name]
                          ),
                      }
                  }
              )
          return {
-             "message": f"getting {name!r} to schedule listener.",
+             "message": f"Getting {name!r} to schedule listener.",
              "scheduler": getter,
          }
      raise HTTPException(
@@ -154,13 +166,13 @@ async def get_deploy_scheduler(request: Request, name: str):
      )


- @schedule.post("/deploy/{name}")
+ @schedule_route.post(path="/deploy/{name}")
  async def add_deploy_scheduler(request: Request, name: str):
      """Adding schedule name to application state store."""
      if name in request.state.scheduler:
          raise HTTPException(
              status_code=st.HTTP_302_FOUND,
-             detail="This schedule already exists in scheduler list.",
+             detail=f"This schedule {name!r} already exists in scheduler list.",
          )

      request.state.scheduler.append(name)
@@ -172,42 +184,49 @@ async def add_deploy_scheduler(request: Request, name: str):

      # NOTE: Create pair of workflow and on from schedule model.
      try:
-         sch = Schedule.from_loader(name)
-     except ValueError as e:
+         schedule: Schedule = Schedule.from_loader(name)
+     except ValueError as err:
          request.state.scheduler.remove(name)
-         logger.exception(e)
+         logger.exception(err)
          raise HTTPException(
              status_code=st.HTTP_404_NOT_FOUND,
-             detail=str(e),
+             detail=str(err),
          ) from None
+
      request.state.workflow_tasks.extend(
-         sch.tasks(
+         schedule.tasks(
              start_date_waiting,
              queue=request.state.workflow_queue,
-             running=request.state.workflow_running,
+             externals={},
          ),
      )
-     return {"message": f"adding {name!r} to schedule listener."}
+     return {
+         "message": f"Adding {name!r} to schedule listener.",
+         "start_date": start_date_waiting,
+     }


- @schedule.delete("/deploy/{name}")
+ @schedule_route.delete(path="/deploy/{name}")
  async def del_deploy_scheduler(request: Request, name: str):
+     """Delete workflow task on the schedule listener."""
      if name in request.state.scheduler:
+
+         # NOTE: Remove current schedule name from the state.
          request.state.scheduler.remove(name)
-         sche = Schedule.from_loader(name)
-         for workflow_task in sche.tasks(datetime.now(), {}, {}):
-             request.state.workflow_tasks.remove(workflow_task)

-         for wf in sche.workflows:
-             if wf in request.state.workflow_queue:
-                 request.state.workflow_queue.pop(wf, {})
+         schedule: Schedule = Schedule.from_loader(name)

-             if wf in request.state.workflow_running:
-                 request.state.workflow_running.pop(wf, {})
+         for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
+             if task in request.state.workflow_tasks:
+                 request.state.workflow_tasks.remove(task)

-         return {"message": f"deleted {name!r} to schedule listener."}
+         for workflow in schedule.workflows:
+             if workflow.alias in request.state.workflow_queue:
+                 request.state.workflow_queue.pop(workflow.alias)
+
+         return {"message": f"Deleted schedule {name!r} in listener."}

      raise HTTPException(
          status_code=st.HTTP_404_NOT_FOUND,
-         detail=f"Does not found {name!r} in schedule listener",
+         detail=f"Does not found schedule {name!r} in listener",
      )
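A sketch of the deploy lifecycle against these routes, under the same local-server assumption; `my-schedule` is a hypothetical schedule name that would live in the `/conf` path:

```python
import httpx  # illustrative client

BASE = "http://127.0.0.1:8000/api/v1/schedules"

# Register a schedule on the listener; the response now echoes start_date.
print(httpx.post(f"{BASE}/deploy/my-schedule").json())

# Posting the same name again returns 302 with the name in the detail.
# Deleting it also pops each workflow alias from the workflow_queue state.
print(httpx.delete(f"{BASE}/deploy/my-schedule").json())
```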
ddeutil/workflow/conf.py CHANGED
@@ -16,7 +16,7 @@ from pathlib import Path
  from typing import ClassVar, Optional, TypeVar, Union
  from zoneinfo import ZoneInfo

- from ddeutil.core import import_string, str2bool
+ from ddeutil.core import str2bool
  from ddeutil.io import PathSearch, YamlFlResolve
  from dotenv import load_dotenv
  from pydantic import BaseModel, Field
@@ -37,7 +37,6 @@ __all__: TupleStr = (
      "Config",
      "SimLoad",
      "Loader",
-     "get_type",
      "config",
      "logger",
      "FileLog",
@@ -83,7 +82,7 @@ class Config:

      # NOTE: Register
      regis_hook_str: str = os.getenv(
-         "WORKFLOW_CORE_REGISTRY", "ddeutil.workflow"
+         "WORKFLOW_CORE_REGISTRY", "src,src.ddeutil.workflow,tests,tests.utils"
      )
      regis_filter_str: str = os.getenv(
          "WORKFLOW_CORE_REGISTRY_FILTER", "ddeutil.workflow.utils"
@@ -136,11 +135,12 @@ class Config:
      )

      # NOTE: API
+     prefix_path: str = env("WORKFLOW_API_PREFIX_PATH", "/api/v1")
      enable_route_workflow: bool = str2bool(
-         os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
+         env("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
      )
      enable_route_schedule: bool = str2bool(
-         os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
+         env("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
      )

      def __init__(self) -> None:
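A minimal sketch of the new API settings, assuming the documented environment variables; `/api/v2` is a hypothetical override. The fields are class attributes read at import time, so the variables must be set before the module is imported:

```python
import os

# Must happen before importing ddeutil.workflow.conf (class-level env reads).
os.environ["WORKFLOW_API_PREFIX_PATH"] = "/api/v2"
os.environ["WORKFLOW_API_ENABLE_ROUTE_SCHEDULE"] = "false"

from ddeutil.workflow.conf import Config

config = Config()
print(config.prefix_path)            # "/api/v2"
print(config.enable_route_schedule)  # False
```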
@@ -231,8 +231,8 @@ class SimLoad:
          obj: object,
          conf: Config,
          *,
-         include: list[str] | None = None,
-         exclude: list[str] | None = None,
+         included: list[str] | None = None,
+         excluded: list[str] | None = None,
      ) -> Iterator[tuple[str, DictData]]:
          """Find all data that match with object type in config path. This class
          method can use include and exclude list of identity name for filter and
@@ -240,22 +240,23 @@ class SimLoad:

          :param obj: A object that want to validate matching before return.
          :param conf: A config object.
-         :param include:
-         :param exclude:
+         :param included:
+         :param excluded:

          :rtype: Iterator[tuple[str, DictData]]
          """
-         exclude: list[str] = exclude or []
+         exclude: list[str] = excluded or []
          for file in PathSearch(conf.conf_path).files:
+
              for key, data in cls.filter_suffix(file).items():

                  if key in exclude:
                      continue

-                 if issubclass(get_type(data["type"], conf), obj):
+                 if data["type"] == obj.__name__:
                      yield key, (
-                         {k: data[k] for k in data if k in include}
-                         if include
+                         {k: data[k] for k in data if k in included}
+                         if included
                          else data
                      )

@@ -267,14 +268,14 @@ class SimLoad:
          return {}

      @cached_property
-     def type(self) -> AnyModelType:
+     def type(self) -> str:
          """Return object of string type which implement on any registry. The
          object type.

          :rtype: AnyModelType
          """
          if _typ := self.data.get("type"):
-             return get_type(_typ, self.conf)
+             return _typ
          raise ValueError(
              f"the 'type' value: {_typ} does not exists in config data."
          )
@@ -292,47 +293,26 @@ class Loader(SimLoad):
          cls,
          obj: object,
          *,
-         include: list[str] | None = None,
-         exclude: list[str] | None = None,
+         included: list[str] | None = None,
+         excluded: list[str] | None = None,
          **kwargs,
      ) -> Iterator[tuple[str, DictData]]:
          """Override the find class method from the Simple Loader object.

          :param obj: A object that want to validate matching before return.
-         :param include:
-         :param exclude:
+         :param included:
+         :param excluded:

          :rtype: Iterator[tuple[str, DictData]]
          """
          return super().finds(
-             obj=obj, conf=Config(), include=include, exclude=exclude
+             obj=obj, conf=Config(), included=included, excluded=excluded
          )

      def __init__(self, name: str, externals: DictData) -> None:
          super().__init__(name, conf=Config(), externals=externals)


- def get_type(t: str, params: Config) -> AnyModelType:
-     """Return import type from string importable value in the type key.
-
-     :param t: A importable type string.
-     :param params: A config parameters that use registry to search this
-         type.
-
-     :rtype: AnyModelType
-     """
-     try:
-         # NOTE: Auto adding module prefix if it does not set
-         return import_string(f"ddeutil.workflow.{t}")
-     except ModuleNotFoundError:
-         for registry in params.regis_hook:
-             try:
-                 return import_string(f"{registry}.{t}")
-             except ModuleNotFoundError:
-                 continue
-         return import_string(f"{t}")
-
-
  config = Config()
  logger = get_logger("ddeutil.workflow")

@@ -488,9 +468,6 @@ class FileLog(BaseLog):
          if not config.enable_write_log:
              return self

-         logger.debug(
-             f"({self.run_id}) [LOG]: Start writing log: {self.name!r}."
-         )
          log_file: Path = self.pointer() / f"{self.run_id}.log"
          log_file.write_text(
              json.dumps(
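The `get_type` helper and its `import_string` lookups are gone: `SimLoad.finds` now matches a config entry by comparing its `type` string against the class name, and `Loader.type` returns the raw string. A minimal sketch of the new matching rule; the config dict and the stand-in class are illustrative:

```python
class Workflow:  # stands in for the real model class being matched
    ...

# An illustrative loaded config entry with a declared "type" key.
data = {"type": "Workflow", "on": ["every_5_minute_bkk"]}

# finds() keeps the entry when the declared type equals the class name.
assert data["type"] == Workflow.__name__
```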
ddeutil/workflow/cron.py CHANGED
@@ -121,7 +121,7 @@ class On(BaseModel):
          loader: Loader = Loader(name, externals=externals)

          # NOTE: Validate the config type match with current connection model
-         if loader.type != cls:
+         if loader.type != cls.__name__:
              raise ValueError(f"Type {loader.type} does not match with {cls}")

          loader_data: DictData = loader.data
@@ -29,3 +29,6 @@ class WorkflowFailException(WorkflowException): ...


  class ParamValueException(WorkflowException): ...
+
+
+ class CliException(BaseWorkflowException): ...
@@ -30,10 +30,10 @@ from concurrent.futures import (
  )
  from datetime import datetime, timedelta
  from functools import wraps
- from heapq import heappop
+ from heapq import heappop, heappush
  from textwrap import dedent
  from threading import Thread
- from typing import Callable, Optional
+ from typing import Callable, Optional, TypedDict

  from pydantic import BaseModel, Field
  from pydantic.functional_validators import field_validator, model_validator
@@ -74,6 +74,8 @@ __all__: TupleStr = (
      "monitor",
      "schedule_control",
      "schedule_runner",
+     "ReleaseThreads",
+     "ReleaseThread",
  )

@@ -265,7 +267,7 @@ class Schedule(BaseModel):
          loader: Loader = Loader(name, externals=(externals or {}))

          # NOTE: Validate the config type match with current connection model
-         if loader.type != cls:
+         if loader.type != cls.__name__:
              raise ValueError(f"Type {loader.type} does not match with {cls}")

          loader_data: DictData = copy.deepcopy(loader.data)
@@ -341,12 +343,20 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
      return decorator


- @catch_exceptions(cancel_on_failure=True)  # pragma: no cov
+ class ReleaseThread(TypedDict):
+     thread: Thread
+     start_date: datetime
+
+
+ ReleaseThreads = dict[str, ReleaseThread]
+
+
+ @catch_exceptions(cancel_on_failure=True)
  def schedule_task(
      tasks: list[WorkflowTask],
      stop: datetime,
      queue: dict[str, WorkflowQueue],
-     threads: dict[str, Thread],
+     threads: ReleaseThreads,
      log: type[Log],
  ) -> CancelJob | None:
      """Workflow task generator that create release pair of workflow and on to
@@ -397,53 +407,53 @@ def schedule_task(
              continue

          # VALIDATE: Check this task is the first release in the queue or not.
-         current_date: datetime = current_date.replace(second=0, microsecond=0)
-         if (first_date := q.first_queue.date) != current_date:
+         current_release: datetime = current_date.replace(
+             second=0, microsecond=0
+         )
+         if (first_date := q.first_queue.date) != current_release:
              logger.debug(
                  f"[WORKFLOW]: Skip schedule "
-                 f"{first_date:%Y-%m-%d %H:%M:%S} "
-                 f"for : {task.alias!r} : {task.runner.cron}"
+                 f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"
              )
              continue

          # NOTE: Pop the latest release and push it to running.
          release: WorkflowRelease = heappop(q.queue)
-         q.push_running(release)
+         heappush(q.running, release)

          logger.info(
-             f"[WORKFLOW]: Start thread: '{task.alias}|{str(task.runner.cron)}|"
+             f"[WORKFLOW]: Start thread: '{task.alias}|"
              f"{release.date:%Y%m%d%H%M}'"
          )

          # NOTE: Create thread name that able to tracking with observe schedule
          # job.
-         thread_name: str = (
-             f"{task.alias}|{str(task.runner.cron)}|"
-             f"{release.date:%Y%m%d%H%M}"
-         )
-
-         wf_thread: Thread = Thread(
+         thread_name: str = f"{task.alias}|{release.date:%Y%m%d%H%M}"
+         thread: Thread = Thread(
              target=catch_exceptions(cancel_on_failure=True)(task.release),
              kwargs={"release": release, "queue": q, "log": log},
              name=thread_name,
              daemon=True,
          )

-         threads[thread_name] = wf_thread
+         threads[thread_name] = {
+             "thread": thread,
+             "start_date": datetime.now(tz=config.tz),
+         }

-         wf_thread.start()
+         thread.start()

          delay()

      logger.debug(f"[SCHEDULE]: End schedule release {'=' * 80}")


- def monitor(threads: dict[str, Thread]) -> None:  # pragma: no cov
+ def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
      """Monitoring function that running every five minute for track long running
      thread instance from the schedule_control function that run every minute.

      :param threads: A mapping of Thread object and its name.
-     :type threads: dict[str, Thread]
+     :type threads: ReleaseThreads
      """
      logger.debug(
          "[MONITOR]: Start checking long running workflow release task."
@@ -452,8 +462,10 @@ def monitor(threads: dict[str, Thread]) -> None: # pragma: no cov
      snapshot_threads: list[str] = list(threads.keys())
      for t_name in snapshot_threads:

+         thread_release: ReleaseThread = threads[t_name]
+
          # NOTE: remove the thread that running success.
-         if not threads[t_name].is_alive():
+         if not thread_release["thread"].is_alive():
              threads.pop(t_name)

@@ -488,7 +500,7 @@ def schedule_control(

      # IMPORTANT: Create main mapping of queue and thread object.
      queue: dict[str, WorkflowQueue] = {}
-     threads: dict[str, Thread] = {}
+     threads: ReleaseThreads = {}

      start_date_waiting: datetime = start_date.replace(
          second=0, microsecond=0
ddeutil/workflow/utils.py CHANGED
@@ -74,6 +74,12 @@ def get_diff_sec(
      )


+ def wait_a_minute(now: datetime, second: float = 2) -> None:  # pragma: no cov
+     """Wait with sleep to the next minute with an offset second value."""
+     future = now.replace(second=0, microsecond=0) + timedelta(minutes=1)
+     time.sleep((future - now).total_seconds() + second)
+
+
  def delay(second: float = 0) -> None:  # pragma: no cov
      """Delay time that use time.sleep with random second value between
      0.00 - 0.99 seconds.
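A worked example of `wait_a_minute`'s arithmetic: starting from 10:15:42.5, the next minute boundary is 10:16:00, so with the default two-second offset it sleeps 17.5 + 2 = 19.5 seconds. The timestamp is illustrative:

```python
from datetime import datetime, timedelta

now = datetime(2025, 1, 1, 10, 15, 42, 500000)
future = now.replace(second=0, microsecond=0) + timedelta(minutes=1)
assert (future - now).total_seconds() + 2 == 19.5  # seconds slept
```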
@@ -326,7 +332,7 @@ def get_args_from_filter(

      if func_name not in filters:
          raise UtilException(
-             f"The post-filter: {func_name} does not support yet."
+             f"The post-filter: {func_name!r} does not support yet."
          )

      if isinstance((f_func := filters[func_name]), list) and (args or kwargs):
@@ -50,11 +50,11 @@ from .params import Param
  from .result import Result
  from .utils import (
      cut_id,
-     delay,
      gen_id,
      get_dt_now,
      has_template,
      param2template,
+     wait_a_minute,
  )

  logger = get_logger("ddeutil.workflow")
@@ -201,22 +201,12 @@ class WorkflowQueue:
              or (value in self.complete)
          )

-     def push_queue(self, value: WorkflowRelease) -> Self:
-         """Push data to the waiting queue."""
-         heappush(self.queue, value)
-         return self
-
-     def push_running(self, value: WorkflowRelease) -> Self:
-         """Push WorkflowRelease to the running queue."""
-         heappush(self.running, value)
-         return self
-
      def remove_running(self, value: WorkflowRelease) -> Self:
          """Remove WorkflowRelease in the running queue if it exists."""
          if value in self.running:
              self.running.remove(value)

-     def push_complete(self, value: WorkflowRelease) -> Self:
+     def mark_complete(self, value: WorkflowRelease) -> Self:
          """Push WorkflowRelease to the complete queue."""
          heappush(self.complete, value)
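A sketch of the slimmed-down queue API: callers now push onto the underlying heaps with `heappush` directly, and only the complete queue keeps a helper (`mark_complete`). The dataclass below is a stand-in for `WorkflowQueue`'s heap-backed fields, and a plain datetime stands in for a `WorkflowRelease`:

```python
from dataclasses import dataclass, field
from datetime import datetime
from heapq import heappush

@dataclass
class DemoQueue:  # stand-in for WorkflowQueue's heap-backed lists
    queue: list = field(default_factory=list)
    running: list = field(default_factory=list)
    complete: list = field(default_factory=list)

q = DemoQueue()
release = datetime(2025, 1, 1, 10, 0)  # stands in for a WorkflowRelease

heappush(q.queue, release)     # was: q.push_queue(release)
heappush(q.running, release)   # was: q.push_running(release)
heappush(q.complete, release)  # the heappush that mark_complete() wraps
```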
@@ -284,7 +274,7 @@ class Workflow(BaseModel):
          loader: Loader = Loader(name, externals=(externals or {}))

          # NOTE: Validate the config type match with current connection model
-         if loader.type != cls:
+         if loader.type != cls.__name__:
              raise ValueError(f"Type {loader.type} does not match with {cls}")

          loader_data: DictData = copy.deepcopy(loader.data)
@@ -492,6 +482,14 @@ class Workflow(BaseModel):
          This method allow workflow use log object to save the execution
          result to log destination like file log to the local `/logs` directory.

+         :Steps:
+             - Initialize WorkflowQueue and WorkflowRelease if they are not passed.
+             - Create release data to pass to the parameter templating function.
+             - Execute this workflow with mapping release data to its parameters.
+             - Write the execution log.
+             - Remove this release from the running queue.
+             - Push this release to the complete queue.
+
          :param release: A release datetime or WorkflowRelease object.
          :param params: A workflow parameter that pass to execute method.
          :param queue: A list of release time that already queue.
@@ -507,6 +505,7 @@ class Workflow(BaseModel):
          name: str = override_log_name or self.name
          run_id: str = run_id or gen_id(name, unique=True)
          rs_release: Result = Result(run_id=run_id)
+         rs_release_type: str = "release"

          # VALIDATE: Change queue value to WorkflowQueue object.
          if queue is None or isinstance(queue, list):
@@ -514,6 +513,7 @@ class Workflow(BaseModel):

          # VALIDATE: Change release value to WorkflowRelease object.
          if isinstance(release, datetime):
+             rs_release_type: str = "datetime"
              release: WorkflowRelease = WorkflowRelease.from_dt(release)

          logger.debug(
@@ -555,12 +555,14 @@ class Workflow(BaseModel):
          )

          # NOTE: Saving execution result to destination of the input log object.
+         logger.debug(f"({cut_id(run_id)}) [LOG]: Writing log: {name!r}.")
          rs_log.save(excluded=None)

          # NOTE: Remove this release from running.
          queue.remove_running(release)
-         queue.push_complete(release)
+         queue.mark_complete(release)

+         # NOTE: Remove the params key from the result context for deduplicate.
          context: dict[str, Any] = rs.context
          context.pop("params")

@@ -568,7 +570,12 @@ class Workflow(BaseModel):
              status=0,
              context={
                  "params": params,
-                 "release": {"status": "success", "logical_date": release.date},
+                 "release": {
+                     "status": "success",
+                     "type": rs_release_type,
+                     "logical_date": release.date,
+                     "release": release,
+                 },
                  "outputs": context,
              },
          )
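For reference, the shape of the enriched release block in the returned context, with the new `type` field recording whether the caller passed a raw `datetime` (`"datetime"`) or a `WorkflowRelease` (`"release"`); the values below are illustrative:

```python
release_context = {
    "status": "success",
    "type": "datetime",                     # or "release"
    "logical_date": "2025-01-01T10:00:00",  # release.date
    "release": "<WorkflowRelease object>",  # the release object itself
}
```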
@@ -628,7 +635,7 @@ class Workflow(BaseModel):
              continue

          # NOTE: Push the WorkflowRelease object to queue.
-         queue.push_queue(workflow_release)
+         heappush(queue.queue, workflow_release)

          return queue

@@ -742,7 +749,7 @@ class Workflow(BaseModel):
                      f"release has diff time more than 60 seconds ..."
                  )
                  heappush(wf_queue.queue, release)
-                 delay(60)
+                 wait_a_minute(get_dt_now(tz=config.tz, offset=offset))

                  # WARNING: I already call queue poking again because issue
                  # about the every minute crontab.
@@ -756,7 +763,7 @@ class Workflow(BaseModel):
                  continue

              # NOTE: Push the latest WorkflowRelease to the running queue.
-             wf_queue.push_running(release)
+             heappush(wf_queue.running, release)

              futures.append(
                  executor.submit(
@@ -1145,8 +1152,18 @@ class WorkflowTask:
          :rtype: Result
          """
          log: type[Log] = log or FileLog
+
+         if release is None:
+             if queue.check_queue(self.runner.date):
+                 release = self.runner.next
+
+                 while queue.check_queue(release):
+                     release = self.runner.next
+             else:
+                 release = self.runner.date
+
          return self.workflow.release(
-             release=release or self.runner.next,
+             release=release,
              params=self.values,
              run_id=run_id,
              log=log,
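A minimal sketch of the new default-release resolution in `WorkflowTask.release`: when no release is given, take the runner's current date unless it is already queued, in which case advance until a free date is found. Plain strings and a set stand in for cron dates and `queue.check_queue`:

```python
def resolve(runner_dates, queued):
    dates = iter(runner_dates)
    release = next(dates)     # self.runner.date
    while release in queued:  # queue.check_queue(release)
        release = next(dates) # self.runner.next
    return release

print(resolve(["10:00", "10:05", "10:10"], {"10:00", "10:05"}))  # 10:10
```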
@@ -1199,7 +1216,7 @@ class WorkflowTask:
              return queue

          # NOTE: Push the WorkflowRelease object to queue.
-         queue.push_queue(workflow_release)
+         heappush(queue.queue, workflow_release)

          return queue


@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ddeutil-workflow
- Version: 0.0.24
+ Version: 0.0.26
  Summary: Lightweight workflow orchestration with less dependencies
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -26,7 +26,6 @@ Requires-Dist: ddeutil>=0.4.3
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
  Requires-Dist: pydantic==2.10.4
  Requires-Dist: python-dotenv==1.0.1
- Requires-Dist: typer==0.15.1
  Requires-Dist: schedule<2.0.0,==1.2.2
  Provides-Extra: api
  Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
@@ -68,8 +67,8 @@ configuration. It called **Metadata Driven Data Workflow**.
  > with `.yml` files and all of config file from several data orchestration framework
  > tools from my experience on Data Engineer. :grimacing:
  >
- > Other workflow that I interest on them and pick some interested feature to this
- > package:
+ > Other workflow tools that I find interesting, and from which I picked some
+ > features to implement in this package:
  >
  > - [Google **Workflows**](https://cloud.google.com/workflows)
  > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -80,23 +79,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
  If you want to install this package with application add-ons, you should add
  `app` in installation;

- | Usecase           | Install Optional                         | Support            |
- |-------------------|------------------------------------------|--------------------|
- | Python & CLI      | `pip install ddeutil-workflow`           | :heavy_check_mark: |
- | FastAPI Server    | `pip install ddeutil-workflow[api]`      | :heavy_check_mark: |
-
-
- > I added this feature to the main milestone.
- >
- > :egg: **Docker Images** supported:
- >
- > | Docker Image                | Python Version | Support |
- > |-----------------------------|----------------|---------|
- > | ddeutil-workflow:latest     | `3.9`          | :x:     |
- > | ddeutil-workflow:python3.10 | `3.10`         | :x:     |
- > | ddeutil-workflow:python3.11 | `3.11`         | :x:     |
- > | ddeutil-workflow:python3.12 | `3.12`         | :x:     |
- > | ddeutil-workflow:python3.12 | `3.13`         | :x:     |
+ | Usecase        | Install Optional                         | Support            |
+ |----------------|------------------------------------------|--------------------|
+ | Python         | `pip install ddeutil-workflow`           | :heavy_check_mark: |
+ | FastAPI Server | `pip install ddeutil-workflow[api]`      | :heavy_check_mark: |

  ## :beers: Usage

@@ -113,7 +99,7 @@ use-case.
  run-py-local:

      # Validate model that use to parsing exists for template file
-     type: ddeutil.workflow.Workflow
+     type: ddeutil.workflow.workflow.Workflow
      on:
         # If workflow deploy to schedule, it will running every 5 minutes
         # with Asia/Bangkok timezone.
@@ -182,35 +168,35 @@ The main configuration that use to dynamic changing with your propose of this
  application. If any configuration values do not set yet, it will use default value
  and do not raise any error to you.

- | Environment | Component | Default | Description | Remark |
- |:----------------------------------------|:----------|:---------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
- | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
- | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
- | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
- | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
- | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
- | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
- | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
- | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
- | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
- | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
+ | Environment | Component | Default | Description | Remark |
+ |:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
+ | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
+ | `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
+ | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
+ | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
+ | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
+ | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
+ | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
+ | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+ | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
+ | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
+ | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+ | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+ | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+ | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
+ | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
+ | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
+ | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
+ | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
+ | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
+ | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |

  **API Application**:

- | Environment | Component | Default | Description | Remark |
- |:--------------------------------------|-----------|---------|------------------------------------------------------------------------------------|--------|
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
+ | Environment | Component | Default | Description | Remark |
+ |:--------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
+ | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
+ | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |

  ## :rocket: Deployment

@@ -218,12 +204,6 @@ This package able to run as a application service for receive manual trigger
  from the master node via RestAPI or use to be Scheduler background service
  like crontab job but via Python API.

- ### Schedule App
-
- ```shell
- (venv) $ ddeutil-workflow schedule
- ```
-
  ### API Server

  ```shell
@@ -231,7 +211,7 @@ like crontab job but via Python API.
  ```

  > [!NOTE]
- > If this package already deploy, it able to use
+ > If this package is already deployed, it can run with multiple workers;
  > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`

  ### Docker Container
@@ -0,0 +1,23 @@
+ ddeutil/workflow/__about__.py,sha256=YOPdjr7wsJbawVBpWp1ZSxpo-8M9kGnIdMF9N0v_4HM,28
+ ddeutil/workflow/__cron.py,sha256=uA8XcbY_GwA9rJSHaHUaXaJyGDObJN0ZeYlJSinL8y8,26880
+ ddeutil/workflow/__init__.py,sha256=49eGrCuchPVZKMybRouAviNhbulK_F6VwCmLm76hIss,1478
+ ddeutil/workflow/__types.py,sha256=Ia7f38kvL3NibwmRKi0wQ1ud_45Z-SojYGhNJwIqcu8,3713
+ ddeutil/workflow/conf.py,sha256=YY2zZ_qv9JkTDs_73bkyrF1n1cqBINuxzMxbBjzYw-8,15361
+ ddeutil/workflow/cron.py,sha256=75A0hqevvouziKoLALncLJspVAeki9qCH3zniAJaxzY,7513
+ ddeutil/workflow/exceptions.py,sha256=P56K7VD3etGm9y-k_GXrzEyqsTCaz9EJazTIshZDf9g,943
+ ddeutil/workflow/job.py,sha256=cvSLMdc1sMl1MeU7so7Oe2SdRYxQwt6hm55mLV1iP-Y,24219
+ ddeutil/workflow/params.py,sha256=uPGkZx18E-iZ8BteqQ2ONgg0frhF3ZmP5cOyfK2j59U,5280
+ ddeutil/workflow/result.py,sha256=WIC8MsnfLiWNpZomT6jS4YCdYhlbIVVBjtGGe2dkoKk,3404
+ ddeutil/workflow/scheduler.py,sha256=_V812UlqcwfVF2Sl_45nIatMklioBXcXfGZSFoAAjwo,20452
+ ddeutil/workflow/stage.py,sha256=a2sngzs9DkP6GU2pgAD3QvGoijyBQTR_pOhyJUIuWAo,26692
+ ddeutil/workflow/utils.py,sha256=pucRnCi9aLJDptXhzzReHZd5d-S0o5oZif5tr6H4iy8,18736
+ ddeutil/workflow/workflow.py,sha256=AD0rs1tRT2EpvUyNVAEr2bBPgF6-KOzGmLedR3o4y0Q,42177
+ ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
+ ddeutil/workflow/api/api.py,sha256=Md1cz3Edc7_uz63s_L_i-R3IE4mkO3aTADrX8GOGU-Y,5644
+ ddeutil/workflow/api/repeat.py,sha256=zyvsrXKk-3-_N8ZRZSki0Mueshugum2jtqctEOp9QSc,4927
+ ddeutil/workflow/api/route.py,sha256=MQXtkF5uM_ZL1SGDuXFzgkNkbT5cpAXVNRp6mvewupM,7447
+ ddeutil_workflow-0.0.26.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.26.dist-info/METADATA,sha256=MD2MuUFEap5vrkRukNQzf2pLGnZI1QZh9CkmIo5ZVyU,14075
+ ddeutil_workflow-0.0.26.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
+ ddeutil_workflow-0.0.26.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.26.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.6.0)
+ Generator: setuptools (75.7.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

ddeutil/workflow/cli.py DELETED
@@ -1,130 +0,0 @@
- # ------------------------------------------------------------------------------
- # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
- # Licensed under the MIT License. See LICENSE in the project root for
- # license information.
- # ------------------------------------------------------------------------------
- from __future__ import annotations
-
- import json
- from datetime import datetime
- from enum import Enum
- from typing import Annotated, Optional
-
- from ddeutil.core import str2list
- from typer import Argument, Option, Typer
-
- from .conf import config, get_logger
-
- logger = get_logger("ddeutil.workflow")
- cli: Typer = Typer()
- cli_log: Typer = Typer()
- cli.add_typer(
-     cli_log,
-     name="log",
-     help="Logging of workflow CLI",
- )
-
-
- @cli.command()
- def run(
-     workflow: Annotated[
-         str,
-         Argument(help="A workflow name that want to run manually"),
-     ],
-     params: Annotated[
-         str,
-         Argument(
-             help="A json string for parameters of this workflow execution."
-         ),
-     ],
- ):
-     """Run workflow workflow manually with an input custom parameters that able
-     to receive with workflow params config.
-     """
-     logger.info(f"Running workflow name: {workflow}")
-     logger.info(f"... with Parameters: {json.dumps(json.loads(params))}")
-
-
- @cli.command()
- def schedule(
-     stop: Annotated[
-         Optional[datetime],
-         Argument(
-             formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S"],
-             help="A stopping datetime that want to stop on schedule app.",
-         ),
-     ] = None,
-     excluded: Annotated[
-         Optional[str],
-         Argument(help="A list of exclude workflow name in str."),
-     ] = None,
-     externals: Annotated[
-         Optional[str],
-         Argument(
-             help="A json string for parameters of this workflow execution."
-         ),
-     ] = None,
- ):
-     """Start workflow scheduler that will call workflow function from scheduler
-     module.
-     """
-     excluded: list[str] = str2list(excluded) if excluded else []
-     externals: str = externals or "{}"
-     if stop:
-         stop: datetime = stop.astimezone(tz=config.tz)
-
-     from .scheduler import schedule_runner
-
-     # NOTE: Start running workflow scheduler application.
-     workflow_rs: list[str] = schedule_runner(
-         stop=stop, excluded=excluded, externals=json.loads(externals)
-     )
-     logger.info(f"Application run success: {workflow_rs}")
-
-
- @cli_log.command("workflow-get")
- def workflow_log_get(
-     name: Annotated[
-         str,
-         Argument(help="A workflow name that want to getting log"),
-     ],
-     limit: Annotated[
-         int,
-         Argument(help="A number of the limitation of logging"),
-     ] = 100,
-     desc: Annotated[
-         bool,
-         Option(
-             "--desc",
-             help="A descending flag that order by logging release datetime.",
-         ),
-     ] = True,
- ):
-     logger.info(f"{name} : limit {limit} : desc: {desc}")
-     return [""]
-
-
- class LogMode(str, Enum):
-     get = "get"
-     delete = "delete"
-
-
- @cli_log.command("workflow-delete")
- def workflow_log_delete(
-     mode: Annotated[
-         LogMode,
-         Argument(case_sensitive=True),
-     ]
- ):
-     logger.info(mode)
-
-
- @cli.callback()
- def main():
-     """
-     Manage workflow with CLI.
-     """
-
-
- if __name__ == "__main__":
-     cli()
@@ -1,24 +0,0 @@
- ddeutil/workflow/__about__.py,sha256=LbAkk7O3dezpuJ-KPhsDQuHdrO9T0qmhBd-oDJzBhq4,28
- ddeutil/workflow/__cron.py,sha256=uA8XcbY_GwA9rJSHaHUaXaJyGDObJN0ZeYlJSinL8y8,26880
- ddeutil/workflow/__init__.py,sha256=49eGrCuchPVZKMybRouAviNhbulK_F6VwCmLm76hIss,1478
- ddeutil/workflow/__types.py,sha256=Ia7f38kvL3NibwmRKi0wQ1ud_45Z-SojYGhNJwIqcu8,3713
- ddeutil/workflow/api.py,sha256=cdRxqwVyGm_Ni_OmflIP35vUkkq8lHpF3xHh_BvVrKs,4692
- ddeutil/workflow/cli.py,sha256=8C5Xri1_82B-sxQcKMPRjDJcuYJG3FZ2bJehvs_xZ4s,3278
- ddeutil/workflow/conf.py,sha256=Al-00Uru2fCJaW2C_vt4IFuBDpI8Y5C4oAuLJ0Vdvbk,16110
- ddeutil/workflow/cron.py,sha256=0SxC3SH-8V1idgAEFOY-gYFEQPjK_zymmc5XqPoX_0I,7504
- ddeutil/workflow/exceptions.py,sha256=NqnQJP52S59XIYMeXbTDbr4xH2UZ5EA3ejpU5Z4g6cQ,894
- ddeutil/workflow/job.py,sha256=cvSLMdc1sMl1MeU7so7Oe2SdRYxQwt6hm55mLV1iP-Y,24219
- ddeutil/workflow/params.py,sha256=uPGkZx18E-iZ8BteqQ2ONgg0frhF3ZmP5cOyfK2j59U,5280
- ddeutil/workflow/repeat.py,sha256=s0azh-f5JQeow7kpxM8GKlqgAmKL7oU6St3L4Ggx4cY,4925
- ddeutil/workflow/result.py,sha256=WIC8MsnfLiWNpZomT6jS4YCdYhlbIVVBjtGGe2dkoKk,3404
- ddeutil/workflow/route.py,sha256=bH5IT90JVjCDe9A0gIefpQQBEfcd-o1uCHE9AvNglvU,6754
- ddeutil/workflow/scheduler.py,sha256=UI8wK2xBYmM3Bh_hel0TMzuJWyezM83Yn4xoiYqTSSQ,20238
- ddeutil/workflow/stage.py,sha256=a2sngzs9DkP6GU2pgAD3QvGoijyBQTR_pOhyJUIuWAo,26692
- ddeutil/workflow/utils.py,sha256=PhNJ54oKnZfq4nVOeP3tDjFN43ArUsMOnpcbSu7bo4I,18450
- ddeutil/workflow/workflow.py,sha256=JyT65Tql7CueQn2z4ZGhp6r44jgYDMcCOpxhiwI19uM,41403
- ddeutil_workflow-0.0.24.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.24.dist-info/METADATA,sha256=0yh6zKsIu1COnhl-25rOxBGEqLQbrJZzA0IhriO3XwA,14234
- ddeutil_workflow-0.0.24.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- ddeutil_workflow-0.0.24.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
- ddeutil_workflow-0.0.24.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.24.dist-info/RECORD,,
@@ -1,2 +0,0 @@
- [console_scripts]
- ddeutil-workflow = ddeutil.workflow.cli:cli