ddeutil-workflow 0.0.8__py3-none-any.whl → 0.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- __version__: str = "0.0.8"
+ __version__: str = "0.0.10"
@@ -10,22 +10,11 @@ from .exceptions import (
      StageException,
      UtilException,
  )
- from .on import AwsOn, On
- from .pipeline import Job, Pipeline
- from .stage import (
-     BashStage,
-     EmptyStage,
-     HookStage,
-     PyStage,
-     Stage,
-     TriggerStage,
- )
+ from .on import On, interval2crontab
+ from .pipeline import Job, Pipeline, Strategy
+ from .stage import Stage, handler_result
  from .utils import (
-     ChoiceParam,
-     DatetimeParam,
-     IntParam,
      Param,
-     StrParam,
      dash2underscore,
      param2template,
  )
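
For orientation, the 0.0.10 top-level import surface implied by this hunk is sketched below. This is a sketch, not code from the release, and it assumes the hunk belongs to the package `__init__.py` (its file header is not shown here); the removed names (`AwsOn`, the individual stage classes, and the typed param classes) are no longer importable from the package root.

    # Sketch of the 0.0.10 top-level imports, assuming the hunk above is
    # ddeutil/workflow/__init__.py (the filename is not shown in this diff).
    from ddeutil.workflow import (
        Job,
        On,
        Param,
        Pipeline,
        Stage,
        Strategy,
        interval2crontab,
    )
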
ddeutil/workflow/api.py CHANGED
@@ -6,115 +6,84 @@
  from __future__ import annotations

  import asyncio
- import queue
- import time
+ import os
  import uuid
- from contextlib import asynccontextmanager
- from datetime import datetime
-
- from apscheduler.executors.pool import ProcessPoolExecutor
- from apscheduler.jobstores.memory import MemoryJobStore
- from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
- from apscheduler.schedulers.asyncio import AsyncIOScheduler
- from fastapi import BackgroundTasks, FastAPI
+ from queue import Empty, Queue
+
+ from ddeutil.core import str2bool
+ from dotenv import load_dotenv
+ from fastapi import FastAPI
  from fastapi.middleware.gzip import GZipMiddleware
  from fastapi.responses import UJSONResponse
  from pydantic import BaseModel

+ from .__about__ import __version__
  from .log import get_logger
  from .repeat import repeat_every
- from .route import schedule_route, workflow_route

- logger = get_logger(__name__)
+ load_dotenv()
+ logger = get_logger("ddeutil.workflow")


+ app = FastAPI(
+     title="Workflow API",
+     description=(
+         "This is a workflow FastAPI web application used to manage manual "
+         "execution or scheduling of workflows via REST API."
+     ),
+     version=__version__,
+ )
+ app.add_middleware(GZipMiddleware, minimum_size=1000)
+ app.queue = Queue()
+ app.output_dict = {}
+ app.queue_limit = 5
+
+
+ @app.on_event("startup")
+ @repeat_every(seconds=10)
  def broker_upper_messages():
+     """Broker that receives messages from the `/upper` path and changes them
+     to upper case. It runs in the background every 10 seconds.
+     """
      for _ in range(app.queue_limit):
          try:
              obj = app.queue.get_nowait()
              app.output_dict[obj["request_id"]] = obj["text"].upper()
              logger.info(f"Upper message: {app.output_dict}")
-         except queue.Empty:
+         except Empty:
              pass


- jobstores = {
-     "default": MemoryJobStore(),
-     "sqlite": SQLAlchemyJobStore(url="sqlite:///jobs-store.sqlite"),
- }
- executors = {
-     "default": {"type": "threadpool", "max_workers": 5},
-     "processpool": ProcessPoolExecutor(max_workers=5),
- }
- scheduler = AsyncIOScheduler(
-     jobstores=jobstores,
-     executors=executors,
-     timezone="Asia/Bangkok",
- )
-
-
- @asynccontextmanager
- async def lifespan(_: FastAPI):
-     scheduler.start()
-     yield
-     scheduler.shutdown(wait=False)
-
-
- app = FastAPI(lifespan=lifespan)
- app.add_middleware(GZipMiddleware, minimum_size=1000)
- app.include_router(schedule_route)
- app.include_router(workflow_route)
-
- app.scheduler = scheduler
- app.scheduler.add_job(
-     broker_upper_messages,
-     "interval",
-     seconds=10,
- )
- app.queue = queue.Queue()
- app.output_dict = {}
- app.queue_limit = 2
-
-
- def write_pipeline(task_id: str, message=""):
-     logger.info(f"{task_id} : {message}")
-     time.sleep(5)
-     logger.info(f"{task_id} : run task successfully!!!")
-
-
- @app.post("/schedule/{name}", response_class=UJSONResponse)
- async def send_schedule(name: str, background_tasks: BackgroundTasks):
-     background_tasks.add_task(
-         write_pipeline,
-         name,
-         message=f"some message for {name}",
-     )
-     await fetch_current_time()
-     return {"message": f"Schedule sent {name!r} in the background"}
-
-
- @repeat_every(seconds=2, max_repetitions=3)
- async def fetch_current_time():
-     logger.info(f"Fetch: {datetime.now()}")
-
-
  class Payload(BaseModel):
      text: str


  async def get_result(request_id):
-     while 1:
+     """Get data from the global output dict."""
+     while True:
          if request_id in app.output_dict:
              result = app.output_dict[request_id]
              del app.output_dict[request_id]
              return {"message": result}
-         await asyncio.sleep(0.001)
+         await asyncio.sleep(0.0025)


  @app.post("/upper", response_class=UJSONResponse)
  async def message_upper(payload: Payload):
+     """Convert a message from any case to upper case."""
      request_id: str = str(uuid.uuid4())
      app.queue.put(
          {"text": payload.text, "request_id": request_id},
      )
      return await get_result(request_id)
+
+
+ if str2bool(os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")):
+     from .route import workflow
+
+     app.include_router(workflow)
+
+ if str2bool(os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")):
+     from .route import schedule
+
+     app.include_router(schedule)
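
Taken together, the module now builds the FastAPI `app` at import time, wires the queue-backed `/upper` demo endpoint, and gates the two routers behind environment variables. Below is a minimal sketch of exercising it, not part of the diff; it assumes `ddeutil-workflow==0.0.10` and FastAPI's `TestClient` (httpx) are installed, and that the package's `repeat_every` keeps running the startup broker on its 10-second interval, so the first `/upper` call may block until the next broker tick.

    # Sketch: call the new /upper endpoint in-process.
    import os

    # The routers are optional; disable them before importing the app,
    # mirroring the str2bool(os.getenv(...)) gates above.
    os.environ["WORKFLOW_API_ENABLE_ROUTE_WORKFLOW"] = "false"
    os.environ["WORKFLOW_API_ENABLE_ROUTE_SCHEDULE"] = "false"

    from fastapi.testclient import TestClient

    from ddeutil.workflow.api import app

    # Entering the context manager runs the startup event, which starts the
    # repeat_every broker that upper-cases queued messages.
    with TestClient(app) as client:
        rs = client.post("/upper", json={"text": "hello"})
        print(rs.json())  # expected: {"message": "HELLO"}
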
@@ -0,0 +1,134 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ from __future__ import annotations
+
+ import json
+ import os
+ from datetime import datetime
+ from enum import Enum
+ from typing import Annotated, Optional
+ from zoneinfo import ZoneInfo
+
+ from ddeutil.core import str2list
+ from typer import Argument, Option, Typer
+
+ from .log import get_logger
+
+ logger = get_logger("ddeutil.workflow")
+ cli: Typer = Typer()
+ cli_log: Typer = Typer()
+ cli.add_typer(
+     cli_log,
+     name="log",
+     help="Logging of workflow CLI",
+ )
+
+
+ @cli.command()
+ def run(
+     pipeline: Annotated[
+         str,
+         Argument(help="A pipeline name to run manually"),
+     ],
+     params: Annotated[
+         str,
+         Argument(
+             help="A JSON string of parameters for this pipeline execution."
+         ),
+     ],
+ ):
+     """Run a pipeline workflow manually with custom input parameters that the
+     pipeline params config can receive.
+     """
+     logger.info(f"Running pipeline name: {pipeline}")
+     logger.info(f"... with Parameters: {json.dumps(json.loads(params))}")
+
+
+ @cli.command()
+ def schedule(
+     stop: Annotated[
+         Optional[datetime],
+         Argument(
+             formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S"],
+             help="A datetime at which to stop the schedule app.",
+         ),
+     ] = None,
+     excluded: Annotated[
+         Optional[str],
+         Argument(help="A list of workflow names to exclude, passed as a string."),
+     ] = None,
+     externals: Annotated[
+         Optional[str],
+         Argument(
+             help="A JSON string of parameters for this pipeline execution."
+         ),
+     ] = None,
+ ):
+     """Start the workflow scheduler, which calls the workflow function from
+     the scheduler module.
+     """
+     excluded: list[str] = str2list(excluded) if excluded else []
+     externals: str = externals or "{}"
+     if stop:
+         stop: datetime = stop.astimezone(
+             tz=ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
+         )
+
+     from .scheduler import workflow
+
+     # NOTE: Start running workflow scheduler application.
+     workflow_rs: list[str] = workflow(
+         stop=stop, excluded=excluded, externals=json.loads(externals)
+     )
+     logger.info(f"Application run success: {workflow_rs}")
+
+
+ @cli_log.command("pipeline-get")
+ def pipeline_log_get(
+     name: Annotated[
+         str,
+         Argument(help="A pipeline name to get logs for"),
+     ],
+     limit: Annotated[
+         int,
+         Argument(help="The maximum number of log records"),
+     ] = 100,
+     desc: Annotated[
+         bool,
+         Option(
+             "--desc",
+             help="A descending flag that orders by log release datetime.",
+         ),
+     ] = True,
+ ):
+     logger.info(f"{name} : limit {limit} : desc: {desc}")
+     return [""]
+
+
+ class LogMode(str, Enum):
+     get = "get"
+     delete = "delete"
+
+
+ @cli_log.command("pipeline-delete")
+ def pipeline_log_delete(
+     mode: Annotated[
+         LogMode,
+         Argument(case_sensitive=True),
+     ]
+ ):
+     logger.info(mode)
+
+
+ @cli.callback()
+ def main():
+     """
+     Manage workflows with the CLI.
+     """
+
+
+ if __name__ == "__main__":
+     cli()
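
The new Typer application can be driven in-process for a quick smoke test. The sketch below is not part of the diff: it assumes this added file ships as `ddeutil.workflow.cli` (the hunk header above does not show the filename), and the pipeline name and parameter payload are placeholders.

    # Sketch: drive the Typer app with its test runner.
    from typer.testing import CliRunner

    from ddeutil.workflow.cli import cli  # assumed module path

    runner = CliRunner()

    # `run` takes a pipeline name and a JSON string of parameters;
    # "my-pipeline" and the payload are hypothetical values.
    rs = runner.invoke(cli, ["run", "my-pipeline", '{"run-date": "2024-01-01"}'])
    print(rs.exit_code, rs.output)

    # `log pipeline-get` takes a pipeline name and an optional limit (default 100).
    rs = runner.invoke(cli, ["log", "pipeline-get", "my-pipeline", "10"])
    print(rs.exit_code, rs.output)
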