ddeutil-workflow 0.0.6__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registry.
@@ -1 +1 @@
- __version__: str = "0.0.6"
+ __version__: str = "0.0.7"
@@ -3,7 +3,29 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- from .exceptions import StageException
- from .on import On
- from .pipeline import Pipeline
- from .stage import Stage
+ from .exceptions import (
+     JobException,
+     ParamValueException,
+     PipelineException,
+     StageException,
+     UtilException,
+ )
+ from .on import AwsOn, On
+ from .pipeline import Job, Pipeline
+ from .stage import (
+     BashStage,
+     EmptyStage,
+     HookStage,
+     PyStage,
+     Stage,
+     TriggerStage,
+ )
+ from .utils import (
+     ChoiceParam,
+     DatetimeParam,
+     IntParam,
+     Param,
+     StrParam,
+     dash2underscore,
+     param2template,
+ )
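In practice this means the new objects are importable straight from the package root. A minimal sketch, assuming the hunk above is the package __init__ and the wheel installs under the ddeutil.workflow namespace (as the project name suggests):

    # These names are re-exported by the __init__ change shown above.
    from ddeutil.workflow import (
        Job,
        Pipeline,
        BashStage,
        PyStage,
        Param,
        param2template,
    )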
@@ -27,12 +27,21 @@ class Re:
  """Regular expression config."""

  # NOTE: Search caller
+ # \${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
  __re_caller: str = r"""
      \$
      {{
-         \s*(?P<caller>
+         \s*
+         (?P<caller>
              [a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?
          )\s*
+         (?P<post_filters>
+             (?:
+                 \|\s*
+                 (?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]*)
+                 \s*
+             )*
+         )
      }}
  """
  RE_CALLER: Pattern = re.compile(
@@ -40,6 +49,7 @@ class Re:
  )

  # NOTE: Search task
+ # ^(?P<path>[^/@]+)/(?P<func>[^@]+)@(?P<tag>.+)$
  __re_task_fmt: str = r"""
      ^
      (?P<path>[^/@]+)
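To make the new post_filters group concrete, here is a small, self-contained sketch that compiles the flattened pattern copied from the comment line in the hunk above and runs it against a made-up template string (the filter name "upper" is purely illustrative):

    import re

    # Flattened form of __re_caller, reproduced from the comment in the diff.
    RE_CALLER = re.compile(
        r"\${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*"
        r"(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}"
        r"[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}"
    )

    m = RE_CALLER.search("${{ params.name | upper }}")
    print(m.group("caller"))        # -> params.name
    print(m.group("post_filters"))  # -> "| upper " (filter chain, whitespace kept)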
@@ -0,0 +1,120 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ from __future__ import annotations
+
+ import asyncio
+ import queue
+ import time
+ import uuid
+ from contextlib import asynccontextmanager
+ from datetime import datetime
+
+ from apscheduler.executors.pool import ProcessPoolExecutor
+ from apscheduler.jobstores.memory import MemoryJobStore
+ from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+ from apscheduler.schedulers.asyncio import AsyncIOScheduler
+ from fastapi import BackgroundTasks, FastAPI
+ from fastapi.middleware.gzip import GZipMiddleware
+ from fastapi.responses import UJSONResponse
+ from pydantic import BaseModel
+
+ from .log import get_logger
+ from .repeat import repeat_every
+ from .route import schedule_route, workflow_route
+
+ logger = get_logger(__name__)
+
+
+ def broker_upper_messages():
+     for _ in range(app.queue_limit):
+         try:
+             obj = app.queue.get_nowait()
+             app.output_dict[obj["request_id"]] = obj["text"].upper()
+             logger.info(f"Upper message: {app.output_dict}")
+         except queue.Empty:
+             pass
+
+
+ jobstores = {
+     "default": MemoryJobStore(),
+     "sqlite": SQLAlchemyJobStore(url="sqlite:///jobs-store.sqlite"),
+ }
+ executors = {
+     "default": {"type": "threadpool", "max_workers": 5},
+     "processpool": ProcessPoolExecutor(max_workers=5),
+ }
+ scheduler = AsyncIOScheduler(
+     jobstores=jobstores,
+     executors=executors,
+     timezone="Asia/Bangkok",
+ )
+
+
+ @asynccontextmanager
+ async def lifespan(_: FastAPI):
+     scheduler.start()
+     yield
+     scheduler.shutdown(wait=False)
+
+
+ app = FastAPI(lifespan=lifespan)
+ app.add_middleware(GZipMiddleware, minimum_size=1000)
+ app.include_router(schedule_route)
+ app.include_router(workflow_route)
+
+ app.scheduler = scheduler
+ app.scheduler.add_job(
+     broker_upper_messages,
+     "interval",
+     seconds=10,
+ )
+ app.queue = queue.Queue()
+ app.output_dict = {}
+ app.queue_limit = 2
+
+
+ def write_pipeline(task_id: str, message=""):
+     logger.info(f"{task_id} : {message}")
+     time.sleep(5)
+     logger.info(f"{task_id} : run task successfully!!!")
+
+
+ @app.post("/schedule/{name}", response_class=UJSONResponse)
+ async def send_schedule(name: str, background_tasks: BackgroundTasks):
+     background_tasks.add_task(
+         write_pipeline,
+         name,
+         message=f"some message for {name}",
+     )
+     await fetch_current_time()
+     return {"message": f"Schedule sent {name!r} in the background"}
+
+
+ @repeat_every(seconds=2, max_repetitions=3)
+ async def fetch_current_time():
+     logger.info(f"Fetch: {datetime.now()}")
+
+
+ class Payload(BaseModel):
+     text: str
+
+
+ async def get_result(request_id):
+     while 1:
+         if request_id in app.output_dict:
+             result = app.output_dict[request_id]
+             del app.output_dict[request_id]
+             return {"message": result}
+         await asyncio.sleep(0.001)
+
+
+ @app.post("/upper", response_class=UJSONResponse)
+ async def message_upper(payload: Payload):
+     request_id: str = str(uuid.uuid4())
+     app.queue.put(
+         {"text": payload.text, "request_id": request_id},
+     )
+     return await get_result(request_id)
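As a quick sanity check of the new /upper endpoint, something like the following should work once the API dependencies (fastapi, apscheduler, uvicorn) are installed. The module path and HTTP client are assumptions, since file names are not shown in this diff:

    import httpx  # any HTTP client works; httpx is just an assumption here

    # Assumes the app above is served locally, for example with:
    #   uvicorn ddeutil.workflow.api:app --port 8000   # module path is a guess
    # The /upper handler holds the response until the 10-second broker job has
    # processed the queued message, so allow a generous timeout.
    resp = httpx.post(
        "http://localhost:8000/upper",
        json={"text": "hello"},
        timeout=30,
    )
    print(resp.json())  # expected: {"message": "HELLO"}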
@@ -0,0 +1,41 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ import functools
+ import time
+
+ import schedule
+
+
+ def catch_exceptions(cancel_on_failure=False):
+     def catch_exceptions_decorator(job_func):
+         @functools.wraps(job_func)
+         def wrapper(*args, **kwargs):
+             try:
+                 return job_func(*args, **kwargs)
+             except Exception as err:
+                 print(err)
+
+                 if cancel_on_failure:
+                     return schedule.CancelJob
+
+         return wrapper
+
+     return catch_exceptions_decorator
+
+
+ @catch_exceptions(cancel_on_failure=True)
+ def bad_task():
+     return 1 / 0
+
+
+ schedule.every(5).seconds.do(bad_task)
+
+ if __name__ == "__main__":
+     while True:
+         schedule.run_pending()
+         time.sleep(1)
+         if not schedule.get_jobs():
+             break
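For context, this demo leans on documented behavior of the schedule library: a job that returns schedule.CancelJob is unscheduled, so once bad_task fails and cancels itself, schedule.get_jobs() becomes empty and the main loop exits. A minimal standalone illustration of that mechanism:

    import schedule

    def run_once():
        print("running once, then unscheduling myself")
        return schedule.CancelJob  # returning CancelJob removes this job

    schedule.every(1).seconds.do(run_once)
    schedule.run_all()           # run all jobs now, regardless of their schedule
    print(schedule.get_jobs())   # -> [] because the job cancelled itself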
@@ -22,3 +22,6 @@ class JobException(WorkflowException): ...
 
 
  class PipelineException(WorkflowException): ...
+
+
+ class ParamValueException(ValueError): ...
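Worth noting: the new ParamValueException derives from ValueError rather than from WorkflowException, so generic ValueError handlers will also catch it. A tiny sketch using the name re-exported from the package root in the __init__ hunk above:

    from ddeutil.workflow import ParamValueException

    try:
        raise ParamValueException("got an unsupported parameter value")
    except ValueError as err:   # catches ParamValueException too
        print(type(err).__name__, "-", err)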
@@ -0,0 +1,30 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ from __future__ import annotations
+
+ import logging
+ from functools import lru_cache
+
+ from rich.console import Console
+ from rich.logging import RichHandler
+
+ console = Console(color_system="256", width=200, style="blue")
+
+
+ @lru_cache
+ def get_logger(module_name):
+     logger = logging.getLogger(module_name)
+     handler = RichHandler(
+         rich_tracebacks=True, console=console, tracebacks_show_locals=True
+     )
+     handler.setFormatter(
+         logging.Formatter(
+             "[ %(threadName)s:%(funcName)s:%(process)d ] - %(message)s"
+         )
+     )
+     logger.addHandler(handler)
+     logger.setLevel(logging.DEBUG)
+     return logger
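Finally, a quick sketch of how this helper might be used. The module path ddeutil.workflow.log is inferred from the `from .log import get_logger` line in the API module above, not stated explicitly in this diff:

    from ddeutil.workflow.log import get_logger

    log = get_logger(__name__)
    log.info("hello from the rich-backed logger")

    # lru_cache means repeated calls with the same name return the same logger,
    # so the RichHandler is not attached more than once per module.
    assert get_logger(__name__) is log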