ddeutil-workflow 0.0.8__py3-none-any.whl → 0.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,29 +3,62 @@
 # Licensed under the MIT License.
 # This code refs from: https://github.com/priyanshu-panwar/fastapi-utilities
 # ------------------------------------------------------------------------------
+from __future__ import annotations
+
 import asyncio
-import logging
+import os
 from asyncio import ensure_future
 from datetime import datetime
 from functools import wraps
+from zoneinfo import ZoneInfo
 
-from croniter import croniter
 from starlette.concurrency import run_in_threadpool
 
+from .cron import CronJob
+from .log import get_logger
+
+logger = get_logger("ddeutil.workflow")
 
-def get_delta(cron: str):
+
+def get_cronjob_delta(cron: str):
     """This function returns the time delta between now and the next cron
     execution time.
     """
-    now: datetime = datetime.now()
-    cron = croniter(cron, now)
-    return (cron.get_next(datetime) - now).total_seconds()
+    now: datetime = datetime.now(
+        tz=ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
+    )
+    cron = CronJob(cron)
+    return (cron.schedule(now).next - now).total_seconds()
+
+
+def cron_valid(cron: str):
+    try:
+        CronJob(cron)
+    except Exception as err:
+        raise ValueError(f"Crontab value does not valid, {cron}") from err
+
+
+async def run_func(
+    is_coroutine,
+    func,
+    *args,
+    raise_exceptions: bool = False,
+    **kwargs,
+):
+    try:
+        if is_coroutine:
+            await func(*args, **kwargs)
+        else:
+            await run_in_threadpool(func, *args, **kwargs)
+    except Exception as e:
+        logger.exception(e)
+        if raise_exceptions:
+            raise e
 
 
 def repeat_at(
     *,
     cron: str,
-    logger: logging.Logger = None,
     raise_exceptions: bool = False,
     max_repetitions: int = None,
 ):
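The rewritten delta helper swaps croniter for the package's own CronJob and makes "now" timezone-aware via the WORKFLOW_CORE_TIMEZONE environment variable (default UTC). A minimal usage sketch, assuming the module path ddeutil.workflow.repeat (inferred from the `from .repeat import repeat_every` line in route.py below):

import os

# Assumption for illustration: schedule against Bangkok time instead of UTC.
# The env var is read on every call, so it can be set before or after import.
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"

from ddeutil.workflow.repeat import cron_valid, get_cronjob_delta

cron_valid("0 0 * * *")                # raises ValueError for a bad crontab
print(get_cronjob_delta("0 0 * * *"))  # seconds until the next midnight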
@@ -34,40 +67,37 @@ def repeat_at(
 
     :param cron: str
         Cron-style string for periodic execution, eg. '0 0 * * *' every midnight
-    :param logger: logging.Logger (default None)
-        Logger object to log exceptions
     :param raise_exceptions: bool (default False)
        Whether to raise exceptions or log them
     :param max_repetitions: int (default None)
         Maximum number of times to repeat the function. If None, repeat
         indefinitely.
-
     """
+    if max_repetitions and max_repetitions <= 0:
+        raise ValueError(
+            "max_repetitions should more than zero if it want to set"
+        )
 
     def decorator(func):
-        is_coroutine = asyncio.iscoroutinefunction(func)
+        is_coroutine: bool = asyncio.iscoroutinefunction(func)
 
         @wraps(func)
         def wrapper(*_args, **_kwargs):
-            repititions = 0
-            if not croniter.is_valid(cron):
-                raise ValueError("Invalid cron expression")
+            repititions: int = 0
+            cron_valid(cron)
 
             async def loop(*args, **kwargs):
                 nonlocal repititions
                 while max_repetitions is None or repititions < max_repetitions:
-                    try:
-                        sleepTime = get_delta(cron)
-                        await asyncio.sleep(sleepTime)
-                        if is_coroutine:
-                            await func(*args, **kwargs)
-                        else:
-                            await run_in_threadpool(func, *args, **kwargs)
-                    except Exception as e:
-                        if logger is not None:
-                            logger.exception(e)
-                        if raise_exceptions:
-                            raise e
+                    sleep_time = get_cronjob_delta(cron)
+                    await asyncio.sleep(sleep_time)
+                    await run_func(
+                        is_coroutine,
+                        func,
+                        *args,
+                        raise_exceptions=raise_exceptions,
+                        **kwargs,
+                    )
                     repititions += 1
 
             ensure_future(loop(*_args, **_kwargs))
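With croniter removed, repeat_at validates through cron_valid and delegates execution and error handling to run_func. Because the wrapper schedules the loop with ensure_future, the decorated function must be called while an event loop is running. A usage sketch; everything outside the decorator call is illustrative:

import asyncio

from ddeutil.workflow.repeat import repeat_at


@repeat_at(cron="* * * * *", max_repetitions=2)
def tick():
    print("tick")


async def main():
    tick()                    # schedules the background loop via ensure_future
    await asyncio.sleep(130)  # keep the loop alive for two cron fires


asyncio.run(main())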
@@ -81,7 +111,6 @@ def repeat_every(
     *,
     seconds: float,
     wait_first: bool = False,
-    logger: logging.Logger = None,
     raise_exceptions: bool = False,
     max_repetitions: int = None,
 ):
@@ -93,17 +122,19 @@ def repeat_every(
     :param wait_first: bool (default False)
         Whether to wait `seconds` seconds before executing the function for the
         first time.
-    :param logger: logging.Logger (default None)
-        The logger to use for logging exceptions.
     :param raise_exceptions: bool (default False)
         Whether to raise exceptions instead of logging them.
     :param max_repetitions: int (default None)
         The maximum number of times to repeat the function. If None, the
         function will repeat indefinitely.
     """
+    if max_repetitions and max_repetitions <= 0:
+        raise ValueError(
+            "max_repetitions should more than zero if it want to set"
+        )
 
     def decorator(func):
-        is_coroutine = asyncio.iscoroutinefunction(func)
+        is_coroutine: bool = asyncio.iscoroutinefunction(func)
 
         @wraps(func)
         async def wrapper(*_args, **_kwargs):
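Both decorators now reject a non-positive max_repetitions before building the decorator. Note that the guard `max_repetitions and max_repetitions <= 0` only fires for negative values: an explicit 0 is falsy, passes the guard, and simply yields a loop that never runs. A quick illustration, assuming the inferred module path:

from ddeutil.workflow.repeat import repeat_every

repeat_every(seconds=5, max_repetitions=-1)  # raises ValueError immediately
repeat_every(seconds=5, max_repetitions=0)   # accepted; loop runs zero times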
@@ -111,19 +142,19 @@ def repeat_every(
 
            async def loop(*args, **kwargs):
                nonlocal repetitions
+
                if wait_first:
                    await asyncio.sleep(seconds)
+
                while max_repetitions is None or repetitions < max_repetitions:
-                    try:
-                        if is_coroutine:
-                            await func(*args, **kwargs)
-                        else:
-                            await run_in_threadpool(func, *args, **kwargs)
-                    except Exception as e:
-                        if logger is not None:
-                            logger.exception(e)
-                        if raise_exceptions:
-                            raise e
+                    await run_func(
+                        is_coroutine,
+                        func,
+                        *args,
+                        raise_exceptions=raise_exceptions,
+                        **kwargs,
+                    )
+
                    repetitions += 1
                    await asyncio.sleep(seconds)
 
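repeat_every follows the same run_func pattern on a fixed interval. A usage sketch, assuming its async wrapper also backgrounds the loop with ensure_future the way repeat_at does (the tail of the function sits outside these hunks):

import asyncio

from ddeutil.workflow.repeat import repeat_every


@repeat_every(seconds=5, wait_first=True, max_repetitions=2)
async def heartbeat():
    print("beat")


async def main():
    await heartbeat()        # schedules the repeating loop
    await asyncio.sleep(20)  # long enough to observe both repetitions


asyncio.run(main())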
ddeutil/workflow/route.py CHANGED
@@ -1,78 +1,92 @@
-from enum import Enum
-
-from fastapi import APIRouter, Request, status
-from pydantic import BaseModel, ConfigDict, Field
-
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from fastapi import APIRouter, HTTPException, Request
+from fastapi import status as st
+from fastapi.responses import UJSONResponse
+
+from .__types import DictData
 from .log import get_logger
-
-logger = get_logger(__name__)
-workflow_route = APIRouter(prefix="/workflow")
+from .pipeline import Pipeline
+from .repeat import repeat_every
+from .utils import Loader
+
+logger = get_logger("ddeutil.workflow")
+workflow = APIRouter(
+    prefix="/workflow",
+    tags=["workflow"],
+)
+schedule = APIRouter(
+    prefix="/schedule",
+    tags=["schedule"],
+)
 
 
-@workflow_route.get("/{name}")
-async def get_pipeline(name: str):
-    return {"message": f"getting pipeline {name}"}
+@workflow.get(
+    "/",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+)
+async def get_workflows():
+    """Return all pipeline workflows that exists in config path."""
+    pipelines: DictData = Loader.finds(Pipeline)
+    return {
+        "message": f"getting all pipelines: {pipelines}",
+    }
+
+
+@workflow.get(
+    "/{name}",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+)
+async def get_workflow(name: str) -> DictData:
+    """Return model of pipeline that passing an input pipeline name."""
+    try:
+        pipeline: Pipeline = Pipeline.from_loader(name=name, externals={})
+    except ValueError:
+        raise HTTPException(
+            status_code=st.HTTP_404_NOT_FOUND,
+            detail=(
+                f"Workflow pipeline name: {name!r} does not found in /conf path"
+            ),
+        ) from None
+    return pipeline.model_dump(
+        by_alias=True,
+        exclude_none=True,
+        exclude_unset=True,
+        exclude_defaults=True,
+    )
 
 
-@workflow_route.get("/{name}/logs")
-async def get_pipeline_log(name: str):
+@workflow.get("/{name}/logs")
+async def get_workflow_logs(name: str):
     return {"message": f"getting pipeline {name} logs"}
 
 
-class JobNotFoundError(Exception):
-    pass
-
+@workflow.get("/{name}/logs/{release}")
+async def get_workflow_release_log(name: str, release: str):
+    return {"message": f"getting pipeline {name} log in release {release}"}
 
-schedule_route = APIRouter(prefix="/schedule", tags=["schedule"])
 
-
-class TriggerEnum(str, Enum):
-    interval = "interval"
-    cron = "cron"
-
-
-class Job(BaseModel):
-    model_config = ConfigDict(
-        json_schema_extra={
-            "example": {
-                "func": "example.main:pytest_job",
-                "trigger": "interval",
-                "seconds": 3,
-                "id": "pytest_job",
-            },
-        },
-    )
-    func: str = Field()
-    trigger: TriggerEnum = Field(title="Trigger type")
-    seconds: int = Field(title="Interval in seconds")
-    id: str = Field(title="Job ID")
-
-
-@schedule_route.post(
-    "/", name="scheduler:add_job", status_code=status.HTTP_201_CREATED
+@workflow.delete(
+    "/{name}/logs/{release}",
+    status_code=st.HTTP_204_NO_CONTENT,
 )
-async def add_job(request: Request, job: Job):
-    job = request.app.scheduler.add_job(**job.dict())
-    return {"job": f"{job.id}"}
+async def del_workflow_release_log(name: str, release: str):
+    return {"message": f"getting pipeline {name} log in release {release}"}
 
 
-@schedule_route.get("/", name="scheduler:get_jobs", response_model=list)
-async def get_jobs(request: Request):
-    jobs = request.app.scheduler.get_jobs()
-    jobs = [
-        {k: v for k, v in job.__getstate__().items() if k != "trigger"}
-        for job in jobs
-    ]
-    return jobs
+@schedule.on_event("startup")
+@repeat_every(seconds=60)
+def schedule_broker_up():
+    logger.info("Start listening schedule from queue ...")
 
 
-@schedule_route.delete("/{job_id}", name="scheduler:remove_job")
-async def remove_job(request: Request, job_id: str):
-    try:
-        deleted = request.app.scheduler.remove_job(job_id=job_id)
-        logger.debug(f"Job {job_id} deleted: {deleted}")
-        return {"job": f"{job_id}"}
-    except AttributeError as err:
-        raise JobNotFoundError(
-            f"No job by the id of {job_id} was found"
-        ) from err
+@schedule.get("/", response_class=UJSONResponse)
+async def get_jobs(request: Request):
+    return {}
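The module now exposes two routers, workflow and schedule, in place of the old APScheduler-backed job CRUD. A hypothetical wiring sketch; the FastAPI app and include_router calls are not part of this diff:

from fastapi import FastAPI

from ddeutil.workflow.route import schedule, workflow

app = FastAPI()
app.include_router(workflow)  # GET /workflow/, /workflow/{name}, log routes
app.include_router(schedule)  # GET /schedule/ plus the 60s startup repeater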