ddeutil-workflow 0.0.35__tar.gz → 0.0.36__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/PKG-INFO +11 -9
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/README.md +8 -8
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/pyproject.toml +14 -3
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__init__.py +2 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/api.py +47 -8
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/repeat.py +21 -11
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/routes/__init__.py +1 -0
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/job.py +73 -0
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/logs.py +64 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/routes/schedules.py +1 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/audit.py +6 -3
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/job.py +6 -16
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/logs.py +326 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/params.py +52 -15
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/result.py +3 -5
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/scheduler.py +31 -10
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/stages.py +61 -4
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/utils.py +7 -1
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/workflow.py +1 -15
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/PKG-INFO +11 -9
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/requires.txt +2 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_job.py +0 -4
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_logs.py +1 -1
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_params.py +18 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_utils.py +16 -1
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/logs.py +0 -36
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/logs.py +0 -214
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/LICENSE +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/caller.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/conf.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/cron.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/templates.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_audit.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_call_tag.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_cron_on.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_job_exec.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_schedule_pending.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_schedule_workflow.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_scheduler_control.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec_job.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec_poke.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec_release.py +0 -0
- {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.35
+Version: 0.0.36
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -29,6 +29,8 @@ Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: api
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
+Requires-Dist: httpx; extra == "api"
+Requires-Dist: ujson; extra == "api"

 # Workflow Orchestration

@@ -78,12 +80,12 @@ flowchart LR

     subgraph Docker Container
         direction TB
-        G@{ shape: rounded, label: "Observe<br>Application" }
+        G@{ shape: rounded, label: "📡Observe<br>Application" }
     end

     subgraph Docker Container
         direction TB
-        B@{ shape: rounded, label: "Workflow<br>Application" }
+        B@{ shape: rounded, label: "🏃Workflow<br>Application" }
     end

     A <-->|action &<br>response| B
@@ -95,7 +97,7 @@ flowchart LR
         E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end

-    subgraph
+    subgraph Config Context
         F@{ shape: tag-rect, label: "YAML<br>files" }
     end

@@ -130,10 +132,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
 If you want to install this package with application add-ons, you should add
 `app` in installation;

-| Use-case | Install Optional |
-
-| Python | `ddeutil-workflow` | :heavy_check_mark:
-| FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark:
+| Use-case       | Install Optional         |       Support       |
+|----------------|--------------------------|:-------------------:|
+| Python         | `ddeutil-workflow`       | :heavy_check_mark:  |
+| FastAPI Server | `ddeutil-workflow[api]`  | :heavy_check_mark:  |

 ## :beers: Usage

@@ -295,7 +297,7 @@ like crontab job but via Python API.
 ### API Server

 ```shell
-(venv) $ uvicorn
+(venv) $ uvicorn ddeutil.workflow.api:app \
     --host 127.0.0.1 \
     --port 80 \
     --no-access-log
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/README.md

@@ -46,12 +46,12 @@ flowchart LR

     subgraph Docker Container
         direction TB
-        G@{ shape: rounded, label: "Observe<br>Application" }
+        G@{ shape: rounded, label: "📡Observe<br>Application" }
     end

     subgraph Docker Container
         direction TB
-        B@{ shape: rounded, label: "Workflow<br>Application" }
+        B@{ shape: rounded, label: "🏃Workflow<br>Application" }
     end

     A <-->|action &<br>response| B
@@ -63,7 +63,7 @@ flowchart LR
         E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end

-    subgraph
+    subgraph Config Context
         F@{ shape: tag-rect, label: "YAML<br>files" }
     end

@@ -98,10 +98,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
 If you want to install this package with application add-ons, you should add
 `app` in installation;

-| Use-case | Install Optional |
-
-| Python | `ddeutil-workflow` | :heavy_check_mark:
-| FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark:
+| Use-case       | Install Optional         |       Support       |
+|----------------|--------------------------|:-------------------:|
+| Python         | `ddeutil-workflow`       | :heavy_check_mark:  |
+| FastAPI Server | `ddeutil-workflow[api]`  | :heavy_check_mark:  |

 ## :beers: Usage

@@ -263,7 +263,7 @@ like crontab job but via Python API.
 ### API Server

 ```shell
-(venv) $ uvicorn
+(venv) $ uvicorn ddeutil.workflow.api:app \
     --host 127.0.0.1 \
     --port 80 \
     --no-access-log
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/pyproject.toml

@@ -35,7 +35,11 @@ dependencies = [
 dynamic = ["version"]

 [project.optional-dependencies]
-api = [
+api = [
+    "fastapi>=0.115.0,<1.0.0",
+    "httpx",
+    "ujson",
+]

 [project.urls]
 Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
@@ -66,7 +70,11 @@ omit = [
     "src/ddeutil/workflow/api/__init__.py",
     "src/ddeutil/workflow/api/api.py",
     "src/ddeutil/workflow/api/repeat.py",
-    "src/ddeutil/workflow/api/
+    "src/ddeutil/workflow/api/routes/__init__.py",
+    "src/ddeutil/workflow/api/routes/job.py",
+    "src/ddeutil/workflow/api/routes/logs.py",
+    "src/ddeutil/workflow/api/routes/schedules.py",
+    "src/ddeutil/workflow/api/routes/workflows.py",
     "app.py",
 ]

@@ -89,7 +97,10 @@ addopts = [
     "--strict-config",
     "--strict-markers",
 ]
-filterwarnings = [
+filterwarnings = [
+    "error",
+    "ignore::DeprecationWarning",
+]
 log_cli = true
 log_cli_level = "DEBUG"
 log_cli_format = "%(asctime)s [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)"
ddeutil_workflow-0.0.36/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.36"
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/api.py

@@ -11,7 +11,11 @@ from datetime import datetime, timedelta
 from typing import TypedDict

 from dotenv import load_dotenv
-from fastapi import FastAPI
+from fastapi import FastAPI, Request
+from fastapi import status as st
+from fastapi.encoders import jsonable_encoder
+from fastapi.exceptions import RequestValidationError
+from fastapi.middleware.cors import CORSMiddleware
 from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse

@@ -20,7 +24,7 @@ from ..conf import config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
-from .routes import log
+from .routes import job, log

 load_dotenv()
 logger = get_logger("ddeutil.workflow")
@@ -61,24 +65,38 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:


 app = FastAPI(
-    titile="Workflow
+    titile="Workflow",
     description=(
-        "This is workflow FastAPI
-        "execute
+        "This is a workflow FastAPI application that use to manage manual "
+        "execute, logging, and schedule workflow via RestAPI."
     ),
     version=__version__,
     lifespan=lifespan,
     default_response_class=UJSONResponse,
 )
 app.add_middleware(GZipMiddleware, minimum_size=1000)
+origins: list[str] = [
+    "http://localhost",
+    "http://localhost:88",
+    "http://localhost:80",
+]
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=origins,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)


 @app.get("/")
 async def health():
-
+    """Index view that not return any template without json status."""
+    return {"message": "Workflow already start up with healthy status."}


-# NOTE Add the logs
+# NOTE Add the jobs and logs routes by default.
+app.include_router(job, prefix=config.prefix_path)
 app.include_router(log, prefix=config.prefix_path)


@@ -111,12 +129,13 @@ if config.enable_route_schedule:
             stop=datetime.now(config.tz) + timedelta(minutes=1),
             queue=app.state.workflow_queue,
             threads=app.state.workflow_threads,
-
+            audit=get_audit(),
         )

     @schedule.on_event("startup")
     @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
+        """Monitoring workflow thread that running in the background."""
         logger.debug("[MONITOR]: Start monitoring threading.")
         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
         for t_name in snapshot_threads:
@@ -126,3 +145,23 @@ if config.enable_route_schedule:
             # NOTE: remove the thread that running success.
             if not thread_release["thread"].is_alive():
                 app.state.workflow_threads.pop(t_name)
+
+
+@app.exception_handler(RequestValidationError)
+async def validation_exception_handler(
+    request: Request, exc: RequestValidationError
+):
+    return UJSONResponse(
+        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
+        content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
+    )
+
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run(
+        app,
+        host="0.0.0.0",
+        port=80,
+    )
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/repeat.py

@@ -21,17 +21,26 @@ logger = get_logger("ddeutil.workflow")
 def get_cronjob_delta(cron: str) -> float:
     """This function returns the time delta between now and the next cron
     execution time.
+
+    :rtype: float
     """
     now: datetime = datetime.now(tz=config.tz)
     cron = CronJob(cron)
     return (cron.schedule(now).next - now).total_seconds()


-def cron_valid(cron: str):
+def cron_valid(cron: str, raise_error: bool = True) -> bool:
+    """Check this crontab string value is valid with its cron syntax.
+
+    :rtype: bool
+    """
     try:
         CronJob(cron)
+        return True
     except Exception as err:
-
+        if raise_error:
+            raise ValueError(f"Crontab value does not valid, {cron}") from err
+        return False


 async def run_func(
@@ -41,6 +50,7 @@ async def run_func(
     raise_exceptions: bool = False,
     **kwargs,
 ):
+    """Run function inside the repeat decorator functions."""
     try:
         if is_coroutine:
             await func(*args, **kwargs)
@@ -62,11 +72,11 @@ def repeat_at(
     """This function returns a decorator that makes a function execute
     periodically as per the cron expression provided.

-    :param cron: str
-
-    :param delay:
-    :param raise_exceptions: bool
-
+    :param cron: (str) A Cron-style string for periodic execution, e.g.
+        '0 0 * * *' every midnight
+    :param delay: (float) A delay seconds value.
+    :param raise_exceptions: (bool) A raise exception flag. Whether to raise
+        exceptions or log them if raise was set be false.
     :param max_repetitions: int (default None)
         Maximum number of times to repeat the function. If None, repeat
         indefinitely.
@@ -81,12 +91,12 @@ def repeat_at(

     @wraps(func)
     def wrapper(*_args, **_kwargs):
-
+        repetitions: int = 0
         cron_valid(cron)

         async def loop(*args, **kwargs):
-            nonlocal
-            while max_repetitions is None or
+            nonlocal repetitions
+            while max_repetitions is None or repetitions < max_repetitions:
                 sleep_time = get_cronjob_delta(cron) + delay
                 await asyncio.sleep(sleep_time)
                 await run_func(
@@ -96,7 +106,7 @@ def repeat_at(
                     raise_exceptions=raise_exceptions,
                     **kwargs,
                 )
-
+                repetitions += 1

         ensure_future(loop(*_args, **_kwargs))

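The `repeat_at` decorator reconstructed above is the same one that drives the schedule monitor in `api.py`. A minimal usage sketch, assuming it is attached to a FastAPI startup hook; the `app` object and the `heartbeat` function below are illustrative, not part of the package:

```python
from fastapi import FastAPI

from ddeutil.workflow.api.repeat import repeat_at

app = FastAPI()


@app.on_event("startup")
@repeat_at(cron="*/5 * * * *", delay=10)
def heartbeat() -> None:
    # Runs every five minutes once the app has started; exceptions are logged
    # rather than raised unless raise_exceptions=True is passed.
    print("heartbeat tick")
```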
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/routes/__init__.py

@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .job import job_route as job
 from .logs import log_route as log
 from .schedules import schedule_route as schedule
 from .workflows import workflow_route as workflow
ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/job.py

@@ -0,0 +1,73 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from fastapi import APIRouter
+from fastapi.responses import UJSONResponse
+from pydantic import BaseModel
+
+from ...__types import DictData
+from ...conf import get_logger
+from ...exceptions import JobException
+from ...job import Job
+from ...result import Result
+
+logger = get_logger("ddeutil.workflow")
+
+
+job_route = APIRouter(
+    prefix="/job",
+    tags=["job"],
+    default_response_class=UJSONResponse,
+)
+
+
+class ResultPost(BaseModel):
+    context: DictData
+    run_id: str
+    parent_run_id: Optional[str] = None
+
+
+@job_route.post(path="/execute/")
+async def job_execute(
+    result: ResultPost,
+    job: Job,
+    params: dict[str, Any],
+):
+    """Execute job via API."""
+    rs: Result = Result(
+        context=result.context,
+        run_id=result.run_id,
+        parent_run_id=result.parent_run_id,
+    )
+    try:
+        job.set_outputs(
+            job.execute(
+                params=params,
+                run_id=rs.run_id,
+                parent_run_id=rs.parent_run_id,
+            ).context,
+            to=params,
+        )
+    except JobException as err:
+        rs.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+
+    return {
+        "message": "Start execute job via API.",
+        "result": {
+            "run_id": rs.run_id,
+            "parent_run_id": rs.parent_run_id,
+        },
+        "job": job.model_dump(
+            by_alias=True,
+            exclude_none=True,
+            exclude_unset=True,
+            exclude_defaults=True,
+        ),
+        "params": params,
+    }
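A hedged sketch of calling the new `/job/execute/` route with `httpx`, which this release adds to the `api` extra. The `/api` prefix, run ID, job payload, and params below are illustrative assumptions; the real prefix comes from `config.prefix_path` and the `job` body must validate against the `Job` model:

```python
import httpx

# All literal values here are hypothetical examples, not taken from the package.
payload = {
    "result": {"context": {}, "run_id": "demo-run-id"},
    "job": {"stages": [{"name": "Echo Stage", "echo": "hello world"}]},
    "params": {"name": "demo"},
}
response = httpx.post(
    "http://127.0.0.1:80/api/job/execute/",
    json=payload,
    timeout=30.0,
)
print(response.status_code, response.json().get("message"))
```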
ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/logs.py

@@ -0,0 +1,64 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+"""This route include audit and trace log paths."""
+from __future__ import annotations
+
+from fastapi import APIRouter
+from fastapi.responses import UJSONResponse
+
+from ...audit import get_audit
+from ...logs import get_trace_obj
+
+log_route = APIRouter(
+    prefix="/logs",
+    tags=["logs", "trace", "audit"],
+    default_response_class=UJSONResponse,
+)
+
+
+@log_route.get(path="/trace/")
+async def get_traces():
+    """Get all trace logs."""
+    return {
+        "message": "Getting trace logs",
+        "traces": list(get_trace_obj().find_logs()),
+    }
+
+
+@log_route.get(path="/trace/{run_id}")
+async def get_trace_with_id(run_id: str):
+    """Get trace log with specific running ID."""
+    return get_trace_obj().find_log_with_id(run_id)
+
+
+@log_route.get(path="/audit/")
+async def get_audits():
+    """Get all audit logs."""
+    return {
+        "message": "Getting audit logs",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(path="/audit/{workflow}/")
+async def get_audit_with_workflow(workflow: str):
+    """Get all audit logs."""
+    return {
+        "message": f"Getting audit logs with workflow name {workflow}",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(path="/audit/{workflow}/{release}")
+async def get_audit_with_workflow_release(workflow: str, release: str):
+    """Get all audit logs."""
+    return {
+        "message": (
+            f"Getting audit logs with workflow name {workflow} and release "
+            f"{release}"
+        ),
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
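Likewise, a minimal sketch of reading the new trace and audit endpoints with `httpx`; the `/api` prefix, the workflow name, and the release value are assumptions for illustration only:

```python
import httpx

base = "http://127.0.0.1:80/api/logs"

# List every captured trace log.
traces = httpx.get(f"{base}/trace/").json()

# Audit logs for one workflow and one release (hypothetical values).
audits = httpx.get(f"{base}/audit/wf-example/2024-01-01T00:00:00").json()

print(traces["message"])
print(audits["message"])
```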
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/audit.py

@@ -112,7 +112,8 @@ class FileAudit(BaseAudit):
         :param release: A release datetime that want to search log.

         :raise FileNotFoundError:
-        :raise NotImplementedError:
+        :raise NotImplementedError: If an input release does not pass to this
+            method. Because this method does not implement latest log.

         :rtype: Self
         """
@@ -181,7 +182,9 @@ class FileAudit(BaseAudit):
             trace.debug("[LOG]: Skip writing log cause config was set")
             return self

-        log_file: Path =
+        log_file: Path = (
+            self.pointer() / f"{self.parent_run_id or self.run_id}.log"
+        )
         log_file.write_text(
             json.dumps(
                 self.model_dump(exclude=excluded),
@@ -196,7 +199,7 @@ class FileAudit(BaseAudit):
 class SQLiteAudit(BaseAudit): # pragma: no cov
     """SQLite Audit Pydantic Model."""

-    table_name: ClassVar[str] = "
+    table_name: ClassVar[str] = "audits"
     schemas: ClassVar[
         str
     ] = """
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/job.py

@@ -32,7 +32,7 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import config, get_logger
+from .conf import config
 from .exceptions import (
     JobException,
     StageException,
@@ -48,7 +48,6 @@ from .utils import (
     gen_id,
 )

-logger = get_logger("ddeutil.workflow")
 MatrixFilter = list[dict[str, Union[str, int]]]


@@ -59,7 +58,6 @@ __all__: TupleStr = (
     "RunsOn",
     "RunsOnLocal",
     "RunsOnSelfHosted",
-    "RunsOnDocker",
     "RunsOnK8s",
     "make",
 )
@@ -225,12 +223,11 @@ class RunsOnType(str, Enum):
     """Runs-On enum object."""

     LOCAL: str = "local"
-    DOCKER: str = "docker"
     SELF_HOSTED: str = "self_hosted"
     K8S: str = "k8s"


-class BaseRunsOn(BaseModel):
+class BaseRunsOn(BaseModel): # pragma: no cov
     model_config = ConfigDict(use_enum_values=True)

     type: Literal[RunsOnType.LOCAL]
@@ -240,13 +237,13 @@ class BaseRunsOn(BaseModel):
     )


-class RunsOnLocal(BaseRunsOn):
+class RunsOnLocal(BaseRunsOn): # pragma: no cov
     """Runs-on local."""

     type: Literal[RunsOnType.LOCAL] = Field(default=RunsOnType.LOCAL)


-class RunsOnSelfHosted(BaseRunsOn):
+class RunsOnSelfHosted(BaseRunsOn): # pragma: no cov
     """Runs-on self-hosted."""

     type: Literal[RunsOnType.SELF_HOSTED] = Field(
@@ -254,13 +251,7 @@ class RunsOnSelfHosted(BaseRunsOn):
     )


-class RunsOnDocker(BaseRunsOn):
-    """Runs-on local Docker."""
-
-    type: Literal[RunsOnType.DOCKER] = Field(default=RunsOnType.DOCKER)
-
-
-class RunsOnK8s(BaseRunsOn):
+class RunsOnK8s(BaseRunsOn): # pragma: no cov
     """Runs-on Kubernetes."""

     type: Literal[RunsOnType.K8S] = Field(default=RunsOnType.K8S)
@@ -270,7 +261,6 @@ RunsOn = Annotated[
     Union[
         RunsOnLocal,
         RunsOnSelfHosted,
-        RunsOnDocker,
         RunsOnK8s,
     ],
     Field(discriminator="type"),
@@ -286,7 +276,7 @@ class Job(BaseModel):

     Data Validate:
         >>> job = {
-        ...     "runs-on":
+        ...     "runs-on": {"type": "local"},
         ...     "strategy": {
         ...         "max-parallel": 1,
         ...         "matrix": {