ddeutil-workflow 0.0.34__tar.gz → 0.0.36__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/PKG-INFO +32 -27
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/README.md +29 -26
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/pyproject.toml +14 -3
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__init__.py +8 -3
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/api.py +58 -14
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/repeat.py +21 -11
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/__init__.py +9 -0
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/job.py +73 -0
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/logs.py +64 -0
- ddeutil_workflow-0.0.34/src/ddeutil/workflow/api/route.py → ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/schedules.py +3 -131
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/workflows.py +137 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/audit.py +9 -6
- ddeutil_workflow-0.0.34/src/ddeutil/workflow/call.py → ddeutil_workflow-0.0.36/src/ddeutil/workflow/caller.py +4 -4
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/job.py +63 -24
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/logs.py +326 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/params.py +87 -22
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/result.py +132 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/scheduler.py +69 -41
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/stages.py +68 -14
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/utils.py +7 -1
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/workflow.py +2 -16
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/PKG-INFO +32 -27
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/SOURCES.txt +8 -2
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/requires.txt +2 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_call_tag.py +2 -2
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_job.py +26 -2
- ddeutil_workflow-0.0.36/tests/test_logs.py +6 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_params.py +39 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_utils.py +16 -1
- ddeutil_workflow-0.0.34/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.34/src/ddeutil/workflow/result.py +0 -256
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/LICENSE +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/conf.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/cron.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/templates.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_audit.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_cron_on.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_job_exec.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_schedule_pending.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_schedule_workflow.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_scheduler_control.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec_job.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec_poke.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_workflow_exec_release.py +0 -0
- {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.34
+Version: 0.0.36
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -29,6 +29,8 @@ Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: api
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
+Requires-Dist: httpx; extra == "api"
+Requires-Dist: ujson; extra == "api"
 
 # Workflow Orchestration
 
@@ -62,7 +64,7 @@ configuration. It called **Metadata Driven Data Workflow**.
 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Python 3.13 unlock GIL :unlock:)
+   (🐍 Python 3.13 unlock GIL :unlock:)
 
 ---
 
@@ -78,45 +80,48 @@ flowchart LR
 
     subgraph Docker Container
         direction TB
-        G@{ shape: rounded, label: "Observe<br>Application" }
+        G@{ shape: rounded, label: "📡Observe<br>Application" }
     end
 
     subgraph Docker Container
         direction TB
-        B@{ shape: rounded, label: "Workflow<br>Application" }
+        B@{ shape: rounded, label: "🏃Workflow<br>Application" }
     end
 
-    A
-    B
-    G
+    A <-->|action &<br>response| B
+    B -...-> |response| G
+    G -...-> |request| B
 
     subgraph Data Context
-
-
+        D@{ shape: processes, label: "Logs" }
+        E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end
 
-    subgraph
-
+    subgraph Config Context
+        F@{ shape: tag-rect, label: "YAML<br>files" }
     end
 
-
-
+    A ---> |push| H(Repo)
+    H -.-> |pull| F
 
-    B
-
-    B
+    B <-->|disable &<br>read| F
+
+    B <-->|read &<br>write| E
+
+    B -->|write| D
 
     D -.->|read| G
     E -.->|read| G
 ```
 
 > [!WARNING]
->
->
->
-
->
->
+> _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
+> and all configs pattern from several data orchestration framework tools from
+> my data engineering experience. :grimacing:
+
+> [!NOTE]
+> Other workflow orchestration tools that I interest and pick them to be inspiration
+> some for this package:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -127,10 +132,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
 If you want to install this package with application add-ons, you should add
 `app` in installation;
 
-| Use-case | Install Optional |
-
-| Python | `ddeutil-workflow` | :heavy_check_mark:
-| FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark:
+| Use-case | Install Optional | Support |
+|----------------|--------------------------|:-------------------:|
+| Python | `ddeutil-workflow` | :heavy_check_mark: |
+| FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |
 
 ## :beers: Usage
 
@@ -292,7 +297,7 @@ like crontab job but via Python API.
 ### API Server
 
 ```shell
-(venv) $ uvicorn
+(venv) $ uvicorn ddeutil.workflow.api:app \
     --host 127.0.0.1 \
     --port 80 \
     --no-access-log
````
{ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/README.md

````diff
@@ -30,7 +30,7 @@ configuration. It called **Metadata Driven Data Workflow**.
 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Python 3.13 unlock GIL :unlock:)
+   (🐍 Python 3.13 unlock GIL :unlock:)
 
 ---
 
@@ -46,45 +46,48 @@ flowchart LR
 
     subgraph Docker Container
         direction TB
-        G@{ shape: rounded, label: "Observe<br>Application" }
+        G@{ shape: rounded, label: "📡Observe<br>Application" }
     end
 
     subgraph Docker Container
         direction TB
-        B@{ shape: rounded, label: "Workflow<br>Application" }
+        B@{ shape: rounded, label: "🏃Workflow<br>Application" }
     end
 
-    A
-    B
-    G
+    A <-->|action &<br>response| B
+    B -...-> |response| G
+    G -...-> |request| B
 
     subgraph Data Context
-
-
+        D@{ shape: processes, label: "Logs" }
+        E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end
 
-    subgraph
-
+    subgraph Config Context
+        F@{ shape: tag-rect, label: "YAML<br>files" }
     end
 
-
-
+    A ---> |push| H(Repo)
+    H -.-> |pull| F
 
-    B
-
-    B
+    B <-->|disable &<br>read| F
+
+    B <-->|read &<br>write| E
+
+    B -->|write| D
 
     D -.->|read| G
     E -.->|read| G
 ```
 
 > [!WARNING]
->
->
->
-
->
->
+> _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
+> and all configs pattern from several data orchestration framework tools from
+> my data engineering experience. :grimacing:
+
+> [!NOTE]
+> Other workflow orchestration tools that I interest and pick them to be inspiration
+> some for this package:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -95,10 +98,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
 If you want to install this package with application add-ons, you should add
 `app` in installation;
 
-| Use-case | Install Optional |
-
-| Python | `ddeutil-workflow` | :heavy_check_mark:
-| FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark:
+| Use-case | Install Optional | Support |
+|----------------|--------------------------|:-------------------:|
+| Python | `ddeutil-workflow` | :heavy_check_mark: |
+| FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |
 
 ## :beers: Usage
 
@@ -260,7 +263,7 @@ like crontab job but via Python API.
 ### API Server
 
 ```shell
-(venv) $ uvicorn
+(venv) $ uvicorn ddeutil.workflow.api:app \
     --host 127.0.0.1 \
     --port 80 \
     --no-access-log
````
{ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/pyproject.toml

````diff
@@ -35,7 +35,11 @@ dependencies = [
 dynamic = ["version"]
 
 [project.optional-dependencies]
-api = [
+api = [
+    "fastapi>=0.115.0,<1.0.0",
+    "httpx",
+    "ujson",
+]
 
 [project.urls]
 Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
@@ -66,7 +70,11 @@ omit = [
     "src/ddeutil/workflow/api/__init__.py",
     "src/ddeutil/workflow/api/api.py",
     "src/ddeutil/workflow/api/repeat.py",
-    "src/ddeutil/workflow/api/
+    "src/ddeutil/workflow/api/routes/__init__.py",
+    "src/ddeutil/workflow/api/routes/job.py",
+    "src/ddeutil/workflow/api/routes/logs.py",
+    "src/ddeutil/workflow/api/routes/schedules.py",
+    "src/ddeutil/workflow/api/routes/workflows.py",
     "app.py",
 ]
 
@@ -89,7 +97,10 @@ addopts = [
     "--strict-config",
     "--strict-markers",
 ]
-filterwarnings = [
+filterwarnings = [
+    "error",
+    "ignore::DeprecationWarning",
+]
 log_cli = true
 log_cli_level = "DEBUG"
 log_cli_format = "%(asctime)s [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)"
````
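The `api` extra now pulls in `httpx` and `ujson` alongside FastAPI. A minimal way to confirm what an installed 0.0.36 environment exposes, using only the standard library; this assumes the package was installed with `pip install "ddeutil-workflow[api]"`, and the snippet is illustrative rather than part of the release:

```python
from importlib.metadata import metadata, version

# The installed distribution should report 0.0.36 after upgrading.
print(version("ddeutil-workflow"))

# The `api` extra should now list fastapi, httpx, and ujson.
pkg_meta = metadata("ddeutil-workflow")
api_requires = [
    req for req in (pkg_meta.get_all("Requires-Dist") or [])
    if 'extra == "api"' in req
]
print(api_requires)
```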
ddeutil_workflow-0.0.36/src/ddeutil/workflow/__about__.py

````diff
@@ -0,0 +1 @@
+__version__: str = "0.0.36"
````
{ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/__init__.py

````diff
@@ -9,7 +9,7 @@ from .audit import (
     Audit,
     get_audit,
 )
-from .
+from .caller import (
     ReturnTagFunc,
     TagFunc,
     extract_call,
@@ -37,8 +37,15 @@ from .exceptions import (
 )
 from .job import (
     Job,
+    RunsOn,
     Strategy,
 )
+from .logs import (
+    TraceData,
+    TraceLog,
+    get_dt_tznow,
+    get_trace,
+)
 from .params import (
     ChoiceParam,
     DatetimeParam,
@@ -49,9 +56,7 @@ from .params import (
 from .result import (
     Result,
     Status,
-    TraceLog,
     default_gen_id,
-    get_dt_tznow,
 )
 from .scheduler import (
     Schedule,
````
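With `call.py` renamed to `caller.py` and the new `logs.py` module, the trace helpers move out of `result` but stay re-exported at the package top level. A minimal sketch of the 0.0.36 public surface, based only on the `__init__.py` changes shown above (illustrative, not an official usage example):

```python
# Names re-exported by ddeutil.workflow after this release, per the
# __init__.py changes above.
from ddeutil.workflow import (
    Result,        # still exported from .result
    RunsOn,        # newly exported from .job
    TraceData,     # now provided by the new .logs module
    TraceLog,      # moved from .result to .logs
    get_dt_tznow,  # moved from .result to .logs
    get_trace,     # new helper from .logs
)

print(Result, RunsOn, TraceData, TraceLog, get_dt_tznow, get_trace)
```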
{ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/api.py

````diff
@@ -11,7 +11,11 @@ from datetime import datetime, timedelta
 from typing import TypedDict
 
 from dotenv import load_dotenv
-from fastapi import FastAPI
+from fastapi import FastAPI, Request
+from fastapi import status as st
+from fastapi.encoders import jsonable_encoder
+from fastapi.exceptions import RequestValidationError
+from fastapi.middleware.cors import CORSMiddleware
 from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse
 
@@ -20,6 +24,7 @@ from ..conf import config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
+from .routes import job, log
 
 load_dotenv()
 logger = get_logger("ddeutil.workflow")
@@ -60,39 +65,57 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
 
 
 app = FastAPI(
-    titile="Workflow
+    titile="Workflow",
     description=(
-        "This is workflow FastAPI
-        "execute
+        "This is a workflow FastAPI application that use to manage manual "
+        "execute, logging, and schedule workflow via RestAPI."
     ),
     version=__version__,
     lifespan=lifespan,
     default_response_class=UJSONResponse,
 )
 app.add_middleware(GZipMiddleware, minimum_size=1000)
+origins: list[str] = [
+    "http://localhost",
+    "http://localhost:88",
+    "http://localhost:80",
+]
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=origins,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
 
 
 @app.get("/")
 async def health():
-
+    """Index view that not return any template without json status."""
+    return {"message": "Workflow already start up with healthy status."}
+
 
+# NOTE Add the jobs and logs routes by default.
+app.include_router(job, prefix=config.prefix_path)
+app.include_router(log, prefix=config.prefix_path)
 
-
+
+# NOTE: Enable the workflows route.
 if config.enable_route_workflow:
-    from .
+    from .routes import workflow
 
-    app.include_router(
+    app.include_router(workflow, prefix=config.prefix_path)
 
 
-# NOTE: Enable the
+# NOTE: Enable the schedules route.
 if config.enable_route_schedule:
     from ..audit import get_audit
     from ..scheduler import schedule_task
-    from .
+    from .routes import schedule
 
-    app.include_router(
+    app.include_router(schedule, prefix=config.prefix_path)
 
-    @
+    @schedule.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
     def scheduler_listener():
         """Schedule broker every minute at 02 second."""
@@ -106,12 +129,13 @@ if config.enable_route_schedule:
             stop=datetime.now(config.tz) + timedelta(minutes=1),
             queue=app.state.workflow_queue,
             threads=app.state.workflow_threads,
-
+            audit=get_audit(),
         )
 
-    @
+    @schedule.on_event("startup")
     @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
+        """Monitoring workflow thread that running in the background."""
         logger.debug("[MONITOR]: Start monitoring threading.")
         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
         for t_name in snapshot_threads:
@@ -121,3 +145,23 @@ if config.enable_route_schedule:
             # NOTE: remove the thread that running success.
             if not thread_release["thread"].is_alive():
                 app.state.workflow_threads.pop(t_name)
+
+
+@app.exception_handler(RequestValidationError)
+async def validation_exception_handler(
+    request: Request, exc: RequestValidationError
+):
+    return UJSONResponse(
+        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
+        content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
+    )
+
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run(
+        app,
+        host="0.0.0.0",
+        port=80,
+    )
````
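Given the new health endpoint and the `httpx` dependency added to the `api` extra, a minimal smoke test of a locally running server might look like the sketch below. The host and port, and the assumption that the app was started as in the `__main__` block above (or via the `uvicorn` command from the README), are illustrative:

```python
import httpx

# The index route added in this release returns a plain JSON health message.
resp = httpx.get("http://localhost:80/", timeout=10.0)
resp.raise_for_status()
print(resp.json())
# Expected per the handler above:
# {"message": "Workflow already start up with healthy status."}
```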
{ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.36}/src/ddeutil/workflow/api/repeat.py

````diff
@@ -21,17 +21,26 @@ logger = get_logger("ddeutil.workflow")
 def get_cronjob_delta(cron: str) -> float:
     """This function returns the time delta between now and the next cron
     execution time.
+
+    :rtype: float
     """
     now: datetime = datetime.now(tz=config.tz)
     cron = CronJob(cron)
     return (cron.schedule(now).next - now).total_seconds()
 
 
-def cron_valid(cron: str):
+def cron_valid(cron: str, raise_error: bool = True) -> bool:
+    """Check this crontab string value is valid with its cron syntax.
+
+    :rtype: bool
+    """
     try:
         CronJob(cron)
+        return True
     except Exception as err:
-
+        if raise_error:
+            raise ValueError(f"Crontab value does not valid, {cron}") from err
+        return False
 
 
 async def run_func(
@@ -41,6 +50,7 @@ async def run_func(
     raise_exceptions: bool = False,
     **kwargs,
 ):
+    """Run function inside the repeat decorator functions."""
     try:
         if is_coroutine:
             await func(*args, **kwargs)
@@ -62,11 +72,11 @@ def repeat_at(
     """This function returns a decorator that makes a function execute
     periodically as per the cron expression provided.
 
-    :param cron: str
-
-    :param delay:
-    :param raise_exceptions: bool
-
+    :param cron: (str) A Cron-style string for periodic execution, e.g.
+        '0 0 * * *' every midnight
+    :param delay: (float) A delay seconds value.
+    :param raise_exceptions: (bool) A raise exception flag. Whether to raise
+        exceptions or log them if raise was set be false.
     :param max_repetitions: int (default None)
         Maximum number of times to repeat the function. If None, repeat
         indefinitely.
@@ -81,12 +91,12 @@
 
     @wraps(func)
     def wrapper(*_args, **_kwargs):
-
+        repetitions: int = 0
         cron_valid(cron)
 
         async def loop(*args, **kwargs):
-            nonlocal
-            while max_repetitions is None or
+            nonlocal repetitions
+            while max_repetitions is None or repetitions < max_repetitions:
                 sleep_time = get_cronjob_delta(cron) + delay
                 await asyncio.sleep(sleep_time)
                 await run_func(
@@ -96,7 +106,7 @@
                     raise_exceptions=raise_exceptions,
                     **kwargs,
                 )
-
+                repetitions += 1
 
         ensure_future(loop(*_args, **_kwargs))
 
````
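For reference, `api.py` above applies this decorator as `@repeat_at(cron="* * * * *", delay=2)` on the scheduler listener. A standalone sketch of the same pattern follows; the `tick` function is hypothetical, and the call has to happen inside a running asyncio event loop because the wrapper schedules its loop with `ensure_future`:

```python
import asyncio

from ddeutil.workflow.api.repeat import repeat_at


@repeat_at(cron="* * * * *", delay=2, max_repetitions=2)
def tick() -> None:
    # Hypothetical callback; fires shortly after each minute boundary.
    print("cron tick")


async def main() -> None:
    tick()  # schedules the repeating background loop via ensure_future
    await asyncio.sleep(150)  # keep the loop alive long enough to observe runs


if __name__ == "__main__":
    asyncio.run(main())
```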
ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/__init__.py

````diff
@@ -0,0 +1,9 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from .job import job_route as job
+from .logs import log_route as log
+from .schedules import schedule_route as schedule
+from .workflows import workflow_route as workflow
````
ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/job.py

````diff
@@ -0,0 +1,73 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from fastapi import APIRouter
+from fastapi.responses import UJSONResponse
+from pydantic import BaseModel
+
+from ...__types import DictData
+from ...conf import get_logger
+from ...exceptions import JobException
+from ...job import Job
+from ...result import Result
+
+logger = get_logger("ddeutil.workflow")
+
+
+job_route = APIRouter(
+    prefix="/job",
+    tags=["job"],
+    default_response_class=UJSONResponse,
+)
+
+
+class ResultPost(BaseModel):
+    context: DictData
+    run_id: str
+    parent_run_id: Optional[str] = None
+
+
+@job_route.post(path="/execute/")
+async def job_execute(
+    result: ResultPost,
+    job: Job,
+    params: dict[str, Any],
+):
+    """Execute job via API."""
+    rs: Result = Result(
+        context=result.context,
+        run_id=result.run_id,
+        parent_run_id=result.parent_run_id,
+    )
+    try:
+        job.set_outputs(
+            job.execute(
+                params=params,
+                run_id=rs.run_id,
+                parent_run_id=rs.parent_run_id,
+            ).context,
+            to=params,
+        )
+    except JobException as err:
+        rs.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+
+    return {
+        "message": "Start execute job via API.",
+        "result": {
+            "run_id": rs.run_id,
+            "parent_run_id": rs.parent_run_id,
+        },
+        "job": job.model_dump(
+            by_alias=True,
+            exclude_none=True,
+            exclude_unset=True,
+            exclude_defaults=True,
+        ),
+        "params": params,
+    }
````
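Because `job_execute` declares three body parameters (`result`, `job`, `params`), FastAPI expects them embedded under their own keys in a single JSON body. A client-side sketch is below; the `/api` prefix stands in for whatever `config.prefix_path` is set to, and the job payload is an illustrative guess rather than the documented `Job` schema:

```python
import httpx

payload = {
    # Matches the ResultPost model above.
    "result": {"context": {}, "run_id": "manual-0001"},
    # Illustrative Job body; the real fields come from the Job pydantic model.
    "job": {"stages": [{"name": "Echo stage", "echo": "hello world"}]},
    # Parameters passed through to job.execute().
    "params": {"name": "demo"},
}

resp = httpx.post(
    "http://localhost:80/api/job/execute/", json=payload, timeout=30.0
)
print(resp.status_code)
print(resp.json())
```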
ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/logs.py

````diff
@@ -0,0 +1,64 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+"""This route include audit and trace log paths."""
+from __future__ import annotations
+
+from fastapi import APIRouter
+from fastapi.responses import UJSONResponse
+
+from ...audit import get_audit
+from ...logs import get_trace_obj
+
+log_route = APIRouter(
+    prefix="/logs",
+    tags=["logs", "trace", "audit"],
+    default_response_class=UJSONResponse,
+)
+
+
+@log_route.get(path="/trace/")
+async def get_traces():
+    """Get all trace logs."""
+    return {
+        "message": "Getting trace logs",
+        "traces": list(get_trace_obj().find_logs()),
+    }
+
+
+@log_route.get(path="/trace/{run_id}")
+async def get_trace_with_id(run_id: str):
+    """Get trace log with specific running ID."""
+    return get_trace_obj().find_log_with_id(run_id)
+
+
+@log_route.get(path="/audit/")
+async def get_audits():
+    """Get all audit logs."""
+    return {
+        "message": "Getting audit logs",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(path="/audit/{workflow}/")
+async def get_audit_with_workflow(workflow: str):
+    """Get all audit logs."""
+    return {
+        "message": f"Getting audit logs with workflow name {workflow}",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(path="/audit/{workflow}/{release}")
+async def get_audit_with_workflow_release(workflow: str, release: str):
+    """Get all audit logs."""
+    return {
+        "message": (
+            f"Getting audit logs with workflow name {workflow} and release "
+            f"{release}"
+        ),
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
````
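A matching client sketch for the new log routes; the base URL and `/api` prefix are illustrative (the real prefix comes from `config.prefix_path`), and the run ID, workflow name, and release value are placeholders:

```python
import httpx

base = "http://localhost:80/api/logs"

# List every trace log the server can find.
print(httpx.get(f"{base}/trace/").json())

# Fetch one trace by running ID, then the audits for a workflow release.
print(httpx.get(f"{base}/trace/some-run-id").json())
print(httpx.get(f"{base}/audit/wf-demo/2024-01-01").json())
```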