ddeutil-workflow 0.0.33__tar.gz → 0.0.35__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.33/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.35}/PKG-INFO +36 -32
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/README.md +35 -31
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/__init__.py +19 -10
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/api/api.py +13 -8
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/__init__.py +8 -0
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/logs.py +36 -0
- ddeutil_workflow-0.0.33/src/ddeutil/workflow/api/route.py → ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/schedules.py +2 -131
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/workflows.py +137 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/audit.py +28 -37
- ddeutil_workflow-0.0.33/src/ddeutil/workflow/hook.py → ddeutil_workflow-0.0.35/src/ddeutil/workflow/caller.py +27 -27
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/conf.py +47 -12
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/job.py +149 -138
- ddeutil_workflow-0.0.35/src/ddeutil/workflow/logs.py +214 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/params.py +40 -12
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/result.py +40 -61
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/scheduler.py +185 -163
- ddeutil_workflow-0.0.33/src/ddeutil/workflow/stage.py → ddeutil_workflow-0.0.35/src/ddeutil/workflow/stages.py +105 -42
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/utils.py +20 -2
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/workflow.py +142 -117
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35/src/ddeutil_workflow.egg-info}/PKG-INFO +36 -32
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/SOURCES.txt +10 -5
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_audit.py +9 -9
- ddeutil_workflow-0.0.33/tests/test_hook_tag.py → ddeutil_workflow-0.0.35/tests/test_call_tag.py +4 -4
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_conf.py +6 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_job.py +30 -2
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_job_exec.py +23 -14
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_job_exec_strategy.py +18 -11
- ddeutil_workflow-0.0.35/tests/test_logs.py +6 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_params.py +21 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_result.py +3 -17
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_schedule_pending.py +1 -1
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_schedule_tasks.py +1 -1
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_schedule_workflow.py +1 -1
- ddeutil_workflow-0.0.35/tests/test_scheduler_control.py +49 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_stage.py +1 -1
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_stage_handler_exec.py +76 -54
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_utils.py +9 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec.py +32 -26
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec_poke.py +31 -24
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec_release.py +12 -10
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_workflow_task.py +15 -6
- ddeutil_workflow-0.0.33/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.33/tests/test_schedule_control.py +0 -38
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/LICENSE +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/pyproject.toml +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/api/repeat.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/cron.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/templates.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_cron_on.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.33 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec_job.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.33
+Version: 0.0.35
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -62,7 +62,7 @@ configuration. It called **Metadata Driven Data Workflow**.
 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Python 3.13 unlock GIL :unlock:)
+   (🐍 Python 3.13 unlock GIL :unlock:)
 
 ---
 
@@ -74,49 +74,52 @@ you should to set the data layer separate this core program before run this appl
 
 ```mermaid
 flowchart LR
-
-
-
+    A((fa:fa-user User))
+
+    subgraph Docker Container
+        direction TB
         G@{ shape: rounded, label: "Observe<br>Application" }
-    end
     end
 
-    A --->|action| B(Workflow<br>Application)
-    B ---> |response| A
-    B -..-> |response| G
-    G -..-> |request| B
-
     subgraph Docker Container
-
+        direction TB
+        B@{ shape: rounded, label: "Workflow<br>Application" }
     end
 
+    A <-->|action &<br>response| B
+    B -...-> |response| G
+    G -...-> |request| B
+
     subgraph Data Context
-
-
+        D@{ shape: processes, label: "Logs" }
+        E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end
 
     subgraph Git Context
-
+        F@{ shape: tag-rect, label: "YAML<br>files" }
     end
 
-
-
+    A ---> |push| H(Repo)
+    H -.-> |pull| F
+
+    B <-->|disable &<br>read| F
 
-    B
-
-    B
+    B <-->|read &<br>write| E
+
+    B -->|write| D
 
     D -.->|read| G
     E -.->|read| G
 ```
 
 > [!WARNING]
->
->
->
-
->
->
+> _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
+> and all configs pattern from several data orchestration framework tools from
+> my data engineering experience. :grimacing:
+
+> [!NOTE]
+> Other workflow orchestration tools that I interest and pick them to be inspiration
+> some for this package:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -138,9 +141,9 @@ This is examples that use workflow file for running common Data Engineering
 use-case.
 
 > [!IMPORTANT]
-> I recommend you to use the `hook` stage for all actions that you want to do
+> I recommend you to use the `call` stage for all actions that you want to do
 > with workflow activity that you want to orchestrate. Because it is able to
-> dynamic an input argument with the same hook function that make you use less
+> dynamic an input argument with the same call function that make you use less
 > time to maintenance your data workflows.
 
 ```yaml
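
The `call` stage named here runs a Python function that was registered with the `tag` decorator and discovered through `make_registry`, both re-exported from the new `caller` module in the `__init__.py` hunk further down. A minimal sketch, assuming a `tag(name, alias=...)` signature and a hypothetical `tasks` module; neither detail is confirmed by this diff:

```python
# Hypothetical module that the core REGISTRY setting would point at (e.g. tasks.py).
from ddeutil.workflow import tag


@tag("demo", alias="extract-api-data")  # assumed signature: tag(name, alias=...)
def extract_api_data(source: str, sink: str) -> dict[str, int]:
    # A `call` stage would resolve this function through the registry and
    # pass `source`/`sink` as templated arguments from the YAML file.
    print(f"extracting {source} -> {sink}")
    return {"records": 100}
```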
@@ -182,7 +185,7 @@ run-py-local:
       writing_mode: flatten
       aws_s3_path: my-data/open-data/${{ params.source-extract }}
 
-      # This Authentication code should implement with your custom hook
+      # This Authentication code should implement with your custom call
       # function. The template allow you to use environment variable.
       aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
       aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
@@ -247,7 +250,7 @@ it will use default value and do not raise any error to you.
 | Name | Component | Default | Description |
 |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
 | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
+| **REGISTRY** | Core | `.` | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
 | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -264,9 +267,10 @@ it will use default value and do not raise any error to you.
 | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
 | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
 | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+| **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
 | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
-| **
-| **PATH** | Audit | `./
+| **ENABLE_WRITE** | Log | `false` | |
+| **PATH** | Audit | `./audits` | |
 | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
 | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
 | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
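
The rows above add a file-trace format (`FORMAT_FILE`), a `Log`-level `ENABLE_WRITE` flag that defaults to `false`, and move the audit destination to `./audits`. These settings are read from environment variables; a sketch with hypothetical variable names following a `WORKFLOW_<COMPONENT>_<NAME>` pattern, which should be verified against `src/ddeutil/workflow/conf.py` in this release:

```python
import os

# Hypothetical keys; the real names live in ddeutil/workflow/conf.py.
os.environ["WORKFLOW_LOG_ENABLE_WRITE"] = "false"   # new Log flag (default: false)
os.environ["WORKFLOW_AUDIT_PATH"] = "./audits"      # audit logs now default to ./audits
os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"  # unchanged default
```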
@@ -30,7 +30,7 @@ configuration. It called **Metadata Driven Data Workflow**.
 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Python 3.13 unlock GIL :unlock:)
+   (🐍 Python 3.13 unlock GIL :unlock:)
 
 ---
 
@@ -42,49 +42,52 @@ you should to set the data layer separate this core program before run this appl
 
 ```mermaid
 flowchart LR
-
-
-
+    A((fa:fa-user User))
+
+    subgraph Docker Container
+        direction TB
         G@{ shape: rounded, label: "Observe<br>Application" }
-    end
     end
 
-    A --->|action| B(Workflow<br>Application)
-    B ---> |response| A
-    B -..-> |response| G
-    G -..-> |request| B
-
     subgraph Docker Container
-
+        direction TB
+        B@{ shape: rounded, label: "Workflow<br>Application" }
     end
 
+    A <-->|action &<br>response| B
+    B -...-> |response| G
+    G -...-> |request| B
+
     subgraph Data Context
-
-
+        D@{ shape: processes, label: "Logs" }
+        E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end
 
     subgraph Git Context
-
+        F@{ shape: tag-rect, label: "YAML<br>files" }
     end
 
-
-
+    A ---> |push| H(Repo)
+    H -.-> |pull| F
+
+    B <-->|disable &<br>read| F
 
-    B
-
-    B
+    B <-->|read &<br>write| E
+
+    B -->|write| D
 
     D -.->|read| G
     E -.->|read| G
 ```
 
 > [!WARNING]
->
->
->
-
->
->
+> _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
+> and all configs pattern from several data orchestration framework tools from
+> my data engineering experience. :grimacing:
+
+> [!NOTE]
+> Other workflow orchestration tools that I interest and pick them to be inspiration
+> some for this package:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -106,9 +109,9 @@ This is examples that use workflow file for running common Data Engineering
 use-case.
 
 > [!IMPORTANT]
-> I recommend you to use the `hook` stage for all actions that you want to do
+> I recommend you to use the `call` stage for all actions that you want to do
 > with workflow activity that you want to orchestrate. Because it is able to
-> dynamic an input argument with the same hook function that make you use less
+> dynamic an input argument with the same call function that make you use less
 > time to maintenance your data workflows.
 
 ```yaml
@@ -150,7 +153,7 @@ run-py-local:
       writing_mode: flatten
       aws_s3_path: my-data/open-data/${{ params.source-extract }}
 
-      # This Authentication code should implement with your custom hook
+      # This Authentication code should implement with your custom call
      # function. The template allow you to use environment variable.
       aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
       aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
@@ -215,7 +218,7 @@ it will use default value and do not raise any error to you.
 | Name | Component | Default | Description |
 |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
 | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
+| **REGISTRY** | Core | `.` | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
 | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -232,9 +235,10 @@ it will use default value and do not raise any error to you.
 | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
 | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
 | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+| **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
 | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
-| **
-| **PATH** | Audit | `./
+| **ENABLE_WRITE** | Log | `false` | |
+| **PATH** | Audit | `./audits` | |
 | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
 | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
 | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
@@ -0,0 +1 @@
+__version__: str = "0.0.35"
@@ -9,6 +9,13 @@ from .audit import (
     Audit,
     get_audit,
 )
+from .caller import (
+    ReturnTagFunc,
+    TagFunc,
+    extract_call,
+    make_registry,
+    tag,
+)
 from .conf import (
     Config,
     Loader,
@@ -28,17 +35,15 @@ from .exceptions import (
     UtilException,
     WorkflowException,
 )
-from .hook import (
-    ReturnTagFunc,
-    TagFunc,
-    extract_hook,
-    make_registry,
-    tag,
-)
 from .job import (
     Job,
     Strategy,
 )
+from .logs import (
+    TraceLog,
+    get_dt_tznow,
+    get_trace,
+)
 from .params import (
     ChoiceParam,
     DatetimeParam,
@@ -46,7 +51,11 @@ from .params import (
     Param,
     StrParam,
 )
-from .result import Result
+from .result import (
+    Result,
+    Status,
+    default_gen_id,
+)
 from .scheduler import (
     Schedule,
     ScheduleWorkflow,
@@ -54,10 +63,10 @@ from .scheduler import (
     schedule_runner,
     schedule_task,
 )
-from .stage import (
+from .stages import (
     BashStage,
+    CallStage,
     EmptyStage,
-    HookStage,
     PyStage,
     Stage,
     TriggerStage,
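
Taken together, the `__init__.py` hunks above rename the public import surface: `hook` becomes `caller`, `stage` becomes `stages`, and the `result` and `logs` exports grow. A before-and-after sketch for downstream code, with the 0.0.33 side reconstructed from the removed lines:

```python
# 0.0.33
from ddeutil.workflow import HookStage, extract_hook, make_registry, tag

# 0.0.35
from ddeutil.workflow import CallStage, extract_call, make_registry, tag

# Newly exported in 0.0.35 per the hunks above.
from ddeutil.workflow import Result, Status, TraceLog, default_gen_id, get_trace
```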
@@ -20,6 +20,7 @@ from ..conf import config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
+from .routes import log
 
 load_dotenv()
 logger = get_logger("ddeutil.workflow")
@@ -77,22 +78,26 @@ async def health():
     return {"message": "Workflow API already start up"}
 
 
-# NOTE
+# NOTE Add the logs route by default.
+app.include_router(log, prefix=config.prefix_path)
+
+
+# NOTE: Enable the workflows route.
 if config.enable_route_workflow:
-    from .route import workflow_route
+    from .routes import workflow
 
-    app.include_router(workflow_route, prefix=config.prefix_path)
+    app.include_router(workflow, prefix=config.prefix_path)
 
 
-# NOTE: Enable the schedule route.
+# NOTE: Enable the schedules route.
 if config.enable_route_schedule:
     from ..audit import get_audit
     from ..scheduler import schedule_task
-    from .route import schedule_route
+    from .routes import schedule
 
-    app.include_router(schedule_route, prefix=config.prefix_path)
+    app.include_router(schedule, prefix=config.prefix_path)
 
-    @schedule_route.on_event("startup")
+    @schedule.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
     def scheduler_listener():
         """Schedule broker every minute at 02 second."""
@@ -109,7 +114,7 @@ if config.enable_route_schedule:
             log=get_audit(),
         )
 
-    @schedule_route.on_event("startup")
+    @schedule.on_event("startup")
     @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
         logger.debug("[MONITOR]: Start monitoring threading.")
@@ -0,0 +1,8 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from .logs import log_route as log
+from .schedules import schedule_route as schedule
+from .workflows import workflow_route as workflow
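
These aliases (`log`, `schedule`, `workflow`) are what `api/api.py` mounts above. A minimal sketch of wiring the same routers onto a bare FastAPI app; the `/api` prefix is an illustrative stand-in for the package's `config.prefix_path`:

```python
from fastapi import FastAPI

from ddeutil.workflow.api.routes import log, schedule, workflow

app = FastAPI()
for router in (log, workflow, schedule):
    # The package passes prefix=config.prefix_path; "/api" is assumed here.
    app.include_router(router, prefix="/api")
```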
@@ -0,0 +1,36 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from fastapi import APIRouter
+from fastapi.responses import UJSONResponse
+
+from ...conf import get_logger
+from ...logs import get_trace_obj
+
+logger = get_logger("ddeutil.workflow")
+
+
+# NOTE: Start create the schedule routes.
+#
+log_route = APIRouter(
+    prefix="/logs",
+    tags=["logs"],
+    default_response_class=UJSONResponse,
+)
+
+
+@log_route.get(path="/")
+async def get_logs():
+    return {
+        "message": "Getting logs",
+        "audits": list(get_trace_obj().find_logs()),
+    }
+
+
+@log_route.get(path="/{run_id}")
+async def get_log_with_run_id(run_id: str):
+    return get_trace_obj().find_log_with_id(run_id)
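
The new router exposes `GET /logs/` and `GET /logs/{run_id}`. A quick client sketch, assuming a local server and no extra prefix in front of `/logs`; the run id shown is hypothetical:

```python
import httpx

BASE = "http://localhost:8000"  # assumed host/port

# List every trace log the application has written.
print(httpx.get(f"{BASE}/logs/").json())

# Fetch the trace records for a single (hypothetical) run id.
print(httpx.get(f"{BASE}/logs/20240101T000000-demo").json())
```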
@@ -6,30 +6,17 @@
 from __future__ import annotations
 
 import copy
-from dataclasses import asdict
 from datetime import datetime, timedelta
-from typing import Any
 
 from fastapi import APIRouter, HTTPException, Request
 from fastapi import status as st
 from fastapi.responses import UJSONResponse
-from pydantic import BaseModel
 
-from ..__types import DictData
-from ..audit import Audit, get_audit
-from ..conf import Loader, config, get_logger
-from ..result import Result
-from ..scheduler import Schedule
-from ..workflow import Workflow
+from ...conf import config, get_logger
+from ...scheduler import Schedule
 
 logger = get_logger("ddeutil.workflow")
 
-workflow_route = APIRouter(
-    prefix="/workflows",
-    tags=["workflows"],
-    default_response_class=UJSONResponse,
-)
-
 schedule_route = APIRouter(
     prefix="/schedules",
     tags=["schedules"],
@@ -37,122 +24,6 @@ schedule_route = APIRouter(
 )
 
 
-@workflow_route.get(path="/")
-async def get_workflows() -> DictData:
-    """Return all workflow workflows that exists in config path."""
-    workflows: DictData = dict(Loader.finds(Workflow))
-    return {
-        "message": f"Getting all workflows: {len(workflows)}",
-        "count": len(workflows),
-        "workflows": workflows,
-    }
-
-
-@workflow_route.get(path="/{name}")
-async def get_workflow_by_name(name: str) -> DictData:
-    """Return model of workflow that passing an input workflow name."""
-    try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
-    except ValueError as err:
-        logger.exception(err)
-        raise HTTPException(
-            status_code=st.HTTP_404_NOT_FOUND,
-            detail=(
-                f"Workflow workflow name: {name!r} does not found in /conf path"
-            ),
-        ) from None
-    return workflow.model_dump(
-        by_alias=True,
-        exclude_none=True,
-        exclude_unset=True,
-        exclude_defaults=True,
-    )
-
-
-class ExecutePayload(BaseModel):
-    params: dict[str, Any]
-
-
-@workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
-async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
-    """Return model of workflow that passing an input workflow name."""
-    try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
-    except ValueError:
-        raise HTTPException(
-            status_code=st.HTTP_404_NOT_FOUND,
-            detail=(
-                f"Workflow workflow name: {name!r} does not found in /conf path"
-            ),
-        ) from None
-
-    # NOTE: Start execute manually
-    try:
-        result: Result = workflow.execute(params=payload.params)
-    except Exception as err:
-        raise HTTPException(
-            status_code=st.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"{type(err)}: {err}",
-        ) from None
-
-    return asdict(result)
-
-
-@workflow_route.get(path="/{name}/logs")
-async def get_workflow_logs(name: str):
-    try:
-        return {
-            "message": f"Getting workflow {name!r} logs",
-            "logs": [
-                log.model_dump(
-                    by_alias=True,
-                    exclude_none=True,
-                    exclude_unset=True,
-                    exclude_defaults=True,
-                )
-                for log in get_audit().find_logs(name=name)
-            ],
-        }
-    except FileNotFoundError:
-        raise HTTPException(
-            status_code=st.HTTP_404_NOT_FOUND,
-            detail=f"Does not found log for workflow {name!r}",
-        ) from None
-
-
-@workflow_route.get(path="/{name}/logs/{release}")
-async def get_workflow_release_log(name: str, release: str):
-    try:
-        log: Audit = get_audit().find_log_with_release(
-            name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
-        )
-    except FileNotFoundError:
-        raise HTTPException(
-            status_code=st.HTTP_404_NOT_FOUND,
-            detail=(
-                f"Does not found log for workflow {name!r} "
-                f"with release {release!r}"
-            ),
-        ) from None
-    return {
-        "message": f"Getting workflow {name!r} log in release {release}",
-        "log": log.model_dump(
-            by_alias=True,
-            exclude_none=True,
-            exclude_unset=True,
-            exclude_defaults=True,
-        ),
-    }
-
-
-@workflow_route.delete(
-    path="/{name}/logs/{release}",
-    status_code=st.HTTP_204_NO_CONTENT,
-)
-async def del_workflow_release_log(name: str, release: str):
-    return {"message": f"Deleted workflow {name!r} log in release {release}"}
-
-
 @schedule_route.get(path="/{name}")
 async def get_schedules(name: str):
     try: