ddeutil-workflow 0.0.23__tar.gz → 0.0.25__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.23/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.25}/PKG-INFO +33 -35
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/README.md +31 -33
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/pyproject.toml +5 -5
- ddeutil_workflow-0.0.25/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__cron.py +6 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__init__.py +8 -7
- ddeutil_workflow-0.0.25/src/ddeutil/workflow/api/__init__.py +1 -0
- {ddeutil_workflow-0.0.23/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/api.py +43 -21
- {ddeutil_workflow-0.0.23/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/repeat.py +2 -2
- {ddeutil_workflow-0.0.23/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/route.py +81 -62
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/cli.py +33 -55
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/conf.py +38 -45
- ddeutil_workflow-0.0.23/src/ddeutil/workflow/on.py → ddeutil_workflow-0.0.25/src/ddeutil/workflow/cron.py +1 -1
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/exceptions.py +3 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/scheduler.py +212 -165
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/stage.py +5 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/utils.py +7 -5
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/workflow.py +149 -149
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25/src/ddeutil_workflow.egg-info}/PKG-INFO +33 -35
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/SOURCES.txt +12 -8
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/requires.txt +1 -1
- ddeutil_workflow-0.0.25/tests/test_cli.py +44 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_conf.py +31 -8
- ddeutil_workflow-0.0.23/tests/test_on.py → ddeutil_workflow-0.0.25/tests/test_cron_on.py +10 -10
- ddeutil_workflow-0.0.23/tests/test_scheduler.py → ddeutil_workflow-0.0.25/tests/test_schedule.py +32 -48
- ddeutil_workflow-0.0.25/tests/test_schedule_control.py +32 -0
- ddeutil_workflow-0.0.25/tests/test_schedule_tasks.py +82 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_stage.py +1 -1
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow.py +7 -7
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow_poke.py +6 -4
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow_release.py +6 -61
- ddeutil_workflow-0.0.25/tests/test_workflow_release_and_queue.py +77 -0
- ddeutil_workflow-0.0.25/tests/test_workflow_schedule.py +126 -0
- ddeutil_workflow-0.0.25/tests/test_workflow_task.py +183 -0
- ddeutil_workflow-0.0.23/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.23/tests/test_scheduler_tasks.py +0 -60
- ddeutil_workflow-0.0.23/tests/test_workflow_schedule.py +0 -58
- ddeutil_workflow-0.0.23/tests/test_workflow_task_data.py +0 -82
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/LICENSE +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/job.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/params.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/result.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_job_exec_py.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_bash.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_hook.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_py.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_trigger.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_utils_filter.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_utils_tag.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_utils_template.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow_exec_hook.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow_exec_needs.py +0 -0
- {ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/tests/test_workflow_job_exec.py +0 -0

{ddeutil_workflow-0.0.23/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.25}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.23
+Version: 0.0.25
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,7 +24,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil>=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
-Requires-Dist: pydantic==2.10.
+Requires-Dist: pydantic==2.10.4
 Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: typer==0.15.1
 Requires-Dist: schedule<2.0.0,==1.2.2
@@ -68,8 +68,8 @@ configuration. It called **Metadata Driven Data Workflow**.
 > with `.yml` files and all of config file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
 >
-> Other workflow that I interest on them and pick some interested feature
-> package:
+> Other workflow tools that I interest on them and pick some interested feature
+> implement to this package:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -85,9 +85,6 @@ If you want to install this package with application add-ons, you should add
 | Python & CLI | `pip install ddeutil-workflow` | :heavy_check_mark: |
 | FastAPI Server | `pip install ddeutil-workflow[api]` | :heavy_check_mark: |
 
-
-> I added this feature to the main milestone.
->
 > :egg: **Docker Images** supported:
 >
 > | Docker Image | Python Version | Support |
@@ -113,7 +110,7 @@ use-case.
 run-py-local:
 
    # Validate model that use to parsing exists for template file
-   type: ddeutil.workflow.Workflow
+   type: ddeutil.workflow.workflow.Workflow
    on:
       # If workflow deploy to schedule, it will running every 5 minutes
      # with Asia/Bangkok timezone.
@@ -182,34 +179,35 @@ The main configuration that use to dynamic changing with your propose of this
 application. If any configuration values do not set yet, it will use default value
 and do not raise any error to you.
 
-| Environment | Component
-
-| `WORKFLOW_ROOT_PATH` |
-| `WORKFLOW_CORE_REGISTRY` |
-| `WORKFLOW_CORE_REGISTRY_FILTER` |
-| `WORKFLOW_CORE_PATH_CONF` |
-| `WORKFLOW_CORE_TIMEZONE` |
-| `WORKFLOW_CORE_STAGE_DEFAULT_ID` |
-| `WORKFLOW_CORE_STAGE_RAISE_ERROR` |
-| `WORKFLOW_CORE_JOB_DEFAULT_ID` |
-| `WORKFLOW_CORE_JOB_RAISE_ERROR` |
-| `WORKFLOW_CORE_MAX_NUM_POKING` |
-| `WORKFLOW_CORE_MAX_JOB_PARALLEL` |
-| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` |
-| `
-| `
-| `
-| `
-| `
-| `
-| `
+| Environment | Component | Default | Description | Remark |
+|:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
+| `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
+| `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
+| `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
+| `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
+| `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
+| `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
+| `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
+| `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+| `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
+| `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
+| `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+| `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+| `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
+| `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
+| `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
+| `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
+| `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
+| `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
+| `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
 
 **API Application**:
 
-| Environment |
-
-| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` |
-| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` |
+| Environment | Component | Default | Description | Remark |
+|:--------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
+| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
+| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
 
 ## :rocket: Deployment
 
@@ -217,7 +215,7 @@ This package able to run as a application service for receive manual trigger
 from the master node via RestAPI or use to be Scheduler background service
 like crontab job but via Python API.
 
-###
+### CLI
 
 ```shell
 (venv) $ ddeutil-workflow schedule
@@ -230,7 +228,7 @@ like crontab job but via Python API.
 ```
 
 > [!NOTE]
-> If this package already deploy, it able to use
+> If this package already deploy, it able to use multiprocess;
 > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
 
 ### Docker Container
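
Both the PKG-INFO long description above and the README below change the template's `type:` value from `ddeutil.workflow.Workflow` to the fully qualified `ddeutil.workflow.workflow.Workflow`. As a rough illustration of why the full dotted path is the safer spelling, here is a generic resolver sketch (my own, not the package's loader); the short form only works if the class happens to be re-exported from the package root:

```python
from importlib import import_module


def resolve_type(dotted: str) -> type:
    """Illustrative resolver for a dotted `type:` string (not the package's own code)."""
    module_path, _, attr = dotted.rpartition(".")
    return getattr(import_module(module_path), attr)


# The new template value names the defining module directly, so resolution does
# not depend on what the package root happens to re-export.
workflow_cls = resolve_type("ddeutil.workflow.workflow.Workflow")
```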

{ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/README.md

@@ -35,8 +35,8 @@ configuration. It called **Metadata Driven Data Workflow**.
 > with `.yml` files and all of config file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
 >
-> Other workflow that I interest on them and pick some interested feature
-> package:
+> Other workflow tools that I interest on them and pick some interested feature
+> implement to this package:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -52,9 +52,6 @@ If you want to install this package with application add-ons, you should add
 | Python & CLI | `pip install ddeutil-workflow` | :heavy_check_mark: |
 | FastAPI Server | `pip install ddeutil-workflow[api]` | :heavy_check_mark: |
 
-
-> I added this feature to the main milestone.
->
 > :egg: **Docker Images** supported:
 >
 > | Docker Image | Python Version | Support |
@@ -80,7 +77,7 @@ use-case.
 run-py-local:
 
    # Validate model that use to parsing exists for template file
-   type: ddeutil.workflow.Workflow
+   type: ddeutil.workflow.workflow.Workflow
    on:
      # If workflow deploy to schedule, it will running every 5 minutes
      # with Asia/Bangkok timezone.
@@ -149,34 +146,35 @@ The main configuration that use to dynamic changing with your propose of this
 application. If any configuration values do not set yet, it will use default value
 and do not raise any error to you.
 
-| Environment | Component
-
-| `WORKFLOW_ROOT_PATH` |
-| `WORKFLOW_CORE_REGISTRY` |
-| `WORKFLOW_CORE_REGISTRY_FILTER` |
-| `WORKFLOW_CORE_PATH_CONF` |
-| `WORKFLOW_CORE_TIMEZONE` |
-| `WORKFLOW_CORE_STAGE_DEFAULT_ID` |
-| `WORKFLOW_CORE_STAGE_RAISE_ERROR` |
-| `WORKFLOW_CORE_JOB_DEFAULT_ID` |
-| `WORKFLOW_CORE_JOB_RAISE_ERROR` |
-| `WORKFLOW_CORE_MAX_NUM_POKING` |
-| `WORKFLOW_CORE_MAX_JOB_PARALLEL` |
-| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` |
-| `
-| `
-| `
-| `
-| `
-| `
-| `
+| Environment | Component | Default | Description | Remark |
+|:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
+| `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
+| `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
+| `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
+| `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
+| `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
+| `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
+| `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
+| `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+| `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
+| `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
+| `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+| `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+| `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
+| `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
+| `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
+| `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
+| `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
+| `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
+| `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
 
 **API Application**:
 
-| Environment |
-
-| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` |
-| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` |
+| Environment | Component | Default | Description | Remark |
+|:--------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
+| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
+| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
 
 ## :rocket: Deployment
 
@@ -184,7 +182,7 @@ This package able to run as a application service for receive manual trigger
 from the master node via RestAPI or use to be Scheduler background service
 like crontab job but via Python API.
 
-###
+### CLI
 
 ```shell
 (venv) $ ddeutil-workflow schedule
@@ -197,7 +195,7 @@ like crontab job but via Python API.
 ```
 
 > [!NOTE]
-> If this package already deploy, it able to use
+> If this package already deploy, it able to use multiprocess;
 > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
 
 ### Docker Container
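
The rebuilt configuration tables above give each variable a component, a default, and a description. As a minimal sketch, a deployment might override a few of them from Python before importing the package; the variable names and values below come straight from the table, while the assumption that they must be set before import is mine (a `.env` file with the same keys would also be picked up through `python-dotenv`):

```python
import os

# Names and defaults are taken from the configuration table above.
os.environ.setdefault("WORKFLOW_ROOT_PATH", ".")
os.environ.setdefault("WORKFLOW_CORE_TIMEZONE", "Asia/Bangkok")
os.environ.setdefault("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2")
os.environ.setdefault("WORKFLOW_LOG_ENABLE_WRITE", "true")
os.environ.setdefault("WORKFLOW_APP_MAX_PROCESS", "2")
```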

{ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/pyproject.toml

@@ -28,7 +28,7 @@ requires-python = ">=3.9.13"
 dependencies = [
     "ddeutil>=0.4.3",
     "ddeutil-io[yaml,toml]>=0.2.3",
-    "pydantic==2.10.
+    "pydantic==2.10.4",
     "python-dotenv==1.0.1",
     "typer==0.15.1",
     "schedule==1.2.2,<2.0.0",
@@ -65,11 +65,11 @@ source = ["ddeutil.workflow"]
 omit = [
     "src/ddeutil/workflow/__about__.py",
     "src/ddeutil/workflow/__cron.py",
-    # NOTE: Omit this files because it does not ready to production.
-    "src/ddeutil/workflow/api.py",
     "src/ddeutil/workflow/cli.py",
-    "src/ddeutil/workflow/
-    "src/ddeutil/workflow/
+    "src/ddeutil/workflow/api/__init__.py",
+    "src/ddeutil/workflow/api/api.py",
+    "src/ddeutil/workflow/api/repeat.py",
+    "src/ddeutil/workflow/api/route.py",
 ]
 
 [tool.coverage.report]

ddeutil_workflow-0.0.25/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.25"

{ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__cron.py

@@ -736,6 +736,12 @@ class CronRunner:
         self.is_year: bool = isinstance(cron, CronJobYear)
         self.reset_flag: bool = True
 
+    def __repr__(self) -> str:
+        return (
+            f"{self.__class__.__name__}(CronJob('{self.cron}'), "
+            f"{self.date:%Y-%m-%d %H:%M:%S}, tz='{self.tz}')"
+        )
+
     def reset(self) -> None:
         """Resets the iterator to start time."""
         self.date: datetime = self.__start_date
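
The `__repr__` added to `CronRunner` folds the cron expression, the runner's current date, and the timezone into a single line. The `CronRunner` constructor is not part of this diff, so the snippet below is a self-contained stand-in that only reproduces the same format string:

```python
from datetime import datetime
from zoneinfo import ZoneInfo


class ReprDemo:
    """Stand-in that mirrors the f-string format added to CronRunner.__repr__."""

    def __init__(self, cron: str, date: datetime, tz: str) -> None:
        self.cron = cron
        self.date = date
        self.tz = tz

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}(CronJob('{self.cron}'), "
            f"{self.date:%Y-%m-%d %H:%M:%S}, tz='{self.tz}')"
        )


print(repr(ReprDemo("*/5 * * * *", datetime(2024, 1, 1, tzinfo=ZoneInfo("Asia/Bangkok")), "Asia/Bangkok")))
# ReprDemo(CronJob('*/5 * * * *'), 2024-01-01 00:00:00, tz='Asia/Bangkok')
```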

{ddeutil_workflow-0.0.23 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__init__.py

@@ -3,11 +3,17 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .__cron import CronRunner
 from .conf import (
     Config,
     FileLog,
     Loader,
 )
+from .cron import (
+    On,
+    YearOn,
+    interval2crontab,
+)
 from .exceptions import (
     JobException,
     ParamValueException,
@@ -19,11 +25,6 @@ from .job import (
     Job,
     Strategy,
 )
-from .on import (
-    On,
-    YearOn,
-    interval2crontab,
-)
 from .params import (
     ChoiceParam,
     DatetimeParam,
@@ -35,6 +36,7 @@ from .result import Result
 from .scheduler import (
     Schedule,
     WorkflowSchedule,
+    schedule_runner,
 )
 from .stage import (
     BashStage,
@@ -68,11 +70,10 @@ from .utils import (
     map_post_filter,
     not_in_template,
     param2template,
-    queue2str,
     str2template,
     tag,
 )
 from .workflow import (
     Workflow,
-
+    WorkflowTask,
 )
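
The net effect of the import reshuffle above is that `On`, `YearOn`, and `interval2crontab` now come from the renamed `cron` module, while `CronRunner`, `schedule_runner`, and `WorkflowTask` gain top-level re-exports and `queue2str` loses its. Assuming 0.0.25 is installed, the public imports therefore look like this (all of these names appear in the `__init__` shown above):

```python
# Callers import from the package root and do not need to know that the
# defining module moved from .on to .cron.
from ddeutil.workflow import On, YearOn, interval2crontab
from ddeutil.workflow import CronRunner, Workflow, WorkflowTask, schedule_runner
```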

ddeutil_workflow-0.0.25/src/ddeutil/workflow/api/__init__.py

@@ -0,0 +1 @@
+from .api import app

{ddeutil_workflow-0.0.23/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/api.py

@@ -11,7 +11,6 @@ import uuid
 from collections.abc import AsyncIterator
 from datetime import datetime, timedelta
 from queue import Empty, Queue
-from threading import Thread
 from typing import TypedDict
 
 from dotenv import load_dotenv
@@ -20,34 +19,39 @@ from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
-from
-from
+from ..__about__ import __version__
+from ..conf import config, get_logger
+from ..scheduler import ReleaseThread, ReleaseThreads
+from ..workflow import WorkflowQueue, WorkflowTask
 from .repeat import repeat_at, repeat_every
-from .workflow import WorkflowTaskData
 
 load_dotenv()
 logger = get_logger("ddeutil.workflow")
 
 
 class State(TypedDict):
+    """TypeDict for State of FastAPI application."""
+
+    # NOTE: For upper queue route.
     upper_queue: Queue
     upper_result: dict[str, str]
+
+    # NOTE: For schedule listener.
     scheduler: list[str]
-    workflow_threads:
-    workflow_tasks: list[
-    workflow_queue: dict[str,
-    workflow_running: dict[str, list[datetime]]
+    workflow_threads: ReleaseThreads
+    workflow_tasks: list[WorkflowTask]
+    workflow_queue: dict[str, WorkflowQueue]
 
 
 @contextlib.asynccontextmanager
 async def lifespan(a: FastAPI) -> AsyncIterator[State]:
+    """Lifespan function for the FastAPI application."""
     a.state.upper_queue = Queue()
     a.state.upper_result = {}
     a.state.scheduler = []
     a.state.workflow_threads = {}
     a.state.workflow_tasks = []
     a.state.workflow_queue = {}
-    a.state.workflow_running = {}
 
     await asyncio.create_task(broker_upper_messages())
 
@@ -65,7 +69,6 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
         #
         "scheduler": a.state.scheduler,
         "workflow_queue": a.state.workflow_queue,
-        "workflow_running": a.state.workflow_running,
         "workflow_threads": a.state.workflow_threads,
         "workflow_tasks": a.state.workflow_tasks,
     }
@@ -114,12 +117,11 @@ async def get_result(request_id: str) -> dict[str, str]:
 
 
 @app.get("/")
-@app.get("/api")
 async def health():
     return {"message": "Workflow API already start up"}
 
 
-@app.post("/
+@app.post(f"{config.prefix_path}/upper")
 async def message_upper(payload: Payload):
     """Convert message from any case to the upper case."""
     request_id: str = str(uuid.uuid4())
@@ -129,27 +131,47 @@ async def message_upper(payload: Payload):
     return await get_result(request_id)
 
 
+# NOTE: Enable the workflow route.
 if config.enable_route_workflow:
-    from .route import
+    from .route import workflow_route
+
+    app.include_router(workflow_route, prefix=config.prefix_path)
 
-    app.include_router(workflow)
 
+# NOTE: Enable the schedule route.
 if config.enable_route_schedule:
-    from
-    from
+    from ..conf import FileLog
+    from ..scheduler import schedule_task
+    from .route import schedule_route
 
-    app.include_router(
+    app.include_router(schedule_route, prefix=config.prefix_path)
 
-    @
+    @schedule_route.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
-    def
+    def scheduler_listener():
+        """Schedule broker every minute at 02 second."""
         logger.debug(
             f"[SCHEDULER]: Start listening schedule from queue "
             f"{app.state.scheduler}"
         )
         if app.state.workflow_tasks:
-
+            schedule_task(
                 app.state.workflow_tasks,
-                stop=datetime.now() + timedelta(minutes=1),
+                stop=datetime.now(config.tz) + timedelta(minutes=1),
+                queue=app.state.workflow_queue,
                 threads=app.state.workflow_threads,
+                log=FileLog,
             )
+
+    @schedule_route.on_event("startup")
+    @repeat_at(cron="*/5 * * * *")
+    def monitoring():
+        logger.debug("[MONITOR]: Start monitoring threading.")
+        snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
+        for t_name in snapshot_threads:
+
+            thread_release: ReleaseThread = app.state.workflow_threads[t_name]
+
+            # NOTE: remove the thread that running success.
+            if not thread_release["thread"].is_alive():
+                app.state.workflow_threads.pop(t_name)
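
The rewritten `api.py` types its shared objects with a `State` TypedDict and wires them up in a FastAPI lifespan context. A minimal, generic version of that pattern (trimmed field names, my own sketch rather than the package's module) looks like this:

```python
import contextlib
from collections.abc import AsyncIterator
from queue import Queue
from typing import TypedDict

from fastapi import FastAPI


class State(TypedDict):
    """Shape of the shared lifespan state (illustrative subset of the fields above)."""

    upper_queue: Queue
    scheduler: list[str]


@contextlib.asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncIterator[State]:
    # Objects built once at startup; Starlette exposes this mapping on
    # request.state for every request handled by the app.
    yield {"upper_queue": Queue(), "scheduler": []}


app = FastAPI(lifespan=lifespan)
```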

{ddeutil_workflow-0.0.23/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/repeat.py

@@ -12,8 +12,8 @@ from functools import wraps
 
 from starlette.concurrency import run_in_threadpool
 
-from
-from
+from ..__cron import CronJob
+from ..conf import config, get_logger
 
 logger = get_logger("ddeutil.workflow")
 
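
Only the import changes of `repeat.py` are visible in this diff, but they show the building blocks it uses: `functools.wraps`, `run_in_threadpool`, and the package's `CronJob`. As a generic illustration of the periodic-decorator pattern that `api.py` consumes above (not the package's actual implementation), a simplified time-based variant could look like this:

```python
import asyncio
import functools
from collections.abc import Callable

from starlette.concurrency import run_in_threadpool


def repeat_every_seconds(seconds: float) -> Callable:
    """Sketch of a repeat-style decorator: run a sync callable periodically
    without blocking the event loop. Hypothetical; only repeat.py's imports
    are shown in this diff."""

    def decorator(func: Callable[[], None]) -> Callable[[], None]:
        @functools.wraps(func)
        def wrapper() -> None:
            async def _loop() -> None:
                while True:
                    # Run the sync callable off the event loop, then sleep.
                    await run_in_threadpool(func)
                    await asyncio.sleep(seconds)

            # Schedule the loop on the already-running event loop, e.g. from a
            # FastAPI startup hook, which is how repeat_at is used in api.py above.
            asyncio.get_running_loop().create_task(_loop())

        return wrapper

    return decorator
```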
|