ddeutil-workflow 0.0.24.tar.gz → 0.0.25.tar.gz

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (65)
  1. {ddeutil_workflow-0.0.24/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.25}/PKG-INFO +32 -35
  2. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/README.md +31 -34
  3. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/pyproject.toml +4 -4
  4. ddeutil_workflow-0.0.25/src/ddeutil/workflow/__about__.py +1 -0
  5. ddeutil_workflow-0.0.25/src/ddeutil/workflow/api/__init__.py +1 -0
  6. {ddeutil_workflow-0.0.24/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/api.py +42 -20
  7. {ddeutil_workflow-0.0.24/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/repeat.py +2 -2
  8. {ddeutil_workflow-0.0.24/src/ddeutil/workflow → ddeutil_workflow-0.0.25/src/ddeutil/workflow/api}/route.py +81 -62
  9. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/cli.py +33 -55
  10. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/conf.py +21 -44
  11. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/cron.py +1 -1
  12. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/exceptions.py +3 -0
  13. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/scheduler.py +35 -23
  14. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/utils.py +7 -1
  15. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/workflow.py +37 -20
  16. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25/src/ddeutil_workflow.egg-info}/PKG-INFO +32 -35
  17. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/SOURCES.txt +6 -3
  18. ddeutil_workflow-0.0.25/tests/test_cli.py +44 -0
  19. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_conf.py +24 -7
  20. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_cron_on.py +3 -3
  21. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_schedule.py +4 -4
  22. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_schedule_tasks.py +12 -2
  23. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_stage.py +1 -1
  24. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow.py +7 -7
  25. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_poke.py +6 -4
  26. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_release.py +6 -72
  27. ddeutil_workflow-0.0.25/tests/test_workflow_release_and_queue.py +77 -0
  28. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_schedule.py +14 -6
  29. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_task.py +13 -4
  30. ddeutil_workflow-0.0.24/src/ddeutil/workflow/__about__.py +0 -1
  31. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/LICENSE +0 -0
  32. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/setup.cfg +0 -0
  33. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__cron.py +0 -0
  34. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__init__.py +0 -0
  35. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/__types.py +0 -0
  36. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/job.py +0 -0
  37. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/params.py +0 -0
  38. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/result.py +0 -0
  39. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil/workflow/stage.py +0 -0
  40. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  41. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
  42. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  43. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  44. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test__cron.py +0 -0
  45. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test__regex.py +0 -0
  46. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_conf_log.py +0 -0
  47. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_job.py +0 -0
  48. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_job_exec_py.py +0 -0
  49. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_job_exec_strategy.py +0 -0
  50. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_job_strategy.py +0 -0
  51. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_params.py +0 -0
  52. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_result.py +0 -0
  53. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_schedule_control.py +0 -0
  54. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_bash.py +0 -0
  55. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_hook.py +0 -0
  56. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_py.py +0 -0
  57. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_stage_exec_trigger.py +0 -0
  58. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_utils.py +0 -0
  59. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_utils_filter.py +0 -0
  60. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_utils_tag.py +0 -0
  61. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_utils_template.py +0 -0
  62. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_exec.py +0 -0
  63. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_exec_hook.py +0 -0
  64. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_exec_needs.py +0 -0
  65. {ddeutil_workflow-0.0.24 → ddeutil_workflow-0.0.25}/tests/test_workflow_job_exec.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ddeutil-workflow
- Version: 0.0.24
+ Version: 0.0.25
  Summary: Lightweight workflow orchestration with less dependencies
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -68,8 +68,8 @@ configuration. It called **Metadata Driven Data Workflow**.
  > with `.yml` files and all of config file from several data orchestration framework
  > tools from my experience on Data Engineer. :grimacing:
  >
- > Other workflow that I interest on them and pick some interested feature to this
- > package:
+ > Other workflow tools that I interest on them and pick some interested feature
+ > implement to this package:
  >
  > - [Google **Workflows**](https://cloud.google.com/workflows)
  > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -85,9 +85,6 @@ If you want to install this package with application add-ons, you should add
  | Python & CLI | `pip install ddeutil-workflow` | :heavy_check_mark: |
  | FastAPI Server | `pip install ddeutil-workflow[api]` | :heavy_check_mark: |

-
- > I added this feature to the main milestone.
- >
  > :egg: **Docker Images** supported:
  >
  > | Docker Image | Python Version | Support |
@@ -113,7 +110,7 @@ use-case.
  run-py-local:

  # Validate model that use to parsing exists for template file
- type: ddeutil.workflow.Workflow
+ type: ddeutil.workflow.workflow.Workflow
  on:
  # If workflow deploy to schedule, it will running every 5 minutes
  # with Asia/Bangkok timezone.
@@ -182,35 +179,35 @@ The main configuration that use to dynamic changing with your propose of this
  application. If any configuration values do not set yet, it will use default value
  and do not raise any error to you.

- | Environment | Component | Default | Description | Remark |
- |:----------------------------------------|:----------|:---------------------------------|--------------------------------------------------------------------|--------|
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
- | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
- | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
- | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
- | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
- | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
- | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
- | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
- | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
- | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
- | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
+ | Environment | Component | Default | Description | Remark |
+ |:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------|--------|
+ | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
+ | `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
+ | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
+ | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
+ | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
+ | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
+ | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
+ | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+ | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
+ | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
+ | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+ | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+ | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+ | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
+ | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
+ | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
+ | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
+ | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
+ | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
+ | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |

  **API Application**:

- | Environment | Component | Default | Description | Remark |
- |:--------------------------------------|-----------|---------|-------------------------------------------------------------------------------------|--------|
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
+ | Environment | Component | Default | Description | Remark |
+ |:--------------------------------------|:-----------:|---------|-------------------------------------------------------------------------------------|--------|
+ | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
+ | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |

  ## :rocket: Deployment

@@ -218,7 +215,7 @@ This package able to run as a application service for receive manual trigger
  from the master node via RestAPI or use to be Scheduler background service
  like crontab job but via Python API.

- ### Schedule App
+ ### CLI

  ```shell
  (venv) $ ddeutil-workflow schedule
@@ -231,7 +228,7 @@ like crontab job but via Python API.
  ```

  > [!NOTE]
- > If this package already deploy, it able to use
+ > If this package already deploy, it able to use multiprocess;
  > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`

  ### Docker Container
@@ -35,8 +35,8 @@ configuration. It called **Metadata Driven Data Workflow**.
  > with `.yml` files and all of config file from several data orchestration framework
  > tools from my experience on Data Engineer. :grimacing:
  >
- > Other workflow that I interest on them and pick some interested feature to this
- > package:
+ > Other workflow tools that I interest on them and pick some interested feature
+ > implement to this package:
  >
  > - [Google **Workflows**](https://cloud.google.com/workflows)
  > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -52,9 +52,6 @@ If you want to install this package with application add-ons, you should add
  | Python & CLI | `pip install ddeutil-workflow` | :heavy_check_mark: |
  | FastAPI Server | `pip install ddeutil-workflow[api]` | :heavy_check_mark: |

-
- > I added this feature to the main milestone.
- >
  > :egg: **Docker Images** supported:
  >
  > | Docker Image | Python Version | Support |
@@ -80,7 +77,7 @@ use-case.
  run-py-local:

  # Validate model that use to parsing exists for template file
- type: ddeutil.workflow.Workflow
+ type: ddeutil.workflow.workflow.Workflow
  on:
  # If workflow deploy to schedule, it will running every 5 minutes
  # with Asia/Bangkok timezone.
@@ -149,35 +146,35 @@ The main configuration that use to dynamic changing with your propose of this
  application. If any configuration values do not set yet, it will use default value
  and do not raise any error to you.

- | Environment | Component | Default | Description | Remark |
- |:----------------------------------------|:----------|:---------------------------------|--------------------------------------------------------------------|--------|
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
- | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
- | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
- | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
- | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
- | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
- | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
- | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
- | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
- | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
- | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
+ | Environment | Component | Default | Description | Remark |
+ |:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------|--------|
+ | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
+ | `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
+ | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
+ | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
+ | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
+ | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
+ | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
+ | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+ | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
+ | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
+ | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+ | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+ | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+ | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
+ | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
+ | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
+ | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
+ | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
+ | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
+ | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |

  **API Application**:

- | Environment | Component | Default | Description | Remark |
- |:--------------------------------------|-----------|---------|-------------------------------------------------------------------------------------|--------|
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
+ | Environment | Component | Default | Description | Remark |
+ |:--------------------------------------|:-----------:|---------|-------------------------------------------------------------------------------------|--------|
+ | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
+ | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |

  ## :rocket: Deployment

@@ -185,7 +182,7 @@ This package able to run as a application service for receive manual trigger
  from the master node via RestAPI or use to be Scheduler background service
  like crontab job but via Python API.

- ### Schedule App
+ ### CLI

  ```shell
  (venv) $ ddeutil-workflow schedule
@@ -198,7 +195,7 @@ like crontab job but via Python API.
  ```

  > [!NOTE]
- > If this package already deploy, it able to use
+ > If this package already deploy, it able to use multiprocess;
  > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`

  ### Docker Container
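The configuration tables in the README hunks above are consumed as environment variables by `ddeutil.workflow.conf`. A minimal sketch of overriding a few of them from Python before the package reads its configuration; the variable names come from the tables, the values are only illustrative, and anything left unset falls back to the documented default:

```python
import os

# Illustrative overrides only (values are examples, not new defaults).
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
os.environ["WORKFLOW_CORE_MAX_JOB_PARALLEL"] = "2"
os.environ["WORKFLOW_API_ENABLE_ROUTE_SCHEDULE"] = "false"

# The config object in ddeutil.workflow.conf reads these variables; the api
# module also calls load_dotenv(), so a .env file works as well.
from ddeutil.workflow.conf import config  # noqa: E402
```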
@@ -65,11 +65,11 @@ source = ["ddeutil.workflow"]
  omit = [
      "src/ddeutil/workflow/__about__.py",
      "src/ddeutil/workflow/__cron.py",
-     # NOTE: Omit this files because it does not ready to production.
-     "src/ddeutil/workflow/api.py",
      "src/ddeutil/workflow/cli.py",
-     "src/ddeutil/workflow/repeat.py",
-     "src/ddeutil/workflow/route.py",
+     "src/ddeutil/workflow/api/__init__.py",
+     "src/ddeutil/workflow/api/api.py",
+     "src/ddeutil/workflow/api/repeat.py",
+     "src/ddeutil/workflow/api/route.py",
  ]

  [tool.coverage.report]
@@ -0,0 +1 @@
+ __version__: str = "0.0.25"
@@ -0,0 +1 @@
+ from .api import app
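The new `api/__init__.py` simply re-exports `app` from `api/api.py`, so the `uvicorn ddeutil.workflow.api:app ...` command quoted in the README note keeps resolving after the move into the `api/` subpackage. A minimal sketch of starting the server programmatically; host and port here are illustrative values, not package defaults:

```python
import uvicorn

from ddeutil.workflow.api import app  # re-exported by api/__init__.py


if __name__ == "__main__":
    # Equivalent in spirit to the README's uvicorn command; pick any free port.
    uvicorn.run(app, host="127.0.0.1", port=8080)
```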
@@ -11,7 +11,6 @@ import uuid
  from collections.abc import AsyncIterator
  from datetime import datetime, timedelta
  from queue import Empty, Queue
- from threading import Thread
  from typing import TypedDict

  from dotenv import load_dotenv
@@ -20,34 +19,39 @@ from fastapi.middleware.gzip import GZipMiddleware
  from fastapi.responses import UJSONResponse
  from pydantic import BaseModel

- from .__about__ import __version__
- from .conf import config, get_logger
+ from ..__about__ import __version__
+ from ..conf import config, get_logger
+ from ..scheduler import ReleaseThread, ReleaseThreads
+ from ..workflow import WorkflowQueue, WorkflowTask
  from .repeat import repeat_at, repeat_every
- from .workflow import WorkflowTask

  load_dotenv()
  logger = get_logger("ddeutil.workflow")


  class State(TypedDict):
+     """TypeDict for State of FastAPI application."""
+
+     # NOTE: For upper queue route.
      upper_queue: Queue
      upper_result: dict[str, str]
+
+     # NOTE: For schedule listener.
      scheduler: list[str]
-     workflow_threads: dict[str, Thread]
+     workflow_threads: ReleaseThreads
      workflow_tasks: list[WorkflowTask]
-     workflow_queue: dict[str, list[datetime]]
-     workflow_running: dict[str, list[datetime]]
+     workflow_queue: dict[str, WorkflowQueue]


  @contextlib.asynccontextmanager
  async def lifespan(a: FastAPI) -> AsyncIterator[State]:
+     """Lifespan function for the FastAPI application."""
      a.state.upper_queue = Queue()
      a.state.upper_result = {}
      a.state.scheduler = []
      a.state.workflow_threads = {}
      a.state.workflow_tasks = []
      a.state.workflow_queue = {}
-     a.state.workflow_running = {}

      await asyncio.create_task(broker_upper_messages())

@@ -65,7 +69,6 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
          #
          "scheduler": a.state.scheduler,
          "workflow_queue": a.state.workflow_queue,
-         "workflow_running": a.state.workflow_running,
          "workflow_threads": a.state.workflow_threads,
          "workflow_tasks": a.state.workflow_tasks,
      }
@@ -114,12 +117,11 @@ async def get_result(request_id: str) -> dict[str, str]:


  @app.get("/")
- @app.get("/api")
  async def health():
      return {"message": "Workflow API already start up"}


- @app.post("/api")
+ @app.post(f"{config.prefix_path}/upper")
  async def message_upper(payload: Payload):
      """Convert message from any case to the upper case."""
      request_id: str = str(uuid.uuid4())
@@ -129,27 +131,47 @@ async def message_upper(payload: Payload):
      return await get_result(request_id)


+ # NOTE: Enable the workflow route.
  if config.enable_route_workflow:
-     from .route import workflow
+     from .route import workflow_route
+
+     app.include_router(workflow_route, prefix=config.prefix_path)

-     app.include_router(workflow)

+ # NOTE: Enable the schedule route.
  if config.enable_route_schedule:
-     from .route import schedule
-     from .scheduler import workflow_task
+     from ..conf import FileLog
+     from ..scheduler import schedule_task
+     from .route import schedule_route

-     app.include_router(schedule)
+     app.include_router(schedule_route, prefix=config.prefix_path)

-     @schedule.on_event("startup")
+     @schedule_route.on_event("startup")
      @repeat_at(cron="* * * * *", delay=2)
-     def schedule_broker_up():
+     def scheduler_listener():
+         """Schedule broker every minute at 02 second."""
          logger.debug(
              f"[SCHEDULER]: Start listening schedule from queue "
              f"{app.state.scheduler}"
          )
          if app.state.workflow_tasks:
-             workflow_task(
+             schedule_task(
                  app.state.workflow_tasks,
-                 stop=datetime.now() + timedelta(minutes=1),
+                 stop=datetime.now(config.tz) + timedelta(minutes=1),
+                 queue=app.state.workflow_queue,
                  threads=app.state.workflow_threads,
+                 log=FileLog,
              )
+
+     @schedule_route.on_event("startup")
+     @repeat_at(cron="*/5 * * * *")
+     def monitoring():
+         logger.debug("[MONITOR]: Start monitoring threading.")
+         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
+         for t_name in snapshot_threads:
+
+             thread_release: ReleaseThread = app.state.workflow_threads[t_name]
+
+             # NOTE: remove the thread that running success.
+             if not thread_release["thread"].is_alive():
+                 app.state.workflow_threads.pop(t_name)
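For readability, the application state and the thread-pruning idea from the hunks above are collected here in one place. This is a reading aid assembled from the diff, not additional source from the package, and the helper name `prune_finished` is hypothetical:

```python
from queue import Queue
from typing import TypedDict

from ddeutil.workflow.scheduler import ReleaseThread, ReleaseThreads
from ddeutil.workflow.workflow import WorkflowQueue, WorkflowTask


class State(TypedDict):
    """FastAPI application state as declared in api/api.py for 0.0.25."""

    upper_queue: Queue
    upper_result: dict[str, str]
    scheduler: list[str]
    workflow_threads: ReleaseThreads
    workflow_tasks: list[WorkflowTask]
    workflow_queue: dict[str, WorkflowQueue]


def prune_finished(threads: ReleaseThreads) -> None:
    """Drop release threads that are no longer alive, mirroring monitoring()."""
    for name in list(threads.keys()):
        release: ReleaseThread = threads[name]
        if not release["thread"].is_alive():
            threads.pop(name)
```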
@@ -12,8 +12,8 @@ from functools import wraps

  from starlette.concurrency import run_in_threadpool

- from .__cron import CronJob
- from .conf import config, get_logger
+ from ..__cron import CronJob
+ from ..conf import config, get_logger

  logger = get_logger("ddeutil.workflow")
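The `repeat.py` hunk only adjusts its imports for the new `api/` package; its `repeat_at` decorator is used exactly as in `api/api.py` above. A purely illustrative sketch of that decorator stacking, where the router `my_route` and function `tick` are hypothetical names, not part of the package:

```python
from fastapi import APIRouter

from ddeutil.workflow.api.repeat import repeat_at
from ddeutil.workflow.conf import get_logger

logger = get_logger("ddeutil.workflow")
my_route = APIRouter()


@my_route.on_event("startup")
@repeat_at(cron="* * * * *", delay=2)
def tick() -> None:
    # Same cron/delay arguments as scheduler_listener in api/api.py above.
    logger.debug("[REPEAT]: tick every minute, two seconds after.")
```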