ddeutil-workflow 0.0.26__tar.gz → 0.0.26.post1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. {ddeutil_workflow-0.0.26/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.26.post1}/PKG-INFO +30 -29
  2. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/README.md +29 -28
  3. ddeutil_workflow-0.0.26.post1/src/ddeutil/workflow/__about__.py +1 -0
  4. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/__init__.py +5 -2
  5. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/api/route.py +40 -3
  6. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/conf.py +151 -105
  7. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/scheduler.py +2 -2
  8. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/workflow.py +4 -4
  9. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1/src/ddeutil_workflow.egg-info}/PKG-INFO +30 -29
  10. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_conf.py +19 -8
  11. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_conf_log.py +9 -3
  12. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_schedule_control.py +5 -5
  13. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_utils.py +14 -3
  14. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_utils_filter.py +4 -4
  15. ddeutil_workflow-0.0.26/src/ddeutil/workflow/__about__.py +0 -1
  16. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/LICENSE +0 -0
  17. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/pyproject.toml +0 -0
  18. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/setup.cfg +0 -0
  19. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/__cron.py +0 -0
  20. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/__types.py +0 -0
  21. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/api/__init__.py +0 -0
  22. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/api/api.py +0 -0
  23. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/api/repeat.py +0 -0
  24. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/cron.py +0 -0
  25. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/exceptions.py +0 -0
  26. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/job.py +0 -0
  27. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/params.py +0 -0
  28. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/result.py +0 -0
  29. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/stage.py +0 -0
  30. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil/workflow/utils.py +0 -0
  31. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
  32. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  33. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  34. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  35. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test__cron.py +0 -0
  36. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test__regex.py +0 -0
  37. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_cron_on.py +0 -0
  38. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_job.py +0 -0
  39. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_job_exec_py.py +0 -0
  40. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_job_exec_strategy.py +0 -0
  41. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_job_strategy.py +0 -0
  42. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_params.py +0 -0
  43. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_result.py +0 -0
  44. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_schedule.py +0 -0
  45. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_schedule_tasks.py +0 -0
  46. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_stage.py +0 -0
  47. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_stage_exec_bash.py +0 -0
  48. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_stage_exec_hook.py +0 -0
  49. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_stage_exec_py.py +0 -0
  50. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_stage_exec_trigger.py +0 -0
  51. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_utils_tag.py +0 -0
  52. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_utils_template.py +0 -0
  53. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow.py +0 -0
  54. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_exec.py +0 -0
  55. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_exec_hook.py +0 -0
  56. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_exec_needs.py +0 -0
  57. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_job_exec.py +0 -0
  58. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_poke.py +0 -0
  59. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_release.py +0 -0
  60. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_release_and_queue.py +0 -0
  61. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_schedule.py +0 -0
  62. {ddeutil_workflow-0.0.26 → ddeutil_workflow-0.0.26.post1}/tests/test_workflow_task.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.26
3
+ Version: 0.0.26.post1
4
4
  Summary: Lightweight workflow orchestration with less dependencies
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -99,7 +99,7 @@ use-case.
99
99
  run-py-local:
100
100
 
101
101
  # Validate model that use to parsing exists for template file
102
- type: ddeutil.workflow.workflow.Workflow
102
+ type: Workflow
103
103
  on:
104
104
  # If workflow deploy to schedule, it will running every 5 minutes
105
105
  # with Asia/Bangkok timezone.
@@ -150,7 +150,7 @@ So, this package provide the `Schedule` template for this action.
150
150
  schedule-run-local-wf:
151
151
 
152
152
  # Validate model that use to parsing exists for template file
153
- type: ddeutil.workflow.scheduler.Schedule
153
+ type: Schedule
154
154
  workflows:
155
155
 
156
156
  # Map existing workflow that want to deploy with scheduler application.
@@ -168,35 +168,36 @@ The main configuration that use to dynamic changing with your propose of this
168
168
  application. If any configuration values do not set yet, it will use default value
169
169
  and do not raise any error to you.
170
170
 
171
- | Environment | Component | Default | Description | Remark |
172
- |:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
173
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
174
- | `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
175
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
176
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
177
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
178
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
179
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
180
- | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
181
- | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
182
- | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
183
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
184
- | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
185
- | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
186
- | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
187
- | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
188
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
189
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
190
- | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
191
- | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
192
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
171
+ | Environment | Component | Default | Description | Remark |
172
+ |:-------------------------------------------|:---------:|:-----------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
173
+ | **WORKFLOW_ROOT_PATH** | Core | `.` | The root path of the workflow application. | |
174
+ | **WORKFLOW_CORE_REGISTRY** | Core | `src,src.ddeutil.workflow,tests,tests.utils` | List of importable string for the hook stage. | |
175
+ | **WORKFLOW_CORE_REGISTRY_FILTER** | Core | `src.ddeutil.workflow.utils,ddeutil.workflow.utils` | List of importable string for the filter template. | |
176
+ | **WORKFLOW_CORE_PATH_CONF** | Core | `conf` | The config path that keep all template `.yaml` files. | |
177
+ | **WORKFLOW_CORE_TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. | |
178
+ | **WORKFLOW_CORE_STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. | |
179
+ | **WORKFLOW_CORE_STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. | |
180
+ | **WORKFLOW_CORE_JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
181
+ | **WORKFLOW_CORE_JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. | |
182
+ | **WORKFLOW_CORE_MAX_NUM_POKING** | Core | `4` | . | |
183
+ | **WORKFLOW_CORE_MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. | |
184
+ | **WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT** | Core | `600` | | |
185
+ | **WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW** | Core | `5` | | |
186
+ | **WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST** | Core | `16` | | |
187
+ | **WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. | |
188
+ | **WORKFLOW_LOG_PATH** | Log | `./logs` | The log path of the workflow saving log. | |
189
+ | **WORKFLOW_LOG_DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. | |
190
+ | **WORKFLOW_LOG_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. | |
191
+ | **WORKFLOW_APP_MAX_PROCESS** | Schedule | `2` | The maximum process worker number that run in scheduler app module. | |
192
+ | **WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS** | Schedule | `100` | A schedule per process that run parallel. | |
193
+ | **WORKFLOW_APP_STOP_BOUNDARY_DELTA** | Schedule | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. | |
193
194
 
194
195
  **API Application**:
195
196
 
196
- | Environment | Component | Default | Description | Remark |
197
- |:--------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
198
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
199
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
197
+ | Environment | Component | Default | Description | Remark |
198
+ |:----------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
199
+ | **WORKFLOW_API_ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. | |
200
+ | **WORKFLOW_API_ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. | |
200
201
 
201
202
  ## :rocket: Deployment
202
203
 
@@ -67,7 +67,7 @@ use-case.
67
67
  run-py-local:
68
68
 
69
69
  # Validate model that use to parsing exists for template file
70
- type: ddeutil.workflow.workflow.Workflow
70
+ type: Workflow
71
71
  on:
72
72
  # If workflow deploy to schedule, it will running every 5 minutes
73
73
  # with Asia/Bangkok timezone.
@@ -118,7 +118,7 @@ So, this package provide the `Schedule` template for this action.
118
118
  schedule-run-local-wf:
119
119
 
120
120
  # Validate model that use to parsing exists for template file
121
- type: ddeutil.workflow.scheduler.Schedule
121
+ type: Schedule
122
122
  workflows:
123
123
 
124
124
  # Map existing workflow that want to deploy with scheduler application.
@@ -136,35 +136,36 @@ The main configuration that use to dynamic changing with your propose of this
136
136
  application. If any configuration values do not set yet, it will use default value
137
137
  and do not raise any error to you.
138
138
 
139
- | Environment | Component | Default | Description | Remark |
140
- |:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
141
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
142
- | `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
143
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
144
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
145
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
146
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
147
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
148
- | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
149
- | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
150
- | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
151
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
152
- | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
153
- | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
154
- | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
155
- | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
156
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
157
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
158
- | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
159
- | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
160
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
139
+ | Environment | Component | Default | Description | Remark |
140
+ |:-------------------------------------------|:---------:|:-----------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
141
+ | **WORKFLOW_ROOT_PATH** | Core | `.` | The root path of the workflow application. | |
142
+ | **WORKFLOW_CORE_REGISTRY** | Core | `src,src.ddeutil.workflow,tests,tests.utils` | List of importable string for the hook stage. | |
143
+ | **WORKFLOW_CORE_REGISTRY_FILTER** | Core | `src.ddeutil.workflow.utils,ddeutil.workflow.utils` | List of importable string for the filter template. | |
144
+ | **WORKFLOW_CORE_PATH_CONF** | Core | `conf` | The config path that keep all template `.yaml` files. | |
145
+ | **WORKFLOW_CORE_TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. | |
146
+ | **WORKFLOW_CORE_STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. | |
147
+ | **WORKFLOW_CORE_STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. | |
148
+ | **WORKFLOW_CORE_JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
149
+ | **WORKFLOW_CORE_JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. | |
150
+ | **WORKFLOW_CORE_MAX_NUM_POKING** | Core | `4` | . | |
151
+ | **WORKFLOW_CORE_MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. | |
152
+ | **WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT** | Core | `600` | | |
153
+ | **WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW** | Core | `5` | | |
154
+ | **WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST** | Core | `16` | | |
155
+ | **WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. | |
156
+ | **WORKFLOW_LOG_PATH** | Log | `./logs` | The log path of the workflow saving log. | |
157
+ | **WORKFLOW_LOG_DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. | |
158
+ | **WORKFLOW_LOG_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. | |
159
+ | **WORKFLOW_APP_MAX_PROCESS** | Schedule | `2` | The maximum process worker number that run in scheduler app module. | |
160
+ | **WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS** | Schedule | `100` | A schedule per process that run parallel. | |
161
+ | **WORKFLOW_APP_STOP_BOUNDARY_DELTA** | Schedule | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. | |
161
162
 
162
163
  **API Application**:
163
164
 
164
- | Environment | Component | Default | Description | Remark |
165
- |:--------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
166
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
167
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
165
+ | Environment | Component | Default | Description | Remark |
166
+ |:----------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
167
+ | **WORKFLOW_API_ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. | |
168
+ | **WORKFLOW_API_ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. | |
168
169
 
169
170
  ## :rocket: Deployment
170
171
 
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.26.post1"
@@ -3,11 +3,14 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
- from .__cron import CronRunner
6
+ from .__cron import CronJob, CronRunner
7
7
  from .conf import (
8
8
  Config,
9
- FileLog,
10
9
  Loader,
10
+ Log,
11
+ env,
12
+ get_log,
13
+ get_logger,
11
14
  )
12
15
  from .cron import (
13
16
  On,
@@ -16,7 +16,7 @@ from fastapi.responses import UJSONResponse
16
16
  from pydantic import BaseModel
17
17
 
18
18
  from ..__types import DictData
19
- from ..conf import Loader, config, get_logger
19
+ from ..conf import FileLog, Loader, config, get_logger
20
20
  from ..result import Result
21
21
  from ..scheduler import Schedule
22
22
  from ..workflow import Workflow
@@ -99,12 +99,49 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
99
99
 
100
100
  @workflow_route.get(path="/{name}/logs")
101
101
  async def get_workflow_logs(name: str):
102
- return {"message": f"Getting workflow {name!r} logs"}
102
+ try:
103
+ return {
104
+ "message": f"Getting workflow {name!r} logs",
105
+ "logs": [
106
+ log.model_dump(
107
+ by_alias=True,
108
+ exclude_none=True,
109
+ exclude_unset=True,
110
+ exclude_defaults=True,
111
+ )
112
+ for log in FileLog.find_logs(name=name)
113
+ ],
114
+ }
115
+ except FileNotFoundError:
116
+ raise HTTPException(
117
+ status_code=st.HTTP_404_NOT_FOUND,
118
+ detail=f"Does not found log for workflow {name!r}",
119
+ ) from None
103
120
 
104
121
 
105
122
  @workflow_route.get(path="/{name}/logs/{release}")
106
123
  async def get_workflow_release_log(name: str, release: str):
107
- return {"message": f"Getting workflow {name!r} log in release {release}"}
124
+ try:
125
+ log: FileLog = FileLog.find_log_with_release(
126
+ name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
127
+ )
128
+ except FileNotFoundError:
129
+ raise HTTPException(
130
+ status_code=st.HTTP_404_NOT_FOUND,
131
+ detail=(
132
+ f"Does not found log for workflow {name!r} "
133
+ f"with release {release!r}"
134
+ ),
135
+ ) from None
136
+ return {
137
+ "message": f"Getting workflow {name!r} log in release {release}",
138
+ "log": log.model_dump(
139
+ by_alias=True,
140
+ exclude_none=True,
141
+ exclude_unset=True,
142
+ exclude_defaults=True,
143
+ ),
144
+ }
108
145
 
109
146
 
110
147
  @workflow_route.delete(
@@ -13,27 +13,30 @@ from collections.abc import Iterator
13
13
  from datetime import datetime, timedelta
14
14
  from functools import cached_property, lru_cache
15
15
  from pathlib import Path
16
- from typing import ClassVar, Optional, TypeVar, Union
16
+ from typing import ClassVar, Optional, Union
17
17
  from zoneinfo import ZoneInfo
18
18
 
19
19
  from ddeutil.core import str2bool
20
- from ddeutil.io import PathSearch, YamlFlResolve
21
- from dotenv import load_dotenv
20
+ from ddeutil.io import YamlFlResolve
22
21
  from pydantic import BaseModel, Field
23
22
  from pydantic.functional_validators import model_validator
24
23
  from typing_extensions import Self
25
24
 
26
25
  from .__types import DictData, TupleStr
27
26
 
28
- AnyModel = TypeVar("AnyModel", bound=BaseModel)
29
- AnyModelType = type[AnyModel]
30
27
 
31
- load_dotenv()
28
+ def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
29
+ return os.getenv(f"WORKFLOW_{var}", default)
30
+
31
+
32
+ def glob_files(path: Path) -> Iterator[Path]: # pragma: no cov
33
+ yield from (file for file in path.rglob("*") if file.is_file())
32
34
 
33
- env = os.getenv
34
35
 
35
36
  __all__: TupleStr = (
37
+ "env",
36
38
  "get_logger",
39
+ "get_log",
37
40
  "Config",
38
41
  "SimLoad",
39
42
  "Loader",
@@ -52,6 +55,14 @@ def get_logger(name: str):
52
55
  :param name: A module name that want to log.
53
56
  """
54
57
  lg = logging.getLogger(name)
58
+
59
+ # NOTE: Developers using this package can then disable all logging just for
60
+ # this package by;
61
+ #
62
+ # `logging.getLogger('ddeutil.workflow').propagate = False`
63
+ #
64
+ lg.addHandler(logging.NullHandler())
65
+
55
66
  formatter = logging.Formatter(
56
67
  fmt=(
57
68
  "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
@@ -68,122 +79,141 @@ def get_logger(name: str):
68
79
  return lg
69
80
 
70
81
 
71
- class Config:
82
+ class Config: # pragma: no cov
72
83
  """Config object for keeping application configuration on current session
73
84
  without changing when if the application still running.
74
85
  """
75
86
 
76
87
  # NOTE: Core
77
- root_path: Path = Path(os.getenv("WORKFLOW_ROOT_PATH", "."))
78
- tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
79
- gen_id_simple_mode: bool = str2bool(
80
- os.getenv("WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE", "true")
81
- )
88
+ @property
89
+ def root_path(self) -> Path:
90
+ return Path(env("ROOT_PATH", "."))
91
+
92
+ @property
93
+ def conf_path(self) -> Path:
94
+ """Config path that use root_path class argument for this construction.
95
+
96
+ :rtype: Path
97
+ """
98
+ return self.root_path / env("CORE_PATH_CONF", "conf")
99
+
100
+ @property
101
+ def tz(self) -> ZoneInfo:
102
+ return ZoneInfo(env("CORE_TIMEZONE", "UTC"))
103
+
104
+ @property
105
+ def gen_id_simple_mode(self) -> bool:
106
+ return str2bool(env("CORE_GENERATE_ID_SIMPLE_MODE", "true"))
82
107
 
83
108
  # NOTE: Register
84
- regis_hook_str: str = os.getenv(
85
- "WORKFLOW_CORE_REGISTRY", "src,src.ddeutil.workflow,tests,tests.utils"
86
- )
87
- regis_filter_str: str = os.getenv(
88
- "WORKFLOW_CORE_REGISTRY_FILTER", "ddeutil.workflow.utils"
89
- )
109
+ @property
110
+ def regis_hook(self) -> list[str]:
111
+ regis_hook_str: str = env(
112
+ "CORE_REGISTRY", "src,src.ddeutil.workflow,tests,tests.utils"
113
+ )
114
+ return [r.strip() for r in regis_hook_str.split(",")]
115
+
116
+ @property
117
+ def regis_filter(self) -> list[str]:
118
+ regis_filter_str: str = env(
119
+ "CORE_REGISTRY_FILTER", "ddeutil.workflow.utils"
120
+ )
121
+ return [r.strip() for r in regis_filter_str.split(",")]
90
122
 
91
123
  # NOTE: Logging
92
- debug: bool = str2bool(os.getenv("WORKFLOW_LOG_DEBUG_MODE", "true"))
93
- enable_write_log: bool = str2bool(
94
- os.getenv("WORKFLOW_LOG_ENABLE_WRITE", "false")
95
- )
124
+ @property
125
+ def log_path(self) -> Path:
126
+ return Path(env("LOG_PATH", "./logs"))
127
+
128
+ @property
129
+ def debug(self) -> bool:
130
+ return str2bool(env("LOG_DEBUG_MODE", "true"))
131
+
132
+ @property
133
+ def enable_write_log(self) -> bool:
134
+ return str2bool(env("LOG_ENABLE_WRITE", "false"))
96
135
 
97
136
  # NOTE: Stage
98
- stage_raise_error: bool = str2bool(
99
- env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
100
- )
101
- stage_default_id: bool = str2bool(
102
- env("WORKFLOW_CORE_STAGE_DEFAULT_ID", "false")
103
- )
137
+ @property
138
+ def stage_raise_error(self) -> bool:
139
+ return str2bool(env("CORE_STAGE_RAISE_ERROR", "false"))
104
140
 
105
- # NOTE: Job
106
- job_raise_error: bool = str2bool(
107
- env("WORKFLOW_CORE_JOB_RAISE_ERROR", "true")
108
- )
109
- job_default_id: bool = str2bool(
110
- env("WORKFLOW_CORE_JOB_DEFAULT_ID", "false")
111
- )
141
+ @property
142
+ def stage_default_id(self) -> bool:
143
+ return str2bool(env("CORE_STAGE_DEFAULT_ID", "false"))
112
144
 
113
- # NOTE: Workflow
114
- max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
115
- max_job_exec_timeout: int = int(
116
- env("WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT", "600")
117
- )
118
- max_poking_pool_worker: int = int(
119
- os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
120
- )
121
- max_on_per_workflow: int = int(
122
- env("WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW", "5")
123
- )
124
- max_queue_complete_hist: int = int(
125
- os.getenv("WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST", "16")
126
- )
145
+ # NOTE: Job
146
+ @property
147
+ def job_raise_error(self) -> bool:
148
+ return str2bool(env("CORE_JOB_RAISE_ERROR", "true"))
127
149
 
128
- # NOTE: Schedule App
129
- max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
130
- max_schedule_per_process: int = int(
131
- env("WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS", "100")
132
- )
133
- stop_boundary_delta_str: str = env(
134
- "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
135
- )
150
+ @property
151
+ def job_default_id(self) -> bool:
152
+ return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))
136
153
 
137
- # NOTE: API
138
- prefix_path: str = env("WORKFLOW_API_PREFIX_PATH", "/api/v1")
139
- enable_route_workflow: bool = str2bool(
140
- env("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
141
- )
142
- enable_route_schedule: bool = str2bool(
143
- env("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
144
- )
154
+ # NOTE: Workflow
155
+ @property
156
+ def max_job_parallel(self) -> int:
157
+ max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
145
158
 
146
- def __init__(self) -> None:
147
159
  # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
148
- if self.max_job_parallel < 0:
160
+ if max_job_parallel < 0:
149
161
  raise ValueError(
150
- f"``MAX_JOB_PARALLEL`` should more than 0 but got "
151
- f"{self.max_job_parallel}."
162
+ f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
163
+ f"{max_job_parallel}."
152
164
  )
165
+ return max_job_parallel
166
+
167
+ @property
168
+ def max_job_exec_timeout(self) -> int:
169
+ return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
170
+
171
+ @property
172
+ def max_poking_pool_worker(self) -> int:
173
+ return int(env("CORE_MAX_NUM_POKING", "4"))
174
+
175
+ @property
176
+ def max_on_per_workflow(self) -> int:
177
+ return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))
178
+
179
+ @property
180
+ def max_queue_complete_hist(self) -> int:
181
+ return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))
182
+
183
+ # NOTE: Schedule App
184
+ @property
185
+ def max_schedule_process(self) -> int:
186
+ return int(env("APP_MAX_PROCESS", "2"))
187
+
188
+ @property
189
+ def max_schedule_per_process(self) -> int:
190
+ return int(env("APP_MAX_SCHEDULE_PER_PROCESS", "100"))
153
191
 
192
+ @property
193
+ def stop_boundary_delta(self) -> timedelta:
194
+ stop_boundary_delta_str: str = env(
195
+ "APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
196
+ )
154
197
  try:
155
- self.stop_boundary_delta: timedelta = timedelta(
156
- **json.loads(self.stop_boundary_delta_str)
157
- )
198
+ return timedelta(**json.loads(stop_boundary_delta_str))
158
199
  except Exception as err:
159
200
  raise ValueError(
160
201
  "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
161
- f"timedelta with {self.stop_boundary_delta_str}."
202
+ f"timedelta with {stop_boundary_delta_str}."
162
203
  ) from err
163
204
 
164
- def refresh_dotenv(self) -> Self:
165
- """Reload environment variables from the current stage."""
166
- self.tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
167
- self.stage_raise_error: bool = str2bool(
168
- env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
169
- )
170
- return self
171
-
205
+ # NOTE: API
172
206
  @property
173
- def conf_path(self) -> Path:
174
- """Config path that use root_path class argument for this construction.
175
-
176
- :rtype: Path
177
- """
178
- return self.root_path / os.getenv("WORKFLOW_CORE_PATH_CONF", "conf")
207
+ def prefix_path(self) -> str:
208
+ return env("API_PREFIX_PATH", "/api/v1")
179
209
 
180
210
  @property
181
- def regis_hook(self) -> list[str]:
182
- return [r.strip() for r in self.regis_hook_str.split(",")]
211
+ def enable_route_workflow(self) -> bool:
212
+ return str2bool(env("API_ENABLE_ROUTE_WORKFLOW", "true"))
183
213
 
184
214
  @property
185
- def regis_filter(self) -> list[str]:
186
- return [r.strip() for r in self.regis_filter_str.split(",")]
215
+ def enable_route_schedule(self) -> bool:
216
+ return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
187
217
 
188
218
 
189
219
  class SimLoad:
@@ -213,7 +243,8 @@ class SimLoad:
213
243
  externals: DictData | None = None,
214
244
  ) -> None:
215
245
  self.data: DictData = {}
216
- for file in PathSearch(conf.conf_path).files:
246
+ for file in glob_files(conf.conf_path):
247
+
217
248
  if data := self.filter_suffix(file, name):
218
249
  self.data = data
219
250
 
@@ -246,7 +277,7 @@ class SimLoad:
246
277
  :rtype: Iterator[tuple[str, DictData]]
247
278
  """
248
279
  exclude: list[str] = excluded or []
249
- for file in PathSearch(conf.conf_path).files:
280
+ for file in glob_files(conf.conf_path):
250
281
 
251
282
  for key, data in cls.filter_suffix(file).items():
252
283
 
@@ -272,7 +303,7 @@ class SimLoad:
272
303
  """Return object of string type which implement on any registry. The
273
304
  object type.
274
305
 
275
- :rtype: AnyModelType
306
+ :rtype: str
276
307
  """
277
308
  if _typ := self.data.get("type"):
278
309
  return _typ
@@ -362,7 +393,7 @@ class FileLog(BaseLog):
362
393
  """
363
394
 
364
395
  filename_fmt: ClassVar[str] = (
365
- "./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
396
+ "workflow={name}/release={release:%Y%m%d%H%M%S}"
366
397
  )
367
398
 
368
399
  def do_before(self) -> None:
@@ -378,18 +409,16 @@ class FileLog(BaseLog):
378
409
 
379
410
  :rtype: Iterator[Self]
380
411
  """
381
- pointer: Path = config.root_path / f"./logs/workflow={name}"
412
+ pointer: Path = config.log_path / f"workflow={name}"
382
413
  if not pointer.exists():
383
- raise FileNotFoundError(
384
- f"Pointer: ./logs/workflow={name} does not found."
385
- )
414
+ raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
386
415
 
387
416
  for file in pointer.glob("./release=*/*.log"):
388
417
  with file.open(mode="r", encoding="utf-8") as f:
389
418
  yield cls.model_validate(obj=json.load(f))
390
419
 
391
420
  @classmethod
392
- def find_log_latest(
421
+ def find_log_with_release(
393
422
  cls,
394
423
  name: str,
395
424
  release: datetime | None = None,
@@ -410,8 +439,7 @@ class FileLog(BaseLog):
410
439
  raise NotImplementedError("Find latest log does not implement yet.")
411
440
 
412
441
  pointer: Path = (
413
- config.root_path
414
- / f"./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
442
+ config.log_path / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
415
443
  )
416
444
  if not pointer.exists():
417
445
  raise FileNotFoundError(
@@ -440,7 +468,7 @@ class FileLog(BaseLog):
440
468
  return False
441
469
 
442
470
  # NOTE: create pointer path that use the same logic of pointer method.
443
- pointer: Path = config.root_path / cls.filename_fmt.format(
471
+ pointer: Path = config.log_path / cls.filename_fmt.format(
444
472
  name=name, release=release
445
473
  )
446
474
 
@@ -451,7 +479,7 @@ class FileLog(BaseLog):
451
479
 
452
480
  :rtype: Path
453
481
  """
454
- return config.root_path / self.filename_fmt.format(
482
+ return config.log_path / self.filename_fmt.format(
455
483
  name=self.name, release=self.release
456
484
  )
457
485
 
@@ -482,6 +510,18 @@ class FileLog(BaseLog):
482
510
 
483
511
  class SQLiteLog(BaseLog): # pragma: no cov
484
512
 
513
+ table: str = "workflow_log"
514
+ ddl: str = """
515
+ workflow str,
516
+ release int,
517
+ type str,
518
+ context json,
519
+ parent_run_id int,
520
+ run_id int,
521
+ update datetime
522
+ primary key ( run_id )
523
+ """
524
+
485
525
  def save(self, excluded: list[str] | None) -> None:
486
526
  raise NotImplementedError("SQLiteLog does not implement yet.")
487
527
 
@@ -490,3 +530,9 @@ Log = Union[
490
530
  FileLog,
491
531
  SQLiteLog,
492
532
  ]
533
+
534
+
535
+ def get_log() -> Log: # pragma: no cov
536
+ if config.log_path.is_file():
537
+ return SQLiteLog
538
+ return FileLog
@@ -51,7 +51,7 @@ except ImportError: # pragma: no cov
51
51
 
52
52
  from .__cron import CronRunner
53
53
  from .__types import DictData, TupleStr
54
- from .conf import FileLog, Loader, Log, config, get_logger
54
+ from .conf import Loader, Log, config, get_log, get_logger
55
55
  from .cron import On
56
56
  from .exceptions import WorkflowException
57
57
  from .utils import (
@@ -493,7 +493,7 @@ def schedule_control(
493
493
  "Should install schedule package before use this module."
494
494
  ) from None
495
495
 
496
- log: type[Log] = log or FileLog
496
+ log: type[Log] = log or get_log()
497
497
  scheduler: Scheduler = Scheduler()
498
498
  start_date: datetime = datetime.now(tz=config.tz)
499
499
  stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
@@ -42,7 +42,7 @@ from typing_extensions import Self
42
42
 
43
43
  from .__cron import CronJob, CronRunner
44
44
  from .__types import DictData, TupleStr
45
- from .conf import FileLog, Loader, Log, config, get_logger
45
+ from .conf import Loader, Log, config, get_log, get_logger
46
46
  from .cron import On
47
47
  from .exceptions import JobException, WorkflowException
48
48
  from .job import Job
@@ -501,7 +501,7 @@ class Workflow(BaseModel):
501
501
 
502
502
  :rtype: Result
503
503
  """
504
- log: type[Log] = log or FileLog
504
+ log: type[Log] = log or get_log()
505
505
  name: str = override_log_name or self.name
506
506
  run_id: str = run_id or gen_id(name, unique=True)
507
507
  rs_release: Result = Result(run_id=run_id)
@@ -670,7 +670,7 @@ class Workflow(BaseModel):
670
670
  :rtype: list[Result]
671
671
  :return: A list of all results that return from ``self.release`` method.
672
672
  """
673
- log: type[Log] = log or FileLog
673
+ log: type[Log] = log or get_log()
674
674
  run_id: str = run_id or gen_id(self.name, unique=True)
675
675
 
676
676
  # NOTE: If this workflow does not set the on schedule, it will return
@@ -1151,7 +1151,7 @@ class WorkflowTask:
1151
1151
 
1152
1152
  :rtype: Result
1153
1153
  """
1154
- log: type[Log] = log or FileLog
1154
+ log: type[Log] = log or get_log()
1155
1155
 
1156
1156
  if release is None:
1157
1157
  if queue.check_queue(self.runner.date):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.26
3
+ Version: 0.0.26.post1
4
4
  Summary: Lightweight workflow orchestration with less dependencies
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -99,7 +99,7 @@ use-case.
99
99
  run-py-local:
100
100
 
101
101
  # Validate model that use to parsing exists for template file
102
- type: ddeutil.workflow.workflow.Workflow
102
+ type: Workflow
103
103
  on:
104
104
  # If workflow deploy to schedule, it will running every 5 minutes
105
105
  # with Asia/Bangkok timezone.
@@ -150,7 +150,7 @@ So, this package provide the `Schedule` template for this action.
150
150
  schedule-run-local-wf:
151
151
 
152
152
  # Validate model that use to parsing exists for template file
153
- type: ddeutil.workflow.scheduler.Schedule
153
+ type: Schedule
154
154
  workflows:
155
155
 
156
156
  # Map existing workflow that want to deploy with scheduler application.
@@ -168,35 +168,36 @@ The main configuration that use to dynamic changing with your propose of this
168
168
  application. If any configuration values do not set yet, it will use default value
169
169
  and do not raise any error to you.
170
170
 
171
- | Environment | Component | Default | Description | Remark |
172
- |:----------------------------------------|:----------:|:---------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
173
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
174
- | `WORKFLOW_CORE_REGISTRY` | Core | src,src.ddeutil.workflow,tests,tests.utils | List of importable string for the hook stage. | |
175
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | src.ddeutil.workflow.utils,ddeutil.workflow.utils | List of importable string for the filter template. | |
176
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
177
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
178
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
179
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
180
- | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
181
- | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
182
- | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
183
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
184
- | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
185
- | `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
186
- | `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
187
- | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
188
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
189
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
190
- | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
191
- | `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
192
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |
171
+ | Environment | Component | Default | Description | Remark |
172
+ |:-------------------------------------------|:---------:|:-----------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|--------|
173
+ | **WORKFLOW_ROOT_PATH** | Core | `.` | The root path of the workflow application. | |
174
+ | **WORKFLOW_CORE_REGISTRY** | Core | `src,src.ddeutil.workflow,tests,tests.utils` | List of importable string for the hook stage. | |
175
+ | **WORKFLOW_CORE_REGISTRY_FILTER** | Core | `src.ddeutil.workflow.utils,ddeutil.workflow.utils` | List of importable string for the filter template. | |
176
+ | **WORKFLOW_CORE_PATH_CONF** | Core | `conf` | The config path that keep all template `.yaml` files. | |
177
+ | **WORKFLOW_CORE_TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. | |
178
+ | **WORKFLOW_CORE_STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. | |
179
+ | **WORKFLOW_CORE_STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. | |
180
+ | **WORKFLOW_CORE_JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
181
+ | **WORKFLOW_CORE_JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. | |
182
+ | **WORKFLOW_CORE_MAX_NUM_POKING** | Core | `4` | . | |
183
+ | **WORKFLOW_CORE_MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. | |
184
+ | **WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT** | Core | `600` | | |
185
+ | **WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW** | Core | `5` | | |
186
+ | **WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST** | Core | `16` | | |
187
+ | **WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flag that enables generating IDs with the `md5` algorithm. | |
188
+ | **WORKFLOW_LOG_PATH** | Log | `./logs` | The log path of the workflow saving log. | |
189
+ | **WORKFLOW_LOG_DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. | |
190
+ | **WORKFLOW_LOG_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. | |
191
+ | **WORKFLOW_APP_MAX_PROCESS** | Schedule | `2` | The maximum process worker number that run in scheduler app module. | |
192
+ | **WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS** | Schedule | `100` | The number of schedules per process that run in parallel. | |
193
+ | **WORKFLOW_APP_STOP_BOUNDARY_DELTA** | Schedule | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. | |
193
194
 
194
195
  **API Application**:
195
196
 
196
- | Environment | Component | Default | Description | Remark |
197
- |:--------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
198
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
199
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |
197
+ | Environment | Component | Default | Description | Remark |
198
+ |:----------------------------------------|:-----------:|---------|------------------------------------------------------------------------------------|--------|
199
+ | **WORKFLOW_API_ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enables the workflow route for manual execution and workflow logging. | |
200
+ | **WORKFLOW_API_ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enables the schedule route for running the scheduler. | |
200
201
 
201
202
  ## :rocket: Deployment
202
203
 
@@ -13,18 +13,29 @@ from ddeutil.workflow.scheduler import Schedule
13
13
  from ddeutil.workflow.workflow import Workflow
14
14
 
15
15
 
16
- def test_config():
17
- with mock.patch.object(Config, "max_job_parallel", -1):
18
- with pytest.raises(ValueError):
19
- Config()
16
+ @pytest.fixture(scope="function")
17
+ def adjust_config():
18
+ origin_max_job = os.getenv("WORKFLOW_CORE_MAX_JOB_PARALLEL")
19
+ origin_stop = os.getenv("WORKFLOW_APP_STOP_BOUNDARY_DELTA")
20
+ os.environ["WORKFLOW_CORE_MAX_JOB_PARALLEL"] = "-1"
21
+ os.environ["WORKFLOW_APP_STOP_BOUNDARY_DELTA"] = "{"
20
22
 
21
- with mock.patch.object(Config, "stop_boundary_delta_str", "{"):
22
- with pytest.raises(ValueError):
23
- Config()
23
+ yield
24
+
25
+ os.environ["WORKFLOW_CORE_MAX_JOB_PARALLEL"] = origin_max_job
26
+ os.environ["WORKFLOW_APP_STOP_BOUNDARY_DELTA"] = origin_stop
27
+
28
+
29
+ def test_config(adjust_config):
30
+
31
+ with pytest.raises(ValueError):
32
+ _ = Config().max_job_parallel
33
+
34
+ with pytest.raises(ValueError):
35
+ _ = Config().stop_boundary_delta
24
36
 
25
37
  conf = Config()
26
38
  os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
27
- conf = conf.refresh_dotenv()
28
39
  assert conf.tz == ZoneInfo("Asia/Bangkok")
29
40
 
30
41
 
@@ -43,7 +43,7 @@ def test_conf_log_file_do_first():
43
43
  },
44
44
  )
45
45
  log.save(excluded=None)
46
- log = FileLog.find_log_latest(
46
+ log = FileLog.find_log_with_release(
47
47
  name="wf-demo-logging",
48
48
  release=datetime(2024, 1, 1, 1),
49
49
  )
@@ -79,6 +79,12 @@ def test_conf_log_file_find_logs(root_path):
79
79
 
80
80
  for log in FileLog.find_logs(name="wf-no-release-log"):
81
81
  assert isinstance(log, FileLog)
82
+ log.model_dump(
83
+ by_alias=True,
84
+ exclude_none=True,
85
+ exclude_unset=True,
86
+ exclude_defaults=True,
87
+ )
82
88
 
83
89
 
84
90
  def test_conf_log_file_find_logs_raise():
@@ -86,9 +92,9 @@ def test_conf_log_file_find_logs_raise():
86
92
  next(FileLog.find_logs(name="wf-file-not-found"))
87
93
 
88
94
 
89
- def test_conf_log_file_find_log_latest():
95
+ def test_conf_log_file_find_log_with_release():
90
96
  with pytest.raises(FileNotFoundError):
91
- FileLog.find_log_latest(
97
+ FileLog.find_log_with_release(
92
98
  name="wf-file-not-found",
93
99
  release=datetime(2024, 1, 1, 1),
94
100
  )
@@ -1,31 +1,31 @@
1
1
  from datetime import timedelta
2
2
  from unittest import mock
3
3
 
4
- from ddeutil.workflow.conf import Config, config
4
+ from ddeutil.workflow.conf import Config
5
5
  from ddeutil.workflow.scheduler import schedule_control
6
6
 
7
7
 
8
- @mock.patch.object(config, "stop_boundary_delta", timedelta(minutes=1))
8
+ @mock.patch.object(Config, "stop_boundary_delta", timedelta(minutes=1))
9
9
  @mock.patch.object(Config, "enable_write_log", False)
10
10
  def test_schedule_control():
11
11
  rs = schedule_control(["schedule-every-minute-wf"])
12
12
  assert rs == ["schedule-every-minute-wf"]
13
13
 
14
14
 
15
- @mock.patch.object(config, "stop_boundary_delta", timedelta(minutes=3))
15
+ @mock.patch.object(Config, "stop_boundary_delta", timedelta(minutes=3))
16
16
  @mock.patch.object(Config, "enable_write_log", False)
17
17
  def test_schedule_control_multi_on():
18
18
  rs = schedule_control(["schedule-multi-on-wf"])
19
19
  assert rs == ["schedule-multi-on-wf"]
20
20
 
21
21
 
22
- @mock.patch.object(config, "stop_boundary_delta", timedelta(minutes=0))
22
+ @mock.patch.object(Config, "stop_boundary_delta", timedelta(minutes=0))
23
23
  def test_schedule_control_stop():
24
24
  rs = schedule_control(["schedule-every-minute-wf"])
25
25
  assert rs == ["schedule-every-minute-wf"]
26
26
 
27
27
 
28
- @mock.patch.object(config, "stop_boundary_delta", timedelta(minutes=2))
28
+ @mock.patch.object(Config, "stop_boundary_delta", timedelta(minutes=2))
29
29
  @mock.patch.object(Config, "enable_write_log", False)
30
30
  def test_schedule_control_parallel():
31
31
  rs = schedule_control(["schedule-every-minute-wf-parallel"])
@@ -1,16 +1,27 @@
1
+ import os
1
2
  from pathlib import Path
2
- from unittest.mock import patch
3
3
 
4
4
  import ddeutil.workflow.utils as utils
5
5
  import pytest
6
6
 
7
7
 
8
+ @pytest.fixture(scope="function")
9
+ def adjust_config_gen_id():
10
+ origin_simple = os.getenv("WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE")
11
+ os.environ["WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE"] = "false"
12
+
13
+ yield
14
+
15
+ os.environ["WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE"] = origin_simple
16
+
17
+
8
18
  def test_gen_id():
9
19
  assert "1354680202" == utils.gen_id("{}")
10
20
  assert "1354680202" == utils.gen_id("{}", sensitive=False)
11
21
 
12
- with patch("ddeutil.workflow.utils.config.gen_id_simple_mode", False):
13
- assert "99914b932bd37a50b983c5e7c90ae93b" == utils.gen_id("{}")
22
+
23
+ def test_gen_id_not_simple(adjust_config_gen_id):
24
+ assert "99914b932bd37a50b983c5e7c90ae93b" == utils.gen_id("{}")
14
25
 
15
26
 
16
27
  def test_filter_func():
@@ -25,8 +25,8 @@ def raise_err(_: str) -> None: # pragma: no cov
25
25
  def test_make_registry_raise():
26
26
  with mock.patch.object(
27
27
  Config,
28
- "regis_filter_str",
29
- "ddeutil.workflow.utils,tests.test_utils_filter,foo.bar",
28
+ "regis_filter",
29
+ ["ddeutil.workflow.utils", "tests.test_utils_filter", "foo.bar"],
30
30
  ):
31
31
  assert isfunction(make_filter_registry()["foo"])
32
32
  assert "bar" == make_filter_registry()["foo"]("")
@@ -59,8 +59,8 @@ def test_get_args_const():
59
59
  def test_map_post_filter():
60
60
  with mock.patch.object(
61
61
  Config,
62
- "regis_filter_str",
63
- "ddeutil.workflow.utils,tests.test_utils_filter,foo.bar",
62
+ "regis_filter",
63
+ ["ddeutil.workflow.utils", "tests.test_utils_filter", "foo.bar"],
64
64
  ):
65
65
  assert "bar" == map_post_filter("demo", ["foo"], make_filter_registry())
66
66
  assert "'bar'" == map_post_filter(
@@ -1 +0,0 @@
1
- __version__: str = "0.0.26"