ddeutil-workflow 0.0.15__tar.gz → 0.0.16__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/PKG-INFO +39 -23
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/README.md +36 -20
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/pyproject.toml +7 -6
- ddeutil_workflow-0.0.16/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/__types.py +18 -6
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/api.py +3 -4
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/cli.py +2 -5
- ddeutil_workflow-0.0.16/src/ddeutil/workflow/conf.py +318 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/job.py +42 -25
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/log.py +5 -8
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/on.py +1 -1
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/repeat.py +2 -5
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/route.py +4 -11
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/scheduler.py +64 -46
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/stage.py +33 -28
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/utils.py +79 -266
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil_workflow.egg-info/PKG-INFO +39 -23
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil_workflow.egg-info/SOURCES.txt +2 -5
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil_workflow.egg-info/requires.txt +2 -2
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test__conf_exist.py +1 -1
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_scheduler.py +12 -7
- ddeutil_workflow-0.0.16/tests/test_stage.py +62 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_stage_bash.py +6 -12
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_stage_py.py +16 -31
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_stage_trigger.py +0 -2
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_utils.py +8 -2
- ddeutil_workflow-0.0.16/tests/test_utils_params.py +59 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_utils_result.py +5 -18
- ddeutil_workflow-0.0.16/tests/test_utils_tag.py +42 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow.py +27 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow_run.py +13 -2
- ddeutil_workflow-0.0.15/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.15/src/ddeutil/workflow/conf.py +0 -45
- ddeutil_workflow-0.0.15/tests/test_conf.py +0 -8
- ddeutil_workflow-0.0.15/tests/test_stage.py +0 -44
- ddeutil_workflow-0.0.15/tests/test_stage_condition.py +0 -14
- ddeutil_workflow-0.0.15/tests/test_workflow_desc.py +0 -11
- ddeutil_workflow-0.0.15/tests/test_workflow_if.py +0 -28
- ddeutil_workflow-0.0.15/tests/test_workflow_run_raise.py +0 -12
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/LICENSE +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/cron.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_cron.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_job_py.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_log.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_on.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_poke.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_stage_hook.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_utils_template.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow_depends.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow_matrix.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow_on.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow_params.py +0 -0
- {ddeutil_workflow-0.0.15 → ddeutil_workflow-0.0.16}/tests/test_workflow_task.py +0 -0
--- ddeutil_workflow-0.0.15/PKG-INFO
+++ ddeutil_workflow-0.0.16/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.15
+Version: 0.0.16
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: ddeutil>=0.4.
-Requires-Dist: ddeutil-io>=0.
+Requires-Dist: ddeutil>=0.4.3
+Requires-Dist: ddeutil-io[yaml]>=0.2.3
 Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: typer<1.0.0,==0.12.5
 Requires-Dist: schedule<2.0.0,==1.2.2
@@ -179,29 +179,31 @@ The main configuration that use to dynamic changing with your propose of this
 application. If any configuration values do not set yet, it will use default value
 and do not raise any error to you.

-| Environment
-|
-| `WORKFLOW_ROOT_PATH`
-| `WORKFLOW_CORE_REGISTRY`
-| `WORKFLOW_CORE_REGISTRY_FILTER`
-| `WORKFLOW_CORE_PATH_CONF`
-| `WORKFLOW_CORE_TIMEZONE`
-| `WORKFLOW_CORE_STAGE_DEFAULT_ID`
-| `WORKFLOW_CORE_STAGE_RAISE_ERROR`
-| `
-| `
-| `
-| `
-| `
-| `
-| `
+| Environment | Component | Default | Description | Remark |
+|:----------------------------------------|-----------|----------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
+| `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application | |
+| `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage | |
+| `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template | |
+| `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files | |
+| `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object | |
+| `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output | |
+| `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution | |
+| `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+| `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | | |
+| `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor | |
+| `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` | Core | true | | |
+| `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode | |
+| `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination | |
+| `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module | |
+| `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel | |
+| `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format | |

 **API Application**:

-| Environment
-|
-| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW`
-| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE`
+| Environment | Component | Default | Description | Remark |
+|:--------------------------------------|-----------|---------|-----------------------------------------------------------------------------------|--------|
+| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging | |
+| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler | |

 ## :rocket: Deployment

@@ -224,3 +226,17 @@ like crontab job but via Python API.
 > [!NOTE]
 > If this package already deploy, it able to use
 > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
+
+### Docker Container
+
+Create Docker image;
+
+```shell
+$ docker build -t ddeutil-workflow:latest -f .container/Dockerfile .
+```
+
+Run the above Docker image;
+
+```shell
+$ docker run -i ddeutil-workflow:latest
+```

--- ddeutil_workflow-0.0.15/README.md
+++ ddeutil_workflow-0.0.16/README.md
@@ -147,29 +147,31 @@ The main configuration that use to dynamic changing with your propose of this
 application. If any configuration values do not set yet, it will use default value
 and do not raise any error to you.

-| Environment
-|
-| `WORKFLOW_ROOT_PATH`
-| `WORKFLOW_CORE_REGISTRY`
-| `WORKFLOW_CORE_REGISTRY_FILTER`
-| `WORKFLOW_CORE_PATH_CONF`
-| `WORKFLOW_CORE_TIMEZONE`
-| `WORKFLOW_CORE_STAGE_DEFAULT_ID`
-| `WORKFLOW_CORE_STAGE_RAISE_ERROR`
-| `
-| `
-| `
-| `
-| `
-| `
-| `
+| Environment | Component | Default | Description | Remark |
+|:----------------------------------------|-----------|----------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
+| `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application | |
+| `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage | |
+| `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template | |
+| `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files | |
+| `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object | |
+| `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output | |
+| `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution | |
+| `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+| `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | | |
+| `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor | |
+| `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` | Core | true | | |
+| `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode | |
+| `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination | |
+| `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module | |
+| `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel | |
+| `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format | |

 **API Application**:

-| Environment
-|
-| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW`
-| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE`
+| Environment | Component | Default | Description | Remark |
+|:--------------------------------------|-----------|---------|-----------------------------------------------------------------------------------|--------|
+| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging | |
+| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler | |

 ## :rocket: Deployment

@@ -192,3 +194,17 @@ like crontab job but via Python API.
 > [!NOTE]
 > If this package already deploy, it able to use
 > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
+
+### Docker Container
+
+Create Docker image;
+
+```shell
+$ docker build -t ddeutil-workflow:latest -f .container/Dockerfile .
+```
+
+Run the above Docker image;
+
+```shell
+$ docker run -i ddeutil-workflow:latest
+```
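
> Editor's note: the configuration table above maps directly onto the new `ddeutil.workflow.conf.Config` object introduced in 0.0.16 (see the conf.py section below). The following is a minimal sketch, not taken from the diff, of supplying a few of the documented variables from Python; the values are illustrative, and they must be set before the package is first imported because `Config` reads the environment at import time.

```python
# Hedged sketch: set documented environment variables, then read them back
# through the module-level `config` object added in 0.0.16.
import os

os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
os.environ["WORKFLOW_CORE_STAGE_RAISE_ERROR"] = "true"
os.environ["WORKFLOW_APP_STOP_BOUNDARY_DELTA"] = '{"minutes": 5, "seconds": 20}'

from ddeutil.workflow.conf import config  # noqa: E402

print(config.tz)                   # timezone built from WORKFLOW_CORE_TIMEZONE
print(config.stage_raise_error)    # True
print(config.stop_boundary_delta)  # timedelta parsed from the JSON string
```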

--- ddeutil_workflow-0.0.15/pyproject.toml
+++ ddeutil_workflow-0.0.16/pyproject.toml
@@ -26,8 +26,8 @@ classifiers = [
 ]
 requires-python = ">=3.9.13"
 dependencies = [
-    "ddeutil>=0.4.
-    "ddeutil-io>=0.
+    "ddeutil>=0.4.3",
+    "ddeutil-io[yaml]>=0.2.3",
     "python-dotenv==1.0.1",
     "typer==0.12.5,<1.0.0",
     "schedule==1.2.2,<2.0.0",
@@ -60,7 +60,7 @@ changelog = "CHANGELOG.md"
 branch = true
 relative_files = true
 concurrency = ["thread", "multiprocessing"]
-source = ["ddeutil.workflow"
+source = ["ddeutil.workflow"]
 omit = [
     "src/ddeutil/workflow/__about__.py",
     # Omit this files because it does not ready to production.
@@ -68,12 +68,13 @@ omit = [
     "src/ddeutil/workflow/cli.py",
     "src/ddeutil/workflow/repeat.py",
     "src/ddeutil/workflow/route.py",
-    "tests/utils.py",
-    "tests/tasks/dummy.py",
 ]

 [tool.coverage.report]
-exclude_lines = [
+exclude_lines = [
+    "raise NotImplementedError",
+    "pragma: no cov",
+]

 [tool.pytest.ini_options]
 pythonpath = ["src"]

--- /dev/null
+++ ddeutil_workflow-0.0.16/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.16"

--- ddeutil_workflow-0.0.15/src/ddeutil/workflow/__types.py
+++ ddeutil_workflow-0.0.16/src/ddeutil/workflow/__types.py
@@ -16,7 +16,7 @@ from re import (
     Match,
     Pattern,
 )
-from typing import Any, Optional, Union
+from typing import Any, Optional, TypedDict, Union

 from typing_extensions import Self

@@ -24,8 +24,11 @@ TupleStr = tuple[str, ...]
 DictData = dict[str, Any]
 DictStr = dict[str, str]
 Matrix = dict[str, Union[list[str], list[int]]]
-
-
+
+
+class Context(TypedDict):
+    params: dict[str, Any]
+    jobs: dict[str, Any]


 @dataclass(frozen=True)
@@ -56,20 +59,24 @@ class Re:
     # Regular expression:
     # - Version 1:
     # \${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
-    # - Version 2 (2024-09-30):
+    # - Version 2: (2024-09-30):
     # \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
+    # - Version 3: (2024-10-05):
+    # \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
     #
     # Examples:
     # - ${{ params.data_dt }}
     # - ${{ params.source.table }}
+    # - ${{ params.datetime | fmt('%Y-%m-%d') }}
+    # - ${{ params.source?.schema }}
     #
     __re_caller: str = r"""
         \$
         {{
         \s*
         (?P<caller>
-            (?P<caller_prefix>(?:[a-zA-Z_-]
-            (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]
+            (?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)
+            (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??)
         )
         \s*
         (?P<post_filters>
@@ -109,5 +116,10 @@ class Re:

     @classmethod
     def finditer_caller(cls, value) -> Iterator[CallerRe]:
+        """Generate CallerRe object that create from matching object that
+        extract with re.finditer function.
+
+        :rtype: Iterator[CallerRe]
+        """
         for found in cls.RE_CALLER.finditer(value):
             yield CallerRe.from_regex(found)
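
> Editor's note: version 3 of the caller pattern adds an optional-access marker (`?.`) to the template syntax, as the new example `${{ params.source?.schema }}` shows. Below is a small hedged sketch of exercising the new pattern; the attribute names on `CallerRe` (`caller`, `post_filters`) are assumptions inferred from the regex group names, not confirmed by this diff.

```python
# Hedged sketch: iterate template callers with the new optional-access syntax.
# CallerRe attribute names are assumed to mirror the named regex groups.
from ddeutil.workflow.__types import Re

template = "${{ params.source?.schema }} and ${{ params.datetime | fmt('%Y-%m-%d') }}"

for found in Re.finditer_caller(template):
    print(found.caller, "->", found.post_filters)
```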

--- ddeutil_workflow-0.0.15/src/ddeutil/workflow/api.py
+++ ddeutil_workflow-0.0.16/src/ddeutil/workflow/api.py
@@ -7,7 +7,6 @@ from __future__ import annotations

 import asyncio
 import contextlib
-import os
 import uuid
 from collections.abc import AsyncIterator
 from datetime import datetime, timedelta
@@ -15,7 +14,6 @@ from queue import Empty, Queue
 from threading import Thread
 from typing import TypedDict

-from ddeutil.core import str2bool
 from dotenv import load_dotenv
 from fastapi import FastAPI
 from fastapi.middleware.gzip import GZipMiddleware
@@ -23,6 +21,7 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel

 from .__about__ import __version__
+from .conf import config
 from .log import get_logger
 from .repeat import repeat_at, repeat_every
 from .scheduler import WorkflowTaskData
@@ -131,12 +130,12 @@ async def message_upper(payload: Payload):
     return await get_result(request_id)


-if
+if config.enable_route_workflow:
     from .route import workflow

     app.include_router(workflow)

-if
+if config.enable_route_schedule:
     from .route import schedule
     from .scheduler import workflow_task


--- ddeutil_workflow-0.0.15/src/ddeutil/workflow/cli.py
+++ ddeutil_workflow-0.0.16/src/ddeutil/workflow/cli.py
@@ -6,15 +6,14 @@
 from __future__ import annotations

 import json
-import os
 from datetime import datetime
 from enum import Enum
 from typing import Annotated, Optional
-from zoneinfo import ZoneInfo

 from ddeutil.core import str2list
 from typer import Argument, Option, Typer

+from .conf import config
 from .log import get_logger

 logger = get_logger("ddeutil.workflow")
@@ -73,9 +72,7 @@ def schedule(
     excluded: list[str] = str2list(excluded) if excluded else []
     externals: str = externals or "{}"
     if stop:
-        stop: datetime = stop.astimezone(
-            tz=ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
-        )
+        stop: datetime = stop.astimezone(tz=config.tz)

     from .scheduler import workflow_runner


--- /dev/null
+++ ddeutil_workflow-0.0.16/src/ddeutil/workflow/conf.py
@@ -0,0 +1,318 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import json
+import os
+from collections.abc import Iterator
+from datetime import timedelta
+from functools import cached_property
+from pathlib import Path
+from typing import Any, TypeVar
+from zoneinfo import ZoneInfo
+
+from ddeutil.core import import_string, str2bool
+from ddeutil.io import Paths, PathSearch, YamlFlResolve
+from dotenv import load_dotenv
+from pydantic import BaseModel, Field
+from pydantic.functional_validators import model_validator
+
+load_dotenv()
+env = os.getenv
+DictData = dict[str, Any]
+AnyModel = TypeVar("AnyModel", bound=BaseModel)
+AnyModelType = type[AnyModel]
+
+
+class Config:
+    """Config object for keeping application configuration on current session
+    without changing when if the application still running.
+    """
+
+    # NOTE: Core
+    root_path: Path = Path(os.getenv("WORKFLOW_ROOT_PATH", "."))
+    tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
+    workflow_id_simple_mode: bool = str2bool(
+        os.getenv("WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE", "true")
+    )
+
+    # NOTE: Logging
+    debug: bool = str2bool(os.getenv("WORKFLOW_LOG_DEBUG_MODE", "true"))
+    enable_write_log: bool = str2bool(
+        os.getenv("WORKFLOW_LOG_ENABLE_WRITE", "false")
+    )
+
+    # NOTE: Stage
+    stage_raise_error: bool = str2bool(
+        env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
+    )
+    stage_default_id: bool = str2bool(
+        env("WORKFLOW_CORE_STAGE_DEFAULT_ID", "false")
+    )
+
+    # NOTE: Job
+    job_default_id: bool = str2bool(
+        env("WORKFLOW_CORE_JOB_DEFAULT_ID", "false")
+    )
+
+    # NOTE: Workflow
+    max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
+    max_poking_pool_worker: int = int(
+        os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
+    )
+
+    # NOTE: Schedule App
+    max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
+    max_schedule_per_process: int = int(
+        env("WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS", "100")
+    )
+    __stop_boundary_delta: str = env(
+        "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
+    )
+
+    # NOTE: API
+    enable_route_workflow: bool = str2bool(
+        os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
+    )
+    enable_route_schedule: bool = str2bool(
+        os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
+    )
+
+    def __init__(self):
+        if self.max_job_parallel < 0:
+            raise ValueError(
+                f"``MAX_JOB_PARALLEL`` should more than 0 but got "
+                f"{self.max_job_parallel}."
+            )
+        try:
+            self.stop_boundary_delta: timedelta = timedelta(
+                **json.loads(self.__stop_boundary_delta)
+            )
+        except Exception as err:
+            raise ValueError(
+                "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
+                f"timedelta with {self.__stop_boundary_delta}."
+            ) from err
+
+    def refresh_dotenv(self):
+        """Reload environment variables from the current stage."""
+        self.tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
+        self.stage_raise_error: bool = str2bool(
+            env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
+        )
+
+
+class Engine(BaseModel):
+    """Engine Pydantic Model for keeping application path."""
+
+    paths: Paths = Field(default_factory=Paths)
+    registry: list[str] = Field(
+        default_factory=lambda: ["ddeutil.workflow"],  # pragma: no cover
+    )
+    registry_filter: list[str] = Field(
+        default_factory=lambda: ["ddeutil.workflow.utils"],  # pragma: no cover
+    )
+
+    @model_validator(mode="before")
+    def __prepare_registry(cls, values: DictData) -> DictData:
+        """Prepare registry value that passing with string type. It convert the
+        string type to list of string.
+        """
+        if (_regis := values.get("registry")) and isinstance(_regis, str):
+            values["registry"] = [_regis]
+        if (_regis_filter := values.get("registry_filter")) and isinstance(
+            _regis_filter, str
+        ):
+            values["registry_filter"] = [_regis_filter]
+        return values
+
+
+class ConfParams(BaseModel):
+    """Params Model"""
+
+    engine: Engine = Field(
+        default_factory=Engine,
+        description="A engine mapping values.",
+    )
+
+
+def load_config() -> ConfParams:
+    """Load Config data from ``workflows-conf.yaml`` file.
+
+    Configuration Docs:
+    ---
+    :var engine.registry:
+    :var engine.registry_filter:
+    :var paths.root:
+    :var paths.conf:
+    """
+    root_path: str = config.root_path
+
+    regis: list[str] = ["ddeutil.workflow"]
+    if regis_env := os.getenv("WORKFLOW_CORE_REGISTRY"):
+        regis = [r.strip() for r in regis_env.split(",")]
+
+    regis_filter: list[str] = ["ddeutil.workflow.utils"]
+    if regis_filter_env := os.getenv("WORKFLOW_CORE_REGISTRY_FILTER"):
+        regis_filter = [r.strip() for r in regis_filter_env.split(",")]
+
+    conf_path: str = (
+        f"{root_path}/{conf_env}"
+        if (conf_env := os.getenv("WORKFLOW_CORE_PATH_CONF"))
+        else None
+    )
+    return ConfParams.model_validate(
+        obj={
+            "engine": {
+                "registry": regis,
+                "registry_filter": regis_filter,
+                "paths": {
+                    "root": root_path,
+                    "conf": conf_path,
+                },
+            },
+        }
+    )
+
+
+class SimLoad:
+    """Simple Load Object that will search config data by given some identity
+    value like name of workflow or on.
+
+    :param name: A name of config data that will read by Yaml Loader object.
+    :param params: A Params model object.
+    :param externals: An external parameters
+
+    Noted:
+
+        The config data should have ``type`` key for modeling validation that
+        make this loader know what is config should to do pass to.
+
+        ... <identity-key>:
+        ...     type: <importable-object>
+        ...     <key-data>: <value-data>
+        ...     ...
+
+    """
+
+    def __init__(
+        self,
+        name: str,
+        params: ConfParams,
+        externals: DictData | None = None,
+    ) -> None:
+        self.data: DictData = {}
+        for file in PathSearch(params.engine.paths.conf).files:
+            if any(file.suffix.endswith(s) for s in (".yml", ".yaml")) and (
+                data := YamlFlResolve(file).read().get(name, {})
+            ):
+                self.data = data
+
+        # VALIDATE: check the data that reading should not empty.
+        if not self.data:
+            raise ValueError(f"Config {name!r} does not found on conf path")
+
+        self.conf_params: ConfParams = params
+        self.externals: DictData = externals or {}
+        self.data.update(self.externals)
+
+    @classmethod
+    def finds(
+        cls,
+        obj: object,
+        params: ConfParams,
+        *,
+        include: list[str] | None = None,
+        exclude: list[str] | None = None,
+    ) -> Iterator[tuple[str, DictData]]:
+        """Find all data that match with object type in config path. This class
+        method can use include and exclude list of identity name for filter and
+        adds-on.
+
+        :param obj: A object that want to validate matching before return.
+        :param params:
+        :param include:
+        :param exclude:
+        :rtype: Iterator[tuple[str, DictData]]
+        """
+        exclude: list[str] = exclude or []
+        for file in PathSearch(params.engine.paths.conf).files:
+            if any(file.suffix.endswith(s) for s in (".yml", ".yaml")) and (
+                values := YamlFlResolve(file).read()
+            ):
+                for key, data in values.items():
+                    if key in exclude:
+                        continue
+                    if issubclass(get_type(data["type"], params), obj) and (
+                        include is None or all(i in data for i in include)
+                    ):
+                        yield key, data
+
+    @cached_property
+    def type(self) -> AnyModelType:
+        """Return object of string type which implement on any registry. The
+        object type.
+
+        :rtype: AnyModelType
+        """
+        if not (_typ := self.data.get("type")):
+            raise ValueError(
+                f"the 'type' value: {_typ} does not exists in config data."
+            )
+        return get_type(_typ, self.conf_params)
+
+
+class Loader(SimLoad):
+    """Loader Object that get the config `yaml` file from current path.
+
+    :param name: A name of config data that will read by Yaml Loader object.
+    :param externals: An external parameters
+    """
+
+    @classmethod
+    def finds(
+        cls,
+        obj: object,
+        *,
+        include: list[str] | None = None,
+        exclude: list[str] | None = None,
+        **kwargs,
+    ) -> DictData:
+        """Override the find class method from the Simple Loader object.
+
+        :param obj: A object that want to validate matching before return.
+        :param include:
+        :param exclude:
+        """
+        return super().finds(
+            obj=obj, params=load_config(), include=include, exclude=exclude
+        )
+
+    def __init__(self, name: str, externals: DictData) -> None:
+        super().__init__(name, load_config(), externals)
+
+
+def get_type(t: str, params: ConfParams) -> AnyModelType:
+    """Return import type from string importable value in the type key.
+
+    :param t: A importable type string.
+    :param params: A config parameters that use registry to search this
+        type.
+    :rtype: AnyModelType
+    """
+    try:
+        # NOTE: Auto adding module prefix if it does not set
+        return import_string(f"ddeutil.workflow.{t}")
+    except ModuleNotFoundError:
+        for registry in params.engine.registry:
+            try:
+                return import_string(f"{registry}.{t}")
+            except ModuleNotFoundError:
+                continue
+        return import_string(f"{t}")
+
+
+config = Config()
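
> Editor's note: the new `conf.py` replaces the 45-line module removed from 0.0.15 and now owns both the environment-driven `Config` object and the YAML `Loader`. Below is a short hedged usage sketch; the YAML file name, its layout, and the `type: Workflow` value are illustrative assumptions, not taken from this diff.

```python
# Hedged usage sketch for the new conf module in 0.0.16.
from ddeutil.workflow.conf import Loader, config

# Environment-driven settings are read once into the module-level `config`.
print(config.root_path, config.max_job_parallel, config.tz)

# Assuming ./conf/demo.yaml contains an entry keyed by its identity name:
#
#   wf-demo:
#     type: Workflow          # importable type key required by SimLoad/Loader
#     params: {run-date: datetime}
#
# the Loader resolves it by name and imports the class named in `type`:
loader = Loader("wf-demo", externals={})
print(loader.type)  # class resolved via get_type() and the registry list
print(loader.data)  # raw mapping from the YAML file, merged with the externals
```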