ddeutil-workflow 0.0.41__tar.gz → 0.0.43__tar.gz
This diff compares the content of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/PKG-INFO +29 -32
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/README.md +27 -30
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/pyproject.toml +3 -1
- ddeutil_workflow-0.0.43/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/__init__.py +5 -1
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/api.py +7 -7
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/schedules.py +5 -5
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/workflows.py +2 -2
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/conf.py +39 -28
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/cron.py +12 -13
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/exceptions.py +13 -3
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/job.py +40 -42
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/logs.py +33 -6
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/params.py +77 -18
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/result.py +36 -8
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/reusables.py +16 -13
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/scheduler.py +32 -37
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/stages.py +285 -120
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/utils.py +0 -1
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/workflow.py +127 -90
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil_workflow.egg-info/PKG-INFO +29 -32
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -2
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil_workflow.egg-info/requires.txt +1 -1
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_conf.py +18 -4
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_cron_on.py +20 -20
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_job.py +3 -3
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_job_exec.py +11 -23
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_job_exec_strategy.py +6 -18
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_job_strategy.py +1 -3
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_params.py +27 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_reusables_call_tag.py +1 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_schedule.py +8 -8
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_schedule_pending.py +1 -1
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_schedule_tasks.py +3 -3
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_stage.py +16 -1
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_stage_handler_exec.py +107 -25
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_workflow.py +18 -9
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_workflow_exec.py +134 -82
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_workflow_exec_poke.py +5 -5
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_workflow_exec_release.py +4 -4
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_workflow_task.py +7 -9
- ddeutil_workflow-0.0.41/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.41/src/ddeutil/workflow/context.py +0 -61
- ddeutil_workflow-0.0.41/tests/test_context.py +0 -136
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/LICENSE +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/log.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/repeat.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/job.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/logs.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_logs_audit.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_logs_trace.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_reusables_template.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_reusables_template_filter.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_schedule_workflow.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_scheduler_control.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/tests/test_workflow_exec_job.py +0 -0
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.41
+Version: 0.0.43
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -22,7 +22,7 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: ddeutil>=0.4.6
+Requires-Dist: ddeutil[checksum]>=0.4.6
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.10
 Requires-Dist: pydantic==2.11.1
 Requires-Dist: python-dotenv==1.1.0
@@ -212,7 +212,7 @@ execution time such as `run-date` should change base on that workflow running da
 ```python
 from ddeutil.workflow import Workflow, Result

-workflow: Workflow = Workflow.
+workflow: Workflow = Workflow.from_conf('run-py-local')
 result: Result = workflow.execute(
     params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
 )
@@ -246,7 +246,7 @@ from ddeutil.workflow import Schedule

 (
     Schedule
-    .
+    .from_conf("schedule-run-local-wf")
     .pending(stop=None)
 )
 ```
@@ -261,34 +261,31 @@ it will use default value and do not raise any error to you.
 > The config value that you will set on the environment should combine with
 > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

-| Name | Component | Default | Description |
-
-| **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **
-| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
-| **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
-| **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+| Name | Component | Default | Override | Description |
+|:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:--------:|:-------------------------------------------------------------------------------------------------------------------|
+| **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
+| **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
+| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
+| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
+| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
+| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
+| **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
+| **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
+| **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
+| **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
+| **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
+| **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
+| **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
+| **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
+| **TRACE_ENABLE_WRITE** | Log | `false` | No | |
+| **AUDIT_PATH** | Log | `./audits` | No | |
+| **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
+| **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
+| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
+| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |

 **API Application**:

{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/README.md
RENAMED
@@ -168,7 +168,7 @@ execution time such as `run-date` should change base on that workflow running da
 ```python
 from ddeutil.workflow import Workflow, Result

-workflow: Workflow = Workflow.
+workflow: Workflow = Workflow.from_conf('run-py-local')
 result: Result = workflow.execute(
     params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
 )
@@ -202,7 +202,7 @@ from ddeutil.workflow import Schedule

 (
     Schedule
-    .
+    .from_conf("schedule-run-local-wf")
     .pending(stop=None)
 )
 ```
@@ -217,34 +217,31 @@ it will use default value and do not raise any error to you.
 > The config value that you will set on the environment should combine with
 > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

-| Name | Component | Default | Description |
-
-| **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **
-| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
-| **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
-| **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+| Name | Component | Default | Override | Description |
+|:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:--------:|:-------------------------------------------------------------------------------------------------------------------|
+| **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
+| **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
+| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
+| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
+| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
+| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
+| **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
+| **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
+| **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
+| **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
+| **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
+| **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
+| **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
+| **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
+| **TRACE_ENABLE_WRITE** | Log | `false` | No | |
+| **AUDIT_PATH** | Log | `./audits` | No | |
+| **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
+| **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
+| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
+| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |

 **API Application**:

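The reworked configuration table follows the `WORKFLOW_{component}_{name}` naming rule quoted above. A minimal sketch of overriding two listed values from Python, assuming the `WORKFLOW_` prefixing and the `config.log_path` property that appears later in this diff; this is illustrative only and not verified against the release:

```python
import os

# Assumed naming per the table: WORKFLOW_{component}_{name}, upper case.
os.environ["WORKFLOW_CORE_STAGE_RAISE_ERROR"] = "true"
os.environ["WORKFLOW_LOG_TRACE_PATH"] = "./logs/dev"

# The Config properties read the environment on access, so the overrides
# above should be visible through the shared config object (a sketch only).
from ddeutil.workflow.conf import config

print(config.log_path)  # expected: a Path pointing at ./logs/dev
```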
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/pyproject.toml
RENAMED
@@ -26,7 +26,7 @@ classifiers = [
 ]
 requires-python = ">=3.9.13"
 dependencies = [
-    "ddeutil>=0.4.6",
+    "ddeutil[checksum]>=0.4.6",
     "ddeutil-io[yaml,toml]>=0.2.10",
     "pydantic==2.11.1",
     "python-dotenv==1.1.0",
@@ -99,11 +99,13 @@ exclude_lines = [

 [tool.pytest.ini_options]
 pythonpath = ["src"]
+asyncio_default_fixture_loop_scope = "fuction"
 # NOTE: You can deslect multiple markers by '-m "not (poke or api)"'
 markers = [
     "poke: marks tests as slow by poking (deselect with '-m \"not poke\"')",
     "schedule: marks tests as schedule (deselect with '-m \"not schedule\"')",
     "api: marks tests as api (deselect with '-m \"not api\"')",
+    "asyncio: marks async testcases",
 ]
 console_output_style = "count"
 addopts = [
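The new `asyncio` marker and the `asyncio_default_fixture_loop_scope` option line up with pytest-asyncio style tests. A minimal sketch of a test that would carry the marker, assuming pytest-asyncio is installed; the coroutine under test is purely illustrative and not part of the package:

```python
import asyncio

import pytest


async def echo_after_tick(value: str) -> str:
    """Illustrative coroutine: yield control once, then return the value."""
    await asyncio.sleep(0)
    return value


@pytest.mark.asyncio
async def test_echo_after_tick():
    assert await echo_after_tick("ok") == "ok"
```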
ddeutil_workflow-0.0.43/src/ddeutil/workflow/__about__.py
ADDED
@@ -0,0 +1 @@
+__version__: str = "0.0.43"
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/__init__.py
RENAMED
@@ -4,7 +4,7 @@
 # license information.
 # ------------------------------------------------------------------------------
 from .__cron import CronJob, CronRunner
-from .__types import Re
+from .__types import DictData, DictStr, Matrix, Re, TupleStr
 from .conf import (
     Config,
     Loader,
@@ -47,6 +47,10 @@ from .params import (
     StrParam,
 )
 from .result import (
+    FAILED,
+    SKIP,
+    SUCCESS,
+    WAIT,
     Result,
     Status,
 )
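With the status constants now exported from the package root, a result check might look like the sketch below. It reuses the `run-py-local` example from the README and assumes the execute result exposes the finished status on a `status` attribute, which is not shown explicitly in this diff:

```python
from ddeutil.workflow import FAILED, SUCCESS, Result, Workflow

workflow: Workflow = Workflow.from_conf('run-py-local')
result: Result = workflow.execute(
    params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
)

# Assumption: result.status carries one of the exported Status values.
if result.status == SUCCESS:
    print("workflow finished successfully")
elif result.status == FAILED:
    print("a job or stage in the workflow raised an error")
```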
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/api.py
RENAMED
@@ -20,7 +20,7 @@ from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse

 from ..__about__ import __version__
-from ..conf import config, get_logger
+from ..conf import api_config, config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
@@ -96,24 +96,24 @@ async def health():


 # NOTE Add the jobs and logs routes by default.
-app.include_router(job, prefix=
-app.include_router(log, prefix=
+app.include_router(job, prefix=api_config.prefix_path)
+app.include_router(log, prefix=api_config.prefix_path)


 # NOTE: Enable the workflows route.
-if
+if api_config.enable_route_workflow:
     from .routes import workflow

-    app.include_router(workflow, prefix=
+    app.include_router(workflow, prefix=api_config.prefix_path)


 # NOTE: Enable the schedules route.
-if
+if api_config.enable_route_schedule:
     from ..logs import get_audit
     from ..scheduler import schedule_task
     from .routes import schedule

-    app.include_router(schedule, prefix=
+    app.include_router(schedule, prefix=api_config.prefix_path)


 @schedule.on_event("startup")
 @repeat_at(cron="* * * * *", delay=2)
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/schedules.py
RENAMED
@@ -28,7 +28,7 @@ schedule_route = APIRouter(
 async def get_schedules(name: str):
     """Get schedule object."""
     try:
-        schedule: Schedule = Schedule.
+        schedule: Schedule = Schedule.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
@@ -51,7 +51,7 @@ async def get_deploy_schedulers(request: Request):
 @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-        schedule = Schedule.
+        schedule = Schedule.from_conf(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
         for workflow in schedule.workflows:
             getter.append(
@@ -94,7 +94,7 @@ async def add_deploy_scheduler(request: Request, name: str):

     # NOTE: Create a pair of workflow and on from schedule model.
     try:
-        schedule: Schedule = Schedule.
+        schedule: Schedule = Schedule.from_conf(name)
     except ValueError as err:
         request.state.scheduler.remove(name)
         logger.exception(err)
@@ -107,7 +107,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         schedule.tasks(
             start_date_waiting,
             queue=request.state.workflow_queue,
-
+            extras={},
         ),
     )
     return {
@@ -124,7 +124,7 @@ async def del_deploy_scheduler(request: Request, name: str):
     # NOTE: Remove current schedule name from the state.
     request.state.scheduler.remove(name)

-    schedule: Schedule = Schedule.
+    schedule: Schedule = Schedule.from_conf(name)

     for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
         if task in request.state.workflow_tasks:
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/api/routes/workflows.py
RENAMED
@@ -44,7 +44,7 @@ async def get_workflows() -> DictData:
 async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError as err:
         logger.exception(err)
         raise HTTPException(
@@ -69,7 +69,7 @@ class ExecutePayload(BaseModel):
 async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/conf.py
RENAMED
@@ -12,6 +12,7 @@ from collections.abc import Iterator
 from datetime import timedelta
 from functools import cached_property, lru_cache
 from pathlib import Path
+from typing import Optional, TypeVar
 from zoneinfo import ZoneInfo

 from ddeutil.core import str2bool
@@ -20,6 +21,7 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore

 from .__types import DictData, TupleStr

+T = TypeVar("T")
 PREFIX: str = "WORKFLOW"


@@ -29,12 +31,14 @@ def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov


 __all__: TupleStr = (
+    "api_config",
     "env",
     "get_logger",
     "Config",
     "SimLoad",
     "Loader",
     "config",
+    "dynamic",
 )


@@ -99,7 +103,7 @@ class Config(BaseConfig):  # pragma: no cov

         :rtype: list[str]
         """
-        regis_call_str: str = env("
+        regis_call_str: str = env("CORE_REGISTRY_CALLER", ".")
         return [r.strip() for r in regis_call_str.split(",")]

     @property
@@ -116,7 +120,7 @@ class Config(BaseConfig):  # pragma: no cov
     # NOTE: Log
     @property
     def log_path(self) -> Path:
-        return Path(env("
+        return Path(env("LOG_TRACE_PATH", "./logs"))

     @property
     def debug(self) -> bool:
@@ -145,16 +149,15 @@ class Config(BaseConfig):  # pragma: no cov

     @property
     def enable_write_log(self) -> bool:
-        return str2bool(env("
+        return str2bool(env("LOG_TRACE_ENABLE_WRITE", "false"))

-    # NOTE: Audit Log
     @property
     def audit_path(self) -> Path:
-        return Path(env("
+        return Path(env("LOG_AUDIT_PATH", "./audits"))

     @property
     def enable_write_audit(self) -> bool:
-        return str2bool(env("
+        return str2bool(env("LOG_AUDIT_ENABLE_WRITE", "false"))

     @property
     def log_datetime_format(self) -> str:
@@ -178,29 +181,12 @@ class Config(BaseConfig):  # pragma: no cov
     def job_default_id(self) -> bool:
         return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))

-    # NOTE: Workflow
-    @property
-    def max_job_parallel(self) -> int:
-        max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
-
-        # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
-        if max_job_parallel < 0:
-            raise ValueError(
-                f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
-                f"{max_job_parallel}."
-            )
-        return max_job_parallel
-
-    @property
-    def max_job_exec_timeout(self) -> int:
-        return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
-
-    @property
-    def max_poking_pool_worker(self) -> int:
-        return int(env("CORE_MAX_NUM_POKING", "4"))
-
     @property
     def max_on_per_workflow(self) -> int:
+        """The maximum on value that store in workflow model.
+
+        :rtype: int
+        """
         return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))

     @property
@@ -229,7 +215,9 @@ class Config(BaseConfig):  # pragma: no cov
             f"timedelta with {stop_boundary_delta_str}."
         ) from err

-
+
+class APIConfig:
+
     @property
     def prefix_path(self) -> str:
         return env("API_PREFIX_PATH", "/api/v1")
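The API settings now live on a separate `APIConfig` object (exposed as `api_config` later in this diff). A small sketch of overriding the route prefix, assuming the environment name combines the module's `WORKFLOW` prefix with the `API_PREFIX_PATH` key seen in the `env(...)` call above; the full variable name is an assumption:

```python
import os

# Assumed full variable name; the diff only shows env("API_PREFIX_PATH", ...).
os.environ["WORKFLOW_API_PREFIX_PATH"] = "/api/v2"

from ddeutil.workflow.conf import api_config

print(api_config.prefix_path)  # "/api/v2" if the prefixing assumption holds
```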
@@ -389,6 +377,29 @@ class Loader(SimLoad):


 config: Config = Config()
+api_config: APIConfig = APIConfig()
+
+
+def dynamic(
+    key: Optional[str] = None,
+    *,
+    f: Optional[T] = None,
+    extras: Optional[DictData] = None,
+) -> Optional[T]:
+    """Dynamic get config if extra value was passed at run-time.
+
+    :param key: (str) A config key that get from Config object.
+    :param f: An inner config function scope.
+    :param extras: An extra values that pass at run-time.
+    """
+    rsx: Optional[T] = extras[key] if extras and key in extras else None
+    rs: Optional[T] = f or getattr(config, key, None)
+    if rsx is not None and not isinstance(rsx, type(rs)):
+        raise TypeError(
+            f"Type of config {key!r} from extras: {rsx!r} does not valid "
+            f"as config {type(rs)}."
+        )
+    return rsx or rs


 @lru_cache
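Based on the implementation above, `dynamic` prefers a run-time `extras` value over the matching `Config` attribute and type-checks the override. A short sketch using `max_on_per_workflow`, a real property shown earlier in this diff; its default of `5` assumes no environment override is set:

```python
from ddeutil.workflow.conf import dynamic

# Falls back to the Config property when no extras are given.
assert dynamic("max_on_per_workflow") == 5

# A run-time override passed via extras wins over the Config value.
assert dynamic("max_on_per_workflow", extras={"max_on_per_workflow": 10}) == 10

# A value of the wrong type raises TypeError per the isinstance check above.
# dynamic("max_on_per_workflow", extras={"max_on_per_workflow": "ten"})
```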
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/cron.py
RENAMED
@@ -72,17 +72,16 @@ class On(BaseModel):

     model_config = ConfigDict(arbitrary_types_allowed=True)

-    # NOTE: This is fields of the base schedule.
     extras: Annotated[
         DictData,
         Field(
             default_factory=dict,
-            description="An extras mapping parameters",
+            description="An extras mapping parameters.",
         ),
     ]
     cronjob: Annotated[
         CronJob,
-        Field(description="
+        Field(description="A Cronjob object of this schedule."),
     ]
     tz: Annotated[
         str,
@@ -93,12 +92,12 @@ class On(BaseModel):
     ] = "Etc/UTC"

     @classmethod
-    def from_value(cls, value: DictStr,
+    def from_value(cls, value: DictStr, extras: DictData) -> Self:
         """Constructor from values that will generate crontab by function.

         :param value: A mapping value that will generate crontab before create
             schedule model.
-        :param
+        :param extras: An extras parameter that will keep in extras.
         """
         passing: DictStr = {}
         if "timezone" in value:
@@ -106,22 +105,22 @@ class On(BaseModel):
         passing["cronjob"] = interval2crontab(
             **{v: value[v] for v in value if v in ("interval", "day", "time")}
         )
-        return cls(extras=
+        return cls(extras=extras | passing.pop("extras", {}), **passing)

     @classmethod
-    def
+    def from_conf(
         cls,
         name: str,
-
+        extras: DictData | None = None,
     ) -> Self:
         """Constructor from the name of config that will use loader object for
         getting the data.

         :param name: A name of config that will get from loader.
-        :param
+        :param extras: An extra parameter that will keep in extras.
         """
-
-        loader: Loader = Loader(name, externals=
+        extras: DictData = extras or {}
+        loader: Loader = Loader(name, externals=extras)

         # NOTE: Validate the config type match with current connection model
         if loader.type != cls.__name__:
|
@@ -138,7 +137,7 @@ class On(BaseModel):
|
|
138
137
|
if v in ("interval", "day", "time")
|
139
138
|
}
|
140
139
|
),
|
141
|
-
extras=
|
140
|
+
extras=extras | loader_data.pop("extras", {}),
|
142
141
|
**loader_data,
|
143
142
|
)
|
144
143
|
)
|
@@ -149,7 +148,7 @@ class On(BaseModel):
|
|
149
148
|
return cls.model_validate(
|
150
149
|
obj=dict(
|
151
150
|
cronjob=loader_data.pop("cronjob"),
|
152
|
-
extras=
|
151
|
+
extras=extras | loader_data.pop("extras", {}),
|
153
152
|
**loader_data,
|
154
153
|
)
|
155
154
|
)
|
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.43}/src/ddeutil/workflow/exceptions.py
RENAMED
@@ -9,10 +9,20 @@ annotate for handle error only.
 """
 from __future__ import annotations

-from typing import
+from typing import TypedDict

+ErrorData = TypedDict(
+    "ErrorData",
+    {
+        "class": Exception,
+        "name": str,
+        "message": str,
+    },
+)

-def to_dict(exception: Exception) -> dict[str, Any]:  # pragma: no cov
+
+def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
+    """Create dict data from exception instance."""
     return {
         "class": exception,
         "name": exception.__class__.__name__,
@@ -22,7 +32,7 @@ def to_dict(exception: Exception) -> dict[str, Any]:  # pragma: no cov

 class BaseWorkflowException(Exception):

-    def to_dict(self) ->
+    def to_dict(self) -> ErrorData:
         return to_dict(self)


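The new `ErrorData` TypedDict gives `to_dict` (and `BaseWorkflowException.to_dict`) a concrete return shape. A minimal sketch of converting a caught exception; the failing helper here is only illustrative:

```python
from ddeutil.workflow.exceptions import ErrorData, to_dict


def load_partition(name: str) -> None:
    """Illustrative helper that always fails."""
    raise ValueError(f"partition {name!r} does not exist")


try:
    load_partition("2024-01-01")
except ValueError as err:
    data: ErrorData = to_dict(err)
    print(data["name"])     # "ValueError"
    print(data["message"])  # the exception text (assuming it is str(err))
```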