ddeutil-workflow 0.0.31__tar.gz → 0.0.33__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. {ddeutil_workflow-0.0.31/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.33}/PKG-INFO +46 -27
  2. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/README.md +45 -26
  3. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/pyproject.toml +1 -0
  4. ddeutil_workflow-0.0.33/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/__init__.py +4 -2
  6. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/api/api.py +4 -6
  7. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/api/route.py +8 -7
  8. ddeutil_workflow-0.0.33/src/ddeutil/workflow/audit.py +261 -0
  9. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/conf.py +122 -265
  10. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/job.py +61 -52
  11. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/params.py +5 -2
  12. ddeutil_workflow-0.0.33/src/ddeutil/workflow/result.py +155 -0
  13. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/scheduler.py +118 -45
  14. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/stage.py +75 -56
  15. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/templates.py +13 -4
  16. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/workflow.py +63 -64
  17. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33/src/ddeutil_workflow.egg-info}/PKG-INFO +46 -27
  18. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil_workflow.egg-info/SOURCES.txt +5 -3
  19. ddeutil_workflow-0.0.31/tests/test_conf_log.py → ddeutil_workflow-0.0.33/tests/test_audit.py +14 -13
  20. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_conf.py +7 -6
  21. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_hook_tag.py +22 -0
  22. ddeutil_workflow-0.0.31/tests/test_job_exec_py.py → ddeutil_workflow-0.0.33/tests/test_job_exec.py +88 -12
  23. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_result.py +14 -21
  24. ddeutil_workflow-0.0.33/tests/test_schedule_pending.py +13 -0
  25. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_workflow_task.py +3 -2
  26. ddeutil_workflow-0.0.31/src/ddeutil/workflow/__about__.py +0 -1
  27. ddeutil_workflow-0.0.31/src/ddeutil/workflow/result.py +0 -103
  28. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/LICENSE +0 -0
  29. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/setup.cfg +0 -0
  30. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/__cron.py +0 -0
  31. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/__types.py +0 -0
  32. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/api/__init__.py +0 -0
  33. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/api/repeat.py +0 -0
  34. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/cron.py +0 -0
  35. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/exceptions.py +0 -0
  36. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/hook.py +0 -0
  37. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil/workflow/utils.py +0 -0
  38. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  39. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  40. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  41. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test__cron.py +0 -0
  42. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test__regex.py +0 -0
  43. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_cron_on.py +0 -0
  44. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_job.py +0 -0
  45. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_job_exec_strategy.py +0 -0
  46. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_job_strategy.py +0 -0
  47. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_params.py +0 -0
  48. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_release.py +0 -0
  49. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_release_queue.py +0 -0
  50. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_schedule.py +0 -0
  51. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_schedule_control.py +0 -0
  52. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_schedule_tasks.py +0 -0
  53. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_schedule_workflow.py +0 -0
  54. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_stage.py +0 -0
  55. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_stage_handler_exec.py +0 -0
  56. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_templates.py +0 -0
  57. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_templates_filter.py +0 -0
  58. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_utils.py +0 -0
  59. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_workflow.py +0 -0
  60. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_workflow_exec.py +0 -0
  61. ddeutil_workflow-0.0.31/tests/test_workflow_job_exec.py → ddeutil_workflow-0.0.33/tests/test_workflow_exec_job.py +0 -0
  62. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_workflow_exec_poke.py +0 -0
  63. {ddeutil_workflow-0.0.31 → ddeutil_workflow-0.0.33}/tests/test_workflow_exec_release.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.31
+Version: 0.0.33
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -92,7 +92,7 @@ flowchart LR

     subgraph Data Context
         D@{ shape: processes, label: "Logs" }
-        E@{ shape: lin-cyl, label: "Metadata" }
+        E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end

     subgraph Git Context
@@ -110,7 +110,7 @@ flowchart LR
     E -.->|read| G
 ```

-> [!NOTE]
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -201,7 +201,8 @@ result: Result = workflow.execute(
 )
 ```

-So, this package provide the `Schedule` template for this action.
+So, this package provides the `Schedule` template for this action, and you can
+dynamically pass parameters that change along with the running time via the `release` prefix.

 ```yaml
 schedule-run-local-wf:
@@ -219,6 +220,20 @@ schedule-run-local-wf:
       asat-dt: "${{ release.logical_date }}"
 ```

+The main method of the `Schedule` model that is used for running is `pending`. If you
+do not pass a `stop` date to this method, it will generate the stop date from the
+config value under the `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration

 The main configuration that use to dynamic changing this workflow engine for your
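Editor's note: the added `pending` snippet above leaves `stop=None`, so the stop boundary comes from config. Below is a minimal sketch of the equivalent explicit call, assuming the parsed `WORKFLOW_APP_STOP_BOUNDARY_DELTA` value is exposed as `config.stop_boundary_delta` (that attribute name is an assumption; `config.tz` does appear elsewhere in this diff):

```python
# Hedged sketch: pass an explicit stop datetime instead of relying on the
# WORKFLOW_APP_STOP_BOUNDARY_DELTA default that applies when stop is None.
from datetime import datetime

from ddeutil.workflow import Schedule
from ddeutil.workflow.conf import config

# `config.stop_boundary_delta` is an assumed attribute name for the parsed
# JSON timedelta; `config.tz` is the configured ZoneInfo timezone.
stop: datetime = datetime.now(config.tz) + config.stop_boundary_delta

Schedule.from_loader("schedule-run-local-wf").pending(stop=stop)
```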
@@ -229,29 +244,33 @@ it will use default value and do not raise any error to you.
 > The config value that you will set on the environment should combine with
 > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

-| Name | Component | Default | Description |
-|:-----|:---------:|:--------|:------------|
-| **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
-| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
-| **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
-| **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
-| **MAX_NUM_POKING** | Core | `4` | . |
-| **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
-| **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
-| **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
-| **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
-| **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
-| **PATH** | Log | `./logs` | The log path of the workflow saving log. |
-| **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
-| **ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+| Name | Component | Default | Description |
+|:-----|:---------:|:--------|:------------|
+| **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
+| **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
+| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+| **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
+| **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+| **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+| **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+| **MAX_NUM_POKING** | Core | `4` | . |
+| **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
+| **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
+| **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+| **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+| **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flag that enable generating ID with `md5` algorithm. |
+| **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+| **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+| **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+| **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+| **ENABLE_ROTATED_FILE** | Log | `false` | |
+| **PATH** | Audit | `./logs` | |
+| **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |

 **API Application**:

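Editor's note: since the table above defines the `WORKFLOW_{component}_{name}` naming rule, here is a short sketch of how a deployment might set a few of these. The values are illustrative, and the JSON-to-`timedelta` parsing shown is an assumption about how the app consumes `STOP_BOUNDARY_DELTA`:

```python
import json
import os
from datetime import timedelta

# Illustrative values only; every variable follows the
# WORKFLOW_{component}_{name} convention from the table above.
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
os.environ["WORKFLOW_LOG_DEBUG_MODE"] = "false"
os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"
os.environ["WORKFLOW_APP_STOP_BOUNDARY_DELTA"] = '{"minutes": 5, "seconds": 20}'

# Assumed parsing: the scheduler app reads the JSON string into a timedelta.
delta = timedelta(**json.loads(os.environ["WORKFLOW_APP_STOP_BOUNDARY_DELTA"]))
print(delta)  # 0:05:20
```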
@@ -60,7 +60,7 @@ flowchart LR

     subgraph Data Context
         D@{ shape: processes, label: "Logs" }
-        E@{ shape: lin-cyl, label: "Metadata" }
+        E@{ shape: lin-cyl, label: "Audit<br>Logs" }
     end

     subgraph Git Context
@@ -78,7 +78,7 @@ flowchart LR
     E -.->|read| G
 ```

-> [!NOTE]
+> [!WARNING]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
 > with `.yml` files and all configs file from several data orchestration framework
 > tools from my experience on Data Engineer. :grimacing:
@@ -169,7 +169,8 @@ result: Result = workflow.execute(
 )
 ```

-So, this package provide the `Schedule` template for this action.
+So, this package provides the `Schedule` template for this action, and you can
+dynamically pass parameters that change along with the running time via the `release` prefix.

 ```yaml
 schedule-run-local-wf:
@@ -187,6 +188,20 @@ schedule-run-local-wf:
       asat-dt: "${{ release.logical_date }}"
 ```

+The main method of the `Schedule` model that is used for running is `pending`. If you
+do not pass a `stop` date to this method, it will generate the stop date from the
+config value under the `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key.
+
+```python
+from ddeutil.workflow import Schedule
+
+(
+    Schedule
+    .from_loader("schedule-run-local-wf")
+    .pending(stop=None)
+)
+```
+
 ## :cookie: Configuration

 The main configuration that use to dynamic changing this workflow engine for your
@@ -197,29 +212,33 @@ it will use default value and do not raise any error to you.
 > The config value that you will set on the environment should combine with
 > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

-| Name | Component | Default | Description |
-|:-----|:---------:|:--------|:------------|
-| **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
-| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
-| **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
-| **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
-| **MAX_NUM_POKING** | Core | `4` | . |
-| **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
-| **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
-| **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
-| **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
-| **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
-| **PATH** | Log | `./logs` | The log path of the workflow saving log. |
-| **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
-| **ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+| Name | Component | Default | Description |
+|:-----|:---------:|:--------|:------------|
+| **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
+| **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
+| **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+| **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
+| **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+| **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+| **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+| **MAX_NUM_POKING** | Core | `4` | . |
+| **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
+| **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
+| **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+| **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+| **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flag that enable generating ID with `md5` algorithm. |
+| **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+| **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+| **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+| **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+| **ENABLE_ROTATED_FILE** | Log | `false` | |
+| **PATH** | Audit | `./logs` | |
+| **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |

 **API Application**:

@@ -67,6 +67,7 @@ omit = [
     "src/ddeutil/workflow/api/api.py",
     "src/ddeutil/workflow/api/repeat.py",
     "src/ddeutil/workflow/api/route.py",
+    "app.py",
 ]

 [tool.coverage.report]
@@ -0,0 +1 @@
+__version__: str = "0.0.33"
@@ -5,13 +5,15 @@
 # ------------------------------------------------------------------------------
 from .__cron import CronJob, CronRunner
 from .__types import Re
+from .audit import (
+    Audit,
+    get_audit,
+)
 from .conf import (
     Config,
     Loader,
-    Log,
     config,
     env,
-    get_log,
     get_logger,
 )
 from .cron import (
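Editor's note: for downstream users, the practical effect of this hunk is that `Log` and `get_log` no longer import from `ddeutil.workflow.conf`. A minimal before/after sketch, with the `0.0.31` names taken from the removed lines:

```python
# Before (0.0.31): the file-based release log lived in the conf module.
# from ddeutil.workflow.conf import Log, get_log

# After (0.0.33): the same responsibility moves to the audit module, and
# both names are re-exported from the package root.
from ddeutil.workflow import Audit, get_audit

audit_cls: type[Audit] = get_audit()  # FileAudit or SQLiteAudit, by config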
@@ -43,9 +43,7 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:
     a.state.workflow_queue = {}

     yield {
-        "upper_queue": a.state.upper_queue,
-        "upper_result": a.state.upper_result,
-        # NOTE: Scheduler value should be contain a key of workflow workflow and
+        # NOTE: Scheduler value should be contained a key of workflow and
         # list of datetime of queue and running.
         #
        # ... {
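Editor's note: the yielded mapping is FastAPI/Starlette lifespan state, so each key surfaces as an attribute on `request.state`. A minimal sketch of reading the scheduler mapping from a route (the demo app and route are illustrative, not part of this diff; `request.state.scheduler` itself does appear in route.py below):

```python
from fastapi import FastAPI, Request

app = FastAPI()  # stand-in for the package's real app object

@app.get("/state-demo")  # illustrative route, not part of the package
async def state_demo(request: Request) -> dict:
    # Keys yielded from the lifespan mapping surface as request.state.*,
    # e.g. the scheduler dict of workflow names to queued datetimes.
    return {"schedule": dict(request.state.scheduler)}
```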
@@ -88,7 +86,7 @@ if config.enable_route_workflow:

 # NOTE: Enable the schedule route.
 if config.enable_route_schedule:
-    from ..conf import FileLog
+    from ..audit import get_audit
     from ..scheduler import schedule_task
     from .route import schedule_route

@@ -108,11 +106,11 @@ if config.enable_route_schedule:
             stop=datetime.now(config.tz) + timedelta(minutes=1),
             queue=app.state.workflow_queue,
             threads=app.state.workflow_threads,
-            log=FileLog,
+            log=get_audit(),
         )

     @schedule_route.on_event("startup")
-    @repeat_at(cron="*/5 * * * *")
+    @repeat_at(cron="*/5 * * * *", delay=10)
     def monitoring():
         logger.debug("[MONITOR]: Start monitoring threading.")
         snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
@@ -16,7 +16,8 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel

 from ..__types import DictData
-from ..conf import FileLog, Loader, config, get_logger
+from ..audit import Audit, get_audit
+from ..conf import Loader, config, get_logger
 from ..result import Result
 from ..scheduler import Schedule
 from ..workflow import Workflow
@@ -109,7 +110,7 @@ async def get_workflow_logs(name: str):
                     exclude_unset=True,
                     exclude_defaults=True,
                 )
-                for log in FileLog.find_logs(name=name)
+                for log in get_audit().find_logs(name=name)
             ],
         }
     except FileNotFoundError:
@@ -122,7 +123,7 @@ async def get_workflow_logs(name: str):
 @workflow_route.get(path="/{name}/logs/{release}")
 async def get_workflow_release_log(name: str, release: str):
     try:
-        log: FileLog = FileLog.find_log_with_release(
+        log: Audit = get_audit().find_log_with_release(
             name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
         )
     except FileNotFoundError:
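Editor's note: the `{release}` path parameter is parsed with `datetime.strptime(release, "%Y%m%d%H%M%S")`, so a release segment must be a compact 14-digit timestamp. A quick check:

```python
from datetime import datetime

# A release path segment is a compact 14-digit timestamp.
release = "20240101103000"
parsed = datetime.strptime(release, "%Y%m%d%H%M%S")
print(parsed)  # 2024-01-01 10:30:00

# e.g. GET /<workflow-name>/logs/20240101103000, under wherever
# workflow_route is mounted (the prefix is not shown in this diff).
```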
@@ -169,7 +170,7 @@ async def get_schedules(name: str):
     )


-@schedule_route.get(path="/deploy")
+@schedule_route.get(path="/deploy/")
 async def get_deploy_schedulers(request: Request):
     snapshot = copy.deepcopy(request.state.scheduler)
     return {"schedule": snapshot}
@@ -178,9 +179,9 @@ async def get_deploy_schedulers(request: Request):

 @schedule_route.get(path="/deploy/{name}")
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-        sch = Schedule.from_loader(name)
+        schedule = Schedule.from_loader(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
-        for workflow in sch.workflows:
+        for workflow in schedule.workflows:
             getter.append(
                 {
                     workflow.name: {
@@ -219,7 +220,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         second=0, microsecond=0
     )

-    # NOTE: Create pair of workflow and on from schedule model.
+    # NOTE: Create a pair of workflow and on from schedule model.
     try:
         schedule: Schedule = Schedule.from_loader(name)
     except ValueError as err:
@@ -0,0 +1,261 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+"""Audit Log module."""
+from __future__ import annotations
+
+import json
+import os
+from abc import ABC, abstractmethod
+from collections.abc import Iterator
+from datetime import datetime
+from pathlib import Path
+from typing import Any, ClassVar, Optional, Union
+
+from pydantic import BaseModel, Field
+from pydantic.functional_validators import model_validator
+from typing_extensions import Self
+
+from .__types import DictData, TupleStr
+from .conf import config, get_logger
+
+logger = get_logger("ddeutil.workflow")
+
+__all__: TupleStr = (
+    "get_audit",
+    "FileAudit",
+    "SQLiteAudit",
+    "Audit",
+)
+
+
+class BaseAudit(BaseModel, ABC):
+    """Base Audit Pydantic model with an abstract saving method that
+    implements only the model fields. This model should be inherited by a
+    logging subclass like file, sqlite, etc.
+    """
+
+    name: str = Field(description="A workflow name.")
+    release: datetime = Field(description="A release datetime.")
+    type: str = Field(description="A running type before logging.")
+    context: DictData = Field(
+        default_factory=dict,
+        description="A context that receive from a workflow execution result.",
+    )
+    parent_run_id: Optional[str] = Field(default=None)
+    run_id: str
+    update: datetime = Field(default_factory=datetime.now)
+    execution_time: float = Field(default=0)
+
+    @model_validator(mode="after")
+    def __model_action(self) -> Self:
+        """Run the pre-save action if the WORKFLOW_AUDIT_ENABLE_WRITE env
+        variable is enabled.
+
+        :rtype: Self
+        """
+        if config.enable_write_log:
+            self.do_before()
+        return self
+
+    def do_before(self) -> None:  # pragma: no cov
+        """Do something before finishing initialization of this log model."""
+
+    @abstractmethod
+    def save(self, excluded: list[str] | None) -> None:  # pragma: no cov
+        """Save this model logging to target logging store."""
+        raise NotImplementedError("Audit should implement ``save`` method.")
+
+
+class FileAudit(BaseAudit):
+    """File Audit Pydantic model that is used to save log data from the
+    result of a workflow execution. It inherits from the BaseAudit model and
+    implements the ``self.save`` method for files.
+    """
+
+    filename_fmt: ClassVar[str] = (
+        "workflow={name}/release={release:%Y%m%d%H%M%S}"
+    )
+
+    def do_before(self) -> None:
+        """Create the release directory before saving the log file."""
+        self.pointer().mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def find_logs(cls, name: str) -> Iterator[Self]:
+        """Generate the logging data found in the logs path for a specific
+        workflow name.
+
+        :param name: A workflow name that want to search release logging data.
+
+        :rtype: Iterator[Self]
+        """
+        pointer: Path = config.audit_path / f"workflow={name}"
+        if not pointer.exists():
+            raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
+
+        for file in pointer.glob("./release=*/*.log"):
+            with file.open(mode="r", encoding="utf-8") as f:
+                yield cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def find_log_with_release(
+        cls,
+        name: str,
+        release: datetime | None = None,
+    ) -> Self:
+        """Return the logging data found in the logs path for specific
+        workflow name and release values. If a release is not passed as an
+        input argument, it will return the latest release from the current
+        log path.
+
+        :param name: A workflow name that want to search log.
+        :param release: A release datetime that want to search log.
+
+        :raise FileNotFoundError:
+        :raise NotImplementedError:
+
+        :rtype: Self
+        """
+        if release is None:
+            raise NotImplementedError("Find latest log does not implement yet.")
+
+        pointer: Path = (
+            config.audit_path
+            / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
+        )
+        if not pointer.exists():
+            raise FileNotFoundError(
+                f"Pointer: ./logs/workflow={name}/"
+                f"release={release:%Y%m%d%H%M%S} does not found."
+            )
+
+        with max(pointer.glob("./*.log"), key=os.path.getctime).open(
+            mode="r", encoding="utf-8"
+        ) as f:
+            return cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def is_pointed(cls, name: str, release: datetime) -> bool:
+        """Check whether the release log has already been pointed or created
+        at the destination log path.
+
+        :param name: A workflow name.
+        :param release: A release datetime.
+
+        :rtype: bool
+        :return: Return False if the release log was not pointed or created.
+        """
+        # NOTE: Return False if enable writing log flag does not set.
+        if not config.enable_write_log:
+            return False
+
+        # NOTE: create pointer path that use the same logic of pointer method.
+        pointer: Path = config.audit_path / cls.filename_fmt.format(
+            name=name, release=release
+        )
+
+        return pointer.exists()
+
+    def pointer(self) -> Path:
+        """Return the release directory path generated from this model data.
+
+        :rtype: Path
+        """
+        return config.audit_path / self.filename_fmt.format(
+            name=self.name, release=self.release
+        )
+
+    def save(self, excluded: list[str] | None) -> Self:
+        """Save logging data that receives context data from a workflow
+        execution result.
+
+        :param excluded: An excluded list of key name that want to pass in the
+            model_dump method.
+
+        :rtype: Self
+        """
+        from .utils import cut_id
+
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            logger.debug(
+                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
+                f"config was set"
+            )
+            return self
+
+        log_file: Path = self.pointer() / f"{self.run_id}.log"
+        log_file.write_text(
+            json.dumps(
+                self.model_dump(exclude=excluded),
+                default=str,
+                indent=2,
+            ),
+            encoding="utf-8",
+        )
+        return self
+
+
+class SQLiteAudit(BaseAudit):  # pragma: no cov
+    """SQLite Audit Pydantic Model."""
+
+    @staticmethod
+    def meta() -> dict[str, Any]:
+        return {
+            "table": "workflow_log",
+            "ddl": """
+                workflow        str,
+                release         int,
+                type            str,
+                context         json,
+                parent_run_id   int,
+                run_id          int,
+                update          datetime
+                primary key ( run_id )
+                """,
+        }
+
+    def save(self, excluded: list[str] | None) -> SQLiteAudit:
+        """Save logging data that receives context data from a workflow
+        execution result.
+        """
+        from .utils import cut_id
+
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            logger.debug(
+                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
+                f"config was set"
+            )
+            return self
+
+        raise NotImplementedError("SQLiteAudit does not implement yet.")
+
+
+class RemoteFileAudit(FileAudit):  # pragma: no cov
+    """Remote File Audit Pydantic Model."""
+
+    def save(self, excluded: list[str] | None) -> RemoteFileAudit: ...
+
+
+class RedisAudit(BaseAudit):  # pragma: no cov
+    """Redis Audit Pydantic Model."""
+
+    def save(self, excluded: list[str] | None) -> RedisAudit: ...
+
+
+Audit = Union[
+    FileAudit,
+    SQLiteAudit,
+]
+
+
+def get_audit() -> type[Audit]:  # pragma: no cov
+    """Get an audit class dynamically based on the config audit path value.
+
+    :rtype: type[Audit]
+    """
+    if config.audit_path.is_file():
+        return SQLiteAudit
+    return FileAudit
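
Editor's note: a short usage sketch tying the new module together. The workflow name and release are placeholders, and the file backend is assumed (the default audit path is a directory, so `get_audit()` returns `FileAudit`; `is_pointed` and `find_log_with_release` are `FileAudit` classmethods):

```python
from datetime import datetime

from ddeutil.workflow import get_audit

audit_cls = get_audit()  # FileAudit unless config.audit_path is a file
release = datetime(2024, 1, 1, 10, 30)  # placeholder release datetime

# Has this workflow release already been written under the audit path?
if audit_cls.is_pointed(name="wf-scheduling", release=release):
    log = audit_cls.find_log_with_release(name="wf-scheduling", release=release)
    print(log.run_id, log.execution_time)
```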