ddeutil-workflow 0.0.32__tar.gz → 0.0.34__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {ddeutil_workflow-0.0.32/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.34}/PKG-INFO +43 -38
  2. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/README.md +42 -37
  3. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/pyproject.toml +1 -0
  4. ddeutil_workflow-0.0.34/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/__init__.py +20 -12
  6. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/api/api.py +2 -2
  7. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/api/route.py +4 -3
  8. ddeutil_workflow-0.0.34/src/ddeutil/workflow/audit.py +252 -0
  9. ddeutil_workflow-0.0.32/src/ddeutil/workflow/hook.py → ddeutil_workflow-0.0.34/src/ddeutil/workflow/call.py +27 -27
  10. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/conf.py +163 -271
  11. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/job.py +113 -144
  12. ddeutil_workflow-0.0.34/src/ddeutil/workflow/result.py +256 -0
  13. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/scheduler.py +167 -151
  14. ddeutil_workflow-0.0.32/src/ddeutil/workflow/stage.py → ddeutil_workflow-0.0.34/src/ddeutil/workflow/stages.py +174 -89
  15. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/utils.py +20 -2
  16. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/workflow.py +172 -148
  17. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34/src/ddeutil_workflow.egg-info}/PKG-INFO +43 -38
  18. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil_workflow.egg-info/SOURCES.txt +8 -7
  19. ddeutil_workflow-0.0.32/tests/test_conf_log.py → ddeutil_workflow-0.0.34/tests/test_audit.py +18 -17
  20. ddeutil_workflow-0.0.32/tests/test_hook_tag.py → ddeutil_workflow-0.0.34/tests/test_call_tag.py +26 -4
  21. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_conf.py +13 -6
  22. ddeutil_workflow-0.0.32/tests/test_job_exec_py.py → ddeutil_workflow-0.0.34/tests/test_job_exec.py +102 -17
  23. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_job_exec_strategy.py +18 -11
  24. ddeutil_workflow-0.0.34/tests/test_result.py +56 -0
  25. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_schedule_pending.py +1 -1
  26. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_schedule_tasks.py +1 -1
  27. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_schedule_workflow.py +1 -1
  28. ddeutil_workflow-0.0.34/tests/test_scheduler_control.py +49 -0
  29. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_stage.py +1 -1
  30. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_stage_handler_exec.py +76 -54
  31. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_utils.py +9 -0
  32. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_workflow_exec.py +32 -26
  33. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_workflow_exec_poke.py +31 -24
  34. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_workflow_exec_release.py +12 -10
  35. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_workflow_task.py +17 -7
  36. ddeutil_workflow-0.0.32/src/ddeutil/workflow/__about__.py +0 -1
  37. ddeutil_workflow-0.0.32/src/ddeutil/workflow/result.py +0 -103
  38. ddeutil_workflow-0.0.32/tests/test_result.py +0 -77
  39. ddeutil_workflow-0.0.32/tests/test_schedule_control.py +0 -38
  40. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/LICENSE +0 -0
  41. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/setup.cfg +0 -0
  42. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/__cron.py +0 -0
  43. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/__types.py +0 -0
  44. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/api/__init__.py +0 -0
  45. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/api/repeat.py +0 -0
  46. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/cron.py +0 -0
  47. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/exceptions.py +0 -0
  48. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/params.py +0 -0
  49. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil/workflow/templates.py +0 -0
  50. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  51. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  52. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  53. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test__cron.py +0 -0
  54. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test__regex.py +0 -0
  55. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_cron_on.py +0 -0
  56. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_job.py +0 -0
  57. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_job_strategy.py +0 -0
  58. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_params.py +0 -0
  59. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_release.py +0 -0
  60. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_release_queue.py +0 -0
  61. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_schedule.py +0 -0
  62. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_templates.py +0 -0
  63. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_templates_filter.py +0 -0
  64. {ddeutil_workflow-0.0.32 → ddeutil_workflow-0.0.34}/tests/test_workflow.py +0 -0
  65. ddeutil_workflow-0.0.32/tests/test_workflow_job_exec.py → ddeutil_workflow-0.0.34/tests/test_workflow_exec_job.py +0 -0
**PKG-INFO**

@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ddeutil-workflow
- Version: 0.0.32
+ Version: 0.0.34
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -74,25 +74,25 @@ you should to set the data layer separate this core program before run this appl
 
  ```mermaid
  flowchart LR
-     subgraph Interface
-         A((User))
-         subgraph Docker Container
+     A((fa:fa-user User))
+
+     subgraph Docker Container
+         direction TB
          G@{ shape: rounded, label: "Observe<br>Application" }
-         end
      end
 
-     A --->|action| B(Workflow<br>Application)
-     B ---> |response| A
-     B -..-> |response| G
-     G -..-> |request| B
-
      subgraph Docker Container
-         B
+         direction TB
+         B@{ shape: rounded, label: "Workflow<br>Application" }
      end
 
+     A <--->|action &<br>response| B
+     B -....-> |response| G
+     G -....-> |request| B
+
      subgraph Data Context
          D@{ shape: processes, label: "Logs" }
-         E@{ shape: lin-cyl, label: "Metadata" }
+         E@{ shape: lin-cyl, label: "Audit<br>Logs" }
      end
 
      subgraph Git Context
@@ -138,9 +138,9 @@ This is examples that use workflow file for running common Data Engineering
  use-case.
 
  > [!IMPORTANT]
- > I recommend you to use the `hook` stage for all actions that you want to do
+ > I recommend you to use the `call` stage for all actions that you want to do
  > with workflow activity that you want to orchestrate. Because it is able to
- > dynamic an input argument with the same hook function that make you use less
+ > dynamic an input argument with the same call function that make you use less
  > time to maintenance your data workflows.
 
  ```yaml
@@ -182,7 +182,7 @@ run-py-local:
        writing_mode: flatten
        aws_s3_path: my-data/open-data/${{ params.source-extract }}
 
-       # This Authentication code should implement with your custom hook
+       # This Authentication code should implement with your custom call
        # function. The template allow you to use environment variable.
        aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
        aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
@@ -244,29 +244,34 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).
 
- | Name | Component | Default | Description |
- |:-----|:---------:|:--------|:------------|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_NUM_POKING** | Core | `4` | . |
- | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
- | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Description |
+ |:-----|:---------:|:--------|:------------|
+ | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
+ | **REGISTRY** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_NUM_POKING** | Core | `4` | . |
+ | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
+ | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
+ | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+ | **ENABLE_WRITE** | Log | `false` | |
+ | **PATH** | Audit | `./audits` | |
+ | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
 
  **API Application**:
 
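The renamed Log/Audit rows above follow the `WORKFLOW_{component}_{name}` rule quoted at the top of the table. A minimal sketch of driving the new split from the environment; `WORKFLOW_AUDIT_ENABLE_WRITE` is referenced in `audit.py` later in this diff, while the other keys are derived from the naming rule and table rows, so treat the exact names as assumptions:

```python
import os

# Derived from the WORKFLOW_{component}_{name} rule; set these before
# importing the package so the Config object reads them at import time.
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
os.environ["WORKFLOW_LOG_ENABLE_WRITE"] = "false"   # plain logs off (new default)
os.environ["WORKFLOW_AUDIT_PATH"] = "./audits"      # new Audit component
os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"  # audit records on

from ddeutil.workflow.conf import config  # import after the env is prepared
```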
**README.md**

@@ -42,25 +42,25 @@ you should to set the data layer separate this core program before run this appl
 
  ```mermaid
  flowchart LR
-     subgraph Interface
-         A((User))
-         subgraph Docker Container
+     A((fa:fa-user User))
+
+     subgraph Docker Container
+         direction TB
          G@{ shape: rounded, label: "Observe<br>Application" }
-         end
      end
 
-     A --->|action| B(Workflow<br>Application)
-     B ---> |response| A
-     B -..-> |response| G
-     G -..-> |request| B
-
      subgraph Docker Container
-         B
+         direction TB
+         B@{ shape: rounded, label: "Workflow<br>Application" }
      end
 
+     A <--->|action &<br>response| B
+     B -....-> |response| G
+     G -....-> |request| B
+
      subgraph Data Context
          D@{ shape: processes, label: "Logs" }
-         E@{ shape: lin-cyl, label: "Metadata" }
+         E@{ shape: lin-cyl, label: "Audit<br>Logs" }
      end
 
      subgraph Git Context
@@ -106,9 +106,9 @@ This is examples that use workflow file for running common Data Engineering
  use-case.
 
  > [!IMPORTANT]
- > I recommend you to use the `hook` stage for all actions that you want to do
+ > I recommend you to use the `call` stage for all actions that you want to do
  > with workflow activity that you want to orchestrate. Because it is able to
- > dynamic an input argument with the same hook function that make you use less
+ > dynamic an input argument with the same call function that make you use less
  > time to maintenance your data workflows.
 
  ```yaml
@@ -150,7 +150,7 @@ run-py-local:
        writing_mode: flatten
        aws_s3_path: my-data/open-data/${{ params.source-extract }}
 
-       # This Authentication code should implement with your custom hook
+       # This Authentication code should implement with your custom call
        # function. The template allow you to use environment variable.
        aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
        aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
@@ -212,29 +212,34 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).
 
- | Name | Component | Default | Description |
- |:-----|:---------:|:--------|:------------|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_NUM_POKING** | Core | `4` | . |
- | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
- | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Description |
+ |:-----|:---------:|:--------|:------------|
+ | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
+ | **REGISTRY** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_NUM_POKING** | Core | `4` | . |
+ | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
+ | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
+ | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+ | **ENABLE_WRITE** | Log | `false` | |
+ | **PATH** | Audit | `./audits` | |
+ | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
 
  **API Application**:
 
**pyproject.toml**

@@ -67,6 +67,7 @@ omit = [
      "src/ddeutil/workflow/api/api.py",
      "src/ddeutil/workflow/api/repeat.py",
      "src/ddeutil/workflow/api/route.py",
+     "app.py",
  ]
 
  [tool.coverage.report]
**src/ddeutil/workflow/__about__.py** (new file)

@@ -0,0 +1 @@
+ __version__: str = "0.0.34"
**src/ddeutil/workflow/__init__.py**

@@ -5,13 +5,22 @@
  # ------------------------------------------------------------------------------
  from .__cron import CronJob, CronRunner
  from .__types import Re
+ from .audit import (
+     Audit,
+     get_audit,
+ )
+ from .call import (
+     ReturnTagFunc,
+     TagFunc,
+     extract_call,
+     make_registry,
+     tag,
+ )
  from .conf import (
      Config,
      Loader,
-     Log,
      config,
      env,
-     get_log,
      get_logger,
  )
  from .cron import (
@@ -26,13 +35,6 @@ from .exceptions import (
      UtilException,
      WorkflowException,
  )
- from .hook import (
-     ReturnTagFunc,
-     TagFunc,
-     extract_hook,
-     make_registry,
-     tag,
- )
  from .job import (
      Job,
      Strategy,
@@ -44,7 +46,13 @@ from .params import (
      Param,
      StrParam,
  )
- from .result import Result
+ from .result import (
+     Result,
+     Status,
+     TraceLog,
+     default_gen_id,
+     get_dt_tznow,
+ )
  from .scheduler import (
      Schedule,
      ScheduleWorkflow,
@@ -52,10 +60,10 @@ from .scheduler import (
      schedule_runner,
      schedule_task,
  )
- from .stage import (
+ from .stages import (
      BashStage,
+     CallStage,
      EmptyStage,
-     HookStage,
      PyStage,
      Stage,
      TriggerStage,
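The renames in this `__init__.py` diff (`hook` → `call`, `stage` → `stages`, `Log`/`get_log` → `Audit`/`get_audit`) change the public import surface for downstream code. A migration sketch using only names this diff shows being re-exported:

```python
# 0.0.32 imports that disappear in 0.0.34:
# from ddeutil.workflow import HookStage, extract_hook, get_log, Log

# 0.0.34 replacements, per the re-exports above:
from ddeutil.workflow import (
    Audit,         # replaces Log (moved from conf.py to audit.py)
    CallStage,     # replaces HookStage (stage.py renamed to stages.py)
    Result,
    Status,        # newly exported from result.py
    extract_call,  # replaces extract_hook (hook.py renamed to call.py)
    get_audit,     # replaces get_log
)
```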
**src/ddeutil/workflow/api/api.py**

@@ -86,7 +86,7 @@ if config.enable_route_workflow:
 
  # NOTE: Enable the schedule route.
  if config.enable_route_schedule:
-     from ..conf import get_log
+     from ..audit import get_audit
      from ..scheduler import schedule_task
      from .route import schedule_route
 
@@ -106,7 +106,7 @@ if config.enable_route_schedule:
          stop=datetime.now(config.tz) + timedelta(minutes=1),
          queue=app.state.workflow_queue,
          threads=app.state.workflow_threads,
-         log=get_log(),
+         log=get_audit(),
      )
 
      @schedule_route.on_event("startup")
**src/ddeutil/workflow/api/route.py**

@@ -16,7 +16,8 @@ from fastapi.responses import UJSONResponse
  from pydantic import BaseModel
 
  from ..__types import DictData
- from ..conf import FileLog, Loader, config, get_logger
+ from ..audit import Audit, get_audit
+ from ..conf import Loader, config, get_logger
  from ..result import Result
  from ..scheduler import Schedule
  from ..workflow import Workflow
@@ -109,7 +110,7 @@ async def get_workflow_logs(name: str):
              exclude_unset=True,
              exclude_defaults=True,
          )
-         for log in FileLog.find_logs(name=name)
+         for log in get_audit().find_audits(name=name)
      ],
  }
  except FileNotFoundError:
@@ -122,7 +123,7 @@ async def get_workflow_logs(name: str):
  @workflow_route.get(path="/{name}/logs/{release}")
  async def get_workflow_release_log(name: str, release: str):
      try:
-         log: FileLog = FileLog.find_log_with_release(
+         log: Audit = get_audit().find_audit_with_release(
              name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
          )
      except FileNotFoundError:
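The same audit lookup these routes now use can be driven directly from the `audit.py` module added below. A minimal sketch; the workflow name is hypothetical, and `find_audits` raises `FileNotFoundError` when no audit directory exists for that name:

```python
from datetime import datetime

from ddeutil.workflow.audit import get_audit

# get_audit() returns an audit class (FileAudit by default), not an instance.
audit_cls = get_audit()

# Iterate every persisted release audit for one workflow.
for audit in audit_cls.find_audits(name="wf-run-python"):
    print(audit.release, audit.run_id)

# Fetch one release, parsed with the same %Y%m%d%H%M%S format the route uses.
audit = audit_cls.find_audit_with_release(
    name="wf-run-python",
    release=datetime.strptime("20240101010000", "%Y%m%d%H%M%S"),
)
```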
**src/ddeutil/workflow/audit.py** (new file)

@@ -0,0 +1,252 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ """Audit Log module."""
+ from __future__ import annotations
+
+ import json
+ import os
+ from abc import ABC, abstractmethod
+ from collections.abc import Iterator
+ from datetime import datetime
+ from pathlib import Path
+ from typing import ClassVar, Optional, Union
+
+ from pydantic import BaseModel, Field
+ from pydantic.functional_validators import model_validator
+ from typing_extensions import Self
+
+ from .__types import DictData, TupleStr
+ from .conf import config
+ from .result import TraceLog
+
+ __all__: TupleStr = (
+     "get_audit",
+     "FileAudit",
+     "SQLiteAudit",
+     "Audit",
+ )
+
+
+ class BaseAudit(BaseModel, ABC):
+     """Base Audit Pydantic Model with abstraction class property that
+     implement only model fields. This model should to use with inherit to
+     logging subclass like file, sqlite, etc.
+     """
+
+     name: str = Field(description="A workflow name.")
+     release: datetime = Field(description="A release datetime.")
+     type: str = Field(description="A running type before logging.")
+     context: DictData = Field(
+         default_factory=dict,
+         description="A context that receive from a workflow execution result.",
+     )
+     parent_run_id: Optional[str] = Field(default=None)
+     run_id: str
+     update: datetime = Field(default_factory=datetime.now)
+     execution_time: float = Field(default=0)
+
+     @model_validator(mode="after")
+     def __model_action(self) -> Self:
+         """Do before the Audit action with WORKFLOW_AUDIT_ENABLE_WRITE env
+         variable.
+
+         :rtype: Self
+         """
+         if config.enable_write_audit:
+             self.do_before()
+         return self
+
+     def do_before(self) -> None:  # pragma: no cov
+         """To something before end up of initial log model."""
+
+     @abstractmethod
+     def save(self, excluded: list[str] | None) -> None:  # pragma: no cov
+         """Save this model logging to target logging store."""
+         raise NotImplementedError("Audit should implement ``save`` method.")
+
+
+ class FileAudit(BaseAudit):
+     """File Audit Pydantic Model that use to saving log data from result of
+     workflow execution. It inherits from BaseAudit model that implement the
+     ``self.save`` method for file.
+     """
+
+     filename_fmt: ClassVar[str] = (
+         "workflow={name}/release={release:%Y%m%d%H%M%S}"
+     )
+
+     def do_before(self) -> None:
+         """Create directory of release before saving log file."""
+         self.pointer().mkdir(parents=True, exist_ok=True)
+
+     @classmethod
+     def find_audits(cls, name: str) -> Iterator[Self]:
+         """Generate the audit data that found from logs path with specific a
+         workflow name.
+
+         :param name: A workflow name that want to search release logging data.
+
+         :rtype: Iterator[Self]
+         """
+         pointer: Path = config.audit_path / f"workflow={name}"
+         if not pointer.exists():
+             raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
+
+         for file in pointer.glob("./release=*/*.log"):
+             with file.open(mode="r", encoding="utf-8") as f:
+                 yield cls.model_validate(obj=json.load(f))
+
+     @classmethod
+     def find_audit_with_release(
+         cls,
+         name: str,
+         release: datetime | None = None,
+     ) -> Self:
+         """Return the audit data that found from logs path with specific
+         workflow name and release values. If a release does not pass to an
+         input argument, it will return the latest release from the current
+         log path.
+
+         :param name: A workflow name that want to search log.
+         :param release: A release datetime that want to search log.
+
+         :raise FileNotFoundError:
+         :raise NotImplementedError:
+
+         :rtype: Self
+         """
+         if release is None:
+             raise NotImplementedError("Find latest log does not implement yet.")
+
+         pointer: Path = (
+             config.audit_path
+             / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
+         )
+         if not pointer.exists():
+             raise FileNotFoundError(
+                 f"Pointer: ./logs/workflow={name}/"
+                 f"release={release:%Y%m%d%H%M%S} does not found."
+             )
+
+         with max(pointer.glob("./*.log"), key=os.path.getctime).open(
+             mode="r", encoding="utf-8"
+         ) as f:
+             return cls.model_validate(obj=json.load(f))
+
+     @classmethod
+     def is_pointed(cls, name: str, release: datetime) -> bool:
+         """Check the release log already pointed or created at the destination
+         log path.
+
+         :param name: A workflow name.
+         :param release: A release datetime.
+
+         :rtype: bool
+         :return: Return False if the release log was not pointed or created.
+         """
+         # NOTE: Return False if enable writing log flag does not set.
+         if not config.enable_write_audit:
+             return False
+
+         # NOTE: create pointer path that use the same logic of pointer method.
+         pointer: Path = config.audit_path / cls.filename_fmt.format(
+             name=name, release=release
+         )
+
+         return pointer.exists()
+
+     def pointer(self) -> Path:
+         """Return release directory path that was generated from model data.
+
+         :rtype: Path
+         """
+         return config.audit_path / self.filename_fmt.format(
+             name=self.name, release=self.release
+         )
+
+     def save(self, excluded: list[str] | None) -> Self:
+         """Save logging data that receive a context data from a workflow
+         execution result.
+
+         :param excluded: An excluded list of key name that want to pass in the
+             model_dump method.
+
+         :rtype: Self
+         """
+         trace: TraceLog = TraceLog(self.run_id, self.parent_run_id)
+
+         # NOTE: Check environ variable was set for real writing.
+         if not config.enable_write_audit:
+             trace.debug("[LOG]: Skip writing log cause config was set")
+             return self
+
+         log_file: Path = self.pointer() / f"{self.run_id}.log"
+         log_file.write_text(
+             json.dumps(
+                 self.model_dump(exclude=excluded),
+                 default=str,
+                 indent=2,
+             ),
+             encoding="utf-8",
+         )
+         return self
+
+
+ class SQLiteAudit(BaseAudit):  # pragma: no cov
+     """SQLite Audit Pydantic Model."""
+
+     table_name: ClassVar[str] = "workflow_log"
+     schemas: ClassVar[str] = """
+         workflow        str,
+         release         int,
+         type            str,
+         context         json,
+         parent_run_id   int,
+         run_id          int,
+         update          datetime
+         primary key ( run_id )
+         """
+
+     def save(self, excluded: list[str] | None) -> SQLiteAudit:
+         """Save logging data that receive a context data from a workflow
+         execution result.
+         """
+         trace: TraceLog = TraceLog(self.run_id, self.parent_run_id)
+
+         # NOTE: Check environ variable was set for real writing.
+         if not config.enable_write_audit:
+             trace.debug("[LOG]: Skip writing log cause config was set")
+             return self
+
+         raise NotImplementedError("SQLiteAudit does not implement yet.")
+
+
+ class RemoteFileAudit(FileAudit):  # pragma: no cov
+     """Remote File Audit Pydantic Model."""
+
+     def save(self, excluded: list[str] | None) -> RemoteFileAudit: ...
+
+
+ class RedisAudit(BaseAudit):  # pragma: no cov
+     """Redis Audit Pydantic Model."""
+
+     def save(self, excluded: list[str] | None) -> RedisAudit: ...
+
+
+ Audit = Union[
+     FileAudit,
+     SQLiteAudit,
+ ]
+
+
+ def get_audit() -> type[Audit]:  # pragma: no cov
+     """Get an audit class that dynamic base on the config audit path value.
+
+     :rtype: type[Audit]
+     """
+     if config.audit_path.is_file():
+         return SQLiteAudit
+     return FileAudit
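For reference, a minimal usage sketch of this new module: building one `FileAudit` record by hand and saving it. Field values here are hypothetical, and the write is a no-op unless `WORKFLOW_AUDIT_ENABLE_WRITE` is enabled, per the guard in `save`:

```python
from datetime import datetime

from ddeutil.workflow.audit import FileAudit

# Constructing the model triggers do_before() (release directory creation)
# via the model validator when audit writing is enabled.
audit = FileAudit(
    name="wf-run-python",
    release=datetime(2024, 1, 1, 1, 0),
    type="manual",
    context={"params": {"source-extract": "demo"}},
    run_id="demo-run-id",
)

# Writes ./audits/workflow=wf-run-python/release=20240101010000/demo-run-id.log
audit.save(excluded=None)
```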