ddeutil-workflow 0.0.9__tar.gz → 0.0.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. {ddeutil_workflow-0.0.9/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.10}/PKG-INFO +17 -108
  2. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/README.md +14 -103
  3. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/pyproject.toml +4 -7
  4. ddeutil_workflow-0.0.10/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/api.py +16 -16
  6. ddeutil_workflow-0.0.10/src/ddeutil/workflow/cli.py +134 -0
  7. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/cron.py +116 -26
  8. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/exceptions.py +3 -0
  9. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/log.py +66 -59
  10. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/on.py +10 -4
  11. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/pipeline.py +267 -223
  12. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/repeat.py +66 -39
  13. ddeutil_workflow-0.0.10/src/ddeutil/workflow/route.py +92 -0
  14. ddeutil_workflow-0.0.10/src/ddeutil/workflow/scheduler.py +620 -0
  15. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/stage.py +15 -11
  16. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/utils.py +142 -6
  17. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10/src/ddeutil_workflow.egg-info}/PKG-INFO +17 -108
  18. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil_workflow.egg-info/SOURCES.txt +2 -1
  19. ddeutil_workflow-0.0.10/src/ddeutil_workflow.egg-info/entry_points.txt +2 -0
  20. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil_workflow.egg-info/requires.txt +2 -4
  21. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test__conf_exist.py +1 -1
  22. ddeutil_workflow-0.0.10/tests/test_conf.py +8 -0
  23. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_cron.py +56 -0
  24. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_log.py +3 -13
  25. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_on.py +8 -0
  26. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_poke.py +3 -9
  27. ddeutil_workflow-0.0.10/tests/test_scheduler.py +12 -0
  28. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_stage_trigger.py +2 -2
  29. ddeutil_workflow-0.0.9/src/ddeutil/workflow/__about__.py +0 -1
  30. ddeutil_workflow-0.0.9/src/ddeutil/workflow/cli.py +0 -51
  31. ddeutil_workflow-0.0.9/src/ddeutil/workflow/loader.py +0 -132
  32. ddeutil_workflow-0.0.9/src/ddeutil/workflow/route.py +0 -71
  33. ddeutil_workflow-0.0.9/src/ddeutil/workflow/scheduler.py +0 -452
  34. ddeutil_workflow-0.0.9/src/ddeutil_workflow.egg-info/entry_points.txt +0 -2
  35. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/LICENSE +0 -0
  36. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/setup.cfg +0 -0
  37. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/__init__.py +0 -0
  38. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil/workflow/__types.py +0 -0
  39. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  40. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  41. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test__local_and_global.py +0 -0
  42. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test__regex.py +0 -0
  43. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_job.py +0 -0
  44. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_job_py.py +0 -0
  45. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_params.py +0 -0
  46. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline.py +0 -0
  47. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_desc.py +0 -0
  48. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_if.py +0 -0
  49. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_matrix.py +0 -0
  50. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_on.py +0 -0
  51. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_params.py +0 -0
  52. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_run.py +0 -0
  53. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_run_raise.py +0 -0
  54. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_pipeline_task.py +0 -0
  55. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_stage.py +0 -0
  56. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_stage_bash.py +0 -0
  57. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_stage_condition.py +0 -0
  58. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_stage_hook.py +0 -0
  59. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_stage_py.py +0 -0
  60. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_utils.py +0 -0
  61. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_utils_result.py +0 -0
  62. {ddeutil_workflow-0.0.9 → ddeutil_workflow-0.0.10}/tests/test_utils_template.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.9
3
+ Version: 0.0.10
4
4
  Summary: Lightweight workflow orchestration with less dependencies
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -21,15 +21,13 @@ Classifier: Programming Language :: Python :: 3.12
21
21
  Requires-Python: >=3.9.13
22
22
  Description-Content-Type: text/markdown
23
23
  License-File: LICENSE
24
- Requires-Dist: fmtutil
25
24
  Requires-Dist: ddeutil-io
26
25
  Requires-Dist: python-dotenv==1.0.1
27
- Requires-Dist: typer==0.12.4
26
+ Requires-Dist: typer<1.0.0,==0.12.5
28
27
  Provides-Extra: schedule
29
28
  Requires-Dist: schedule<2.0.0,==1.2.2; extra == "schedule"
30
29
  Provides-Extra: api
31
- Requires-Dist: fastapi[standard]==0.112.1; extra == "api"
32
- Requires-Dist: croniter==3.0.3; extra == "api"
30
+ Requires-Dist: fastapi[standard]<1.0.0,==0.112.2; extra == "api"
33
31
 
34
32
  # Workflow
35
33
 
@@ -39,17 +37,6 @@ Requires-Dist: croniter==3.0.3; extra == "api"
39
37
  [![gh license](https://img.shields.io/github/license/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow/blob/main/LICENSE)
40
38
  [![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
41
39
 
42
- **Table of Contents**:
43
-
44
- - [Installation](#installation)
45
- - [Getting Started](#getting-started)
46
- - [On](#on)
47
- - [Pipeline](#pipeline)
48
- - [Usage](#usage)
49
- - [Configuration](#configuration)
50
- - [Future](#future)
51
- - [Deployment](#deployment)
52
-
53
40
  The **Lightweight workflow orchestration** with less dependencies that was created
54
41
  for easy to make a simple metadata driven for data pipeline orchestration.
55
42
  It can be used for data operations via a `.yaml` template.
@@ -103,82 +90,6 @@ this package with application add-ons, you should add `app` in installation;
103
90
  > | ddeutil-workflow:python3.11 | `3.11` | :x: |
104
91
  > | ddeutil-workflow:python3.12 | `3.12` | :x: |
105
92
 
106
- ## Getting Started
107
-
108
- The main feature of this project is the `Pipeline` object that can call any
109
- registries function. The pipeline can handle everything that you want to do, it
110
- will passing parameters and catching the output for re-use it to next step.
111
-
112
- ### On
113
-
114
- The **On** is schedule object that receive crontab value and able to generate
115
- datetime value with next or previous with any start point of an input datetime.
116
-
117
- ```yaml
118
- # This file should keep under this path: `./root-path/conf-path/*`
119
- on_every_5_min:
120
- type: on.On
121
- cron: "*/5 * * * *"
122
- ```
123
-
124
- ```python
125
- from ddeutil.workflow.on import On
126
-
127
- # NOTE: Start load the on data from `.yaml` template file with this key.
128
- schedule = On.from_loader(name='on_every_5_min', externals={})
129
-
130
- assert '*/5 * * * *' == str(schedule.cronjob)
131
-
132
- cron_iter = schedule.generate('2022-01-01 00:00:00')
133
-
134
- assert "2022-01-01 00:05:00" f"{cron_iter.next:%Y-%m-%d %H:%M:%S}"
135
- assert "2022-01-01 00:10:00" f"{cron_iter.next:%Y-%m-%d %H:%M:%S}"
136
- assert "2022-01-01 00:15:00" f"{cron_iter.next:%Y-%m-%d %H:%M:%S}"
137
- ```
138
-
139
- ### Pipeline
140
-
141
- The **Pipeline** object that is the core feature of this project.
142
-
143
- ```yaml
144
- # This file should keep under this path: `./root-path/conf-path/*`
145
- pipeline-name:
146
- type: ddeutil.workflow.pipeline.Pipeline
147
- on: 'on_every_5_min'
148
- params:
149
- author-run:
150
- type: str
151
- run-date:
152
- type: datetime
153
- jobs:
154
- first-job:
155
- stages:
156
- - name: "Empty stage do logging to console only!!"
157
- ```
158
-
159
- ```python
160
- from ddeutil.workflow.pipeline import Pipeline
161
-
162
- pipe = Pipeline.from_loader(name='pipeline-name', externals={})
163
- pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
164
- ```
165
-
166
- > [!NOTE]
167
- > The above parameter can use short declarative statement. You can pass a parameter
168
- > type to the key of a parameter name but it does not handler default value if you
169
- > run this pipeline workflow with schedule.
170
- >
171
- > ```yaml
172
- > ...
173
- > params:
174
- > author-run: str
175
- > run-date: datetime
176
- > ...
177
- > ```
178
- >
179
- > And for the type, you can remove `ddeutil.workflow` prefix because we can find
180
- > it by looping search from `WORKFLOW_CORE_REGISTRY` value.
181
-
182
93
  ## Usage
183
94
 
184
95
  This is examples that use workflow file for running common Data Engineering
@@ -209,7 +120,9 @@ run_py_local:
209
120
  url: https://open-data/
210
121
  auth: ${API_ACCESS_REFRESH_TOKEN}
211
122
  aws_s3_path: my-data/open-data/
212
- # This Authentication code should implement with your custom hook function
123
+
124
+ # This Authentication code should implement with your custom hook function.
125
+ # The template allows you to use environment variables.
213
126
  aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
214
127
  aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
215
128
  ```
@@ -227,28 +140,24 @@ run_py_local:
227
140
  | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | true | A flag that all stage raise StageException from stage execution |
228
141
  | `WORKFLOW_CORE_MAX_PIPELINE_POKING` | Core | 4 | |
229
142
  | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in pipeline executor |
143
+ | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode |
230
144
  | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination |
231
145
 
232
146
 
233
147
  **Application**:
234
148
 
235
- | Environment | Default | Description |
236
- |-------------------------------------|---------|-------------|
237
- | `WORKFLOW_APP_PROCESS_WORKER` | 2 | |
238
- | `WORKFLOW_APP_PIPELINE_PER_PROCESS` | 100 | |
149
+ | Environment | Default | Description |
150
+ |-------------------------------------|----------------------------------|-------------------------------------------------------------------------|
151
+ | `WORKFLOW_APP_PROCESS_WORKER` | 2 | The maximum process worker number that run in scheduler app module |
152
+ | `WORKFLOW_APP_SCHEDULE_PER_PROCESS` | 100 | A schedule per process that run parallel |
153
+ | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format |
239
154
 
240
155
  **API server**:
241
156
 
242
- | Environment | Default | Description |
243
- |-----------------------|--------------------------------------------------------|--------------------------------------------------------------------|
244
- | `WORKFLOW_API_DB_URL` | postgresql+asyncpg://user:pass@localhost:5432/schedule | A Database URL that will pass to SQLAlchemy create_engine function |
245
-
246
- ## Future
247
-
248
- The current milestone that will develop and necessary features that should to
249
- implement on this project.
250
-
251
- - ...
157
+ | Environment | Default | Description |
158
+ |--------------------------------------|---------|-----------------------------------------------------------------------------------|
159
+ | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | true | A flag that enable workflow route to manage execute manually and workflow logging |
160
+ | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | true | A flag that enable run scheduler |
252
161
 
253
162
  ## Deployment
254
163
 
@@ -270,4 +179,4 @@ like crontab job but via Python API.
270
179
 
271
180
  > [!NOTE]
272
181
  > If this package already deploy, it able to use
273
- > `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80`
182
+ > `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80 --workers 4`
@@ -6,17 +6,6 @@
6
6
  [![gh license](https://img.shields.io/github/license/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow/blob/main/LICENSE)
7
7
  [![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
8
8
 
9
- **Table of Contents**:
10
-
11
- - [Installation](#installation)
12
- - [Getting Started](#getting-started)
13
- - [On](#on)
14
- - [Pipeline](#pipeline)
15
- - [Usage](#usage)
16
- - [Configuration](#configuration)
17
- - [Future](#future)
18
- - [Deployment](#deployment)
19
-
20
9
  The **Lightweight workflow orchestration** with less dependencies that was created
21
10
  for easy to make a simple metadata driven for data pipeline orchestration.
22
11
  It can be used for data operations via a `.yaml` template.
@@ -70,82 +59,6 @@ this package with application add-ons, you should add `app` in installation;
70
59
  > | ddeutil-workflow:python3.11 | `3.11` | :x: |
71
60
  > | ddeutil-workflow:python3.12 | `3.12` | :x: |
72
61
 
73
- ## Getting Started
74
-
75
- The main feature of this project is the `Pipeline` object that can call any
76
- registries function. The pipeline can handle everything that you want to do, it
77
- will passing parameters and catching the output for re-use it to next step.
78
-
79
- ### On
80
-
81
- The **On** is schedule object that receive crontab value and able to generate
82
- datetime value with next or previous with any start point of an input datetime.
83
-
84
- ```yaml
85
- # This file should keep under this path: `./root-path/conf-path/*`
86
- on_every_5_min:
87
- type: on.On
88
- cron: "*/5 * * * *"
89
- ```
90
-
91
- ```python
92
- from ddeutil.workflow.on import On
93
-
94
- # NOTE: Start load the on data from `.yaml` template file with this key.
95
- schedule = On.from_loader(name='on_every_5_min', externals={})
96
-
97
- assert '*/5 * * * *' == str(schedule.cronjob)
98
-
99
- cron_iter = schedule.generate('2022-01-01 00:00:00')
100
-
101
- assert "2022-01-01 00:05:00" f"{cron_iter.next:%Y-%m-%d %H:%M:%S}"
102
- assert "2022-01-01 00:10:00" f"{cron_iter.next:%Y-%m-%d %H:%M:%S}"
103
- assert "2022-01-01 00:15:00" f"{cron_iter.next:%Y-%m-%d %H:%M:%S}"
104
- ```
105
-
106
- ### Pipeline
107
-
108
- The **Pipeline** object that is the core feature of this project.
109
-
110
- ```yaml
111
- # This file should keep under this path: `./root-path/conf-path/*`
112
- pipeline-name:
113
- type: ddeutil.workflow.pipeline.Pipeline
114
- on: 'on_every_5_min'
115
- params:
116
- author-run:
117
- type: str
118
- run-date:
119
- type: datetime
120
- jobs:
121
- first-job:
122
- stages:
123
- - name: "Empty stage do logging to console only!!"
124
- ```
125
-
126
- ```python
127
- from ddeutil.workflow.pipeline import Pipeline
128
-
129
- pipe = Pipeline.from_loader(name='pipeline-name', externals={})
130
- pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
131
- ```
132
-
133
- > [!NOTE]
134
- > The above parameter can use short declarative statement. You can pass a parameter
135
- > type to the key of a parameter name but it does not handler default value if you
136
- > run this pipeline workflow with schedule.
137
- >
138
- > ```yaml
139
- > ...
140
- > params:
141
- > author-run: str
142
- > run-date: datetime
143
- > ...
144
- > ```
145
- >
146
- > And for the type, you can remove `ddeutil.workflow` prefix because we can find
147
- > it by looping search from `WORKFLOW_CORE_REGISTRY` value.
148
-
149
62
  ## Usage
150
63
 
151
64
  This is examples that use workflow file for running common Data Engineering
@@ -176,7 +89,9 @@ run_py_local:
176
89
  url: https://open-data/
177
90
  auth: ${API_ACCESS_REFRESH_TOKEN}
178
91
  aws_s3_path: my-data/open-data/
179
- # This Authentication code should implement with your custom hook function
92
+
93
+ # This Authentication code should implement with your custom hook function.
94
+ # The template allows you to use environment variables.
180
95
  aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
181
96
  aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
182
97
  ```
@@ -194,28 +109,24 @@ run_py_local:
194
109
  | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | true | A flag that all stage raise StageException from stage execution |
195
110
  | `WORKFLOW_CORE_MAX_PIPELINE_POKING` | Core | 4 | |
196
111
  | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in pipeline executor |
112
+ | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode |
197
113
  | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination |
198
114
 
199
115
 
200
116
  **Application**:
201
117
 
202
- | Environment | Default | Description |
203
- |-------------------------------------|---------|-------------|
204
- | `WORKFLOW_APP_PROCESS_WORKER` | 2 | |
205
- | `WORKFLOW_APP_PIPELINE_PER_PROCESS` | 100 | |
118
+ | Environment | Default | Description |
119
+ |-------------------------------------|----------------------------------|-------------------------------------------------------------------------|
120
+ | `WORKFLOW_APP_PROCESS_WORKER` | 2 | The maximum process worker number that run in scheduler app module |
121
+ | `WORKFLOW_APP_SCHEDULE_PER_PROCESS` | 100 | A schedule per process that run parallel |
122
+ | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format |
206
123
 
207
124
  **API server**:
208
125
 
209
- | Environment | Default | Description |
210
- |-----------------------|--------------------------------------------------------|--------------------------------------------------------------------|
211
- | `WORKFLOW_API_DB_URL` | postgresql+asyncpg://user:pass@localhost:5432/schedule | A Database URL that will pass to SQLAlchemy create_engine function |
212
-
213
- ## Future
214
-
215
- The current milestone that will develop and necessary features that should to
216
- implement on this project.
217
-
218
- - ...
126
+ | Environment | Default | Description |
127
+ |--------------------------------------|---------|-----------------------------------------------------------------------------------|
128
+ | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | true | A flag that enable workflow route to manage execute manually and workflow logging |
129
+ | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | true | A flag that enable run scheduler |
219
130
 
220
131
  ## Deployment
221
132
 
@@ -237,4 +148,4 @@ like crontab job but via Python API.
237
148
 
238
149
  > [!NOTE]
239
150
  > If this package already deploy, it able to use
240
- > `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80`
151
+ > `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80 --workers 4`
@@ -25,10 +25,9 @@ classifiers = [
25
25
  ]
26
26
  requires-python = ">=3.9.13"
27
27
  dependencies = [
28
- "fmtutil",
29
28
  "ddeutil-io",
30
29
  "python-dotenv==1.0.1",
31
- "typer==0.12.4",
30
+ "typer==0.12.5,<1.0.0",
32
31
  ]
33
32
  dynamic = ["version"]
34
33
 
@@ -37,9 +36,7 @@ schedule = [
37
36
  "schedule==1.2.2,<2.0.0",
38
37
  ]
39
38
  api = [
40
- "fastapi[standard]==0.112.1",
41
- # TODO: This package can migrate to use /cron/
42
- "croniter==3.0.3",
39
+ "fastapi[standard]==0.112.2,<1.0.0",
43
40
  ]
44
41
 
45
42
  [project.urls]
@@ -47,7 +44,7 @@ Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
47
44
  "Source Code" = "https://github.com/ddeutils/ddeutil-workflow/"
48
45
 
49
46
  [project.scripts]
50
- workflow = "ddeutil.workflow.cli:app"
47
+ ddeutil-workflow = "ddeutil.workflow.cli:cli"
51
48
 
52
49
  [tool.setuptools.dynamic]
53
50
  version = {attr = "ddeutil.workflow.__about__.__version__"}
@@ -86,7 +83,7 @@ addopts = [
86
83
  filterwarnings = ["error"]
87
84
  log_cli = true
88
85
  log_cli_level = "DEBUG"
89
- log_cli_format = "%(asctime)s [%(levelname)-7s] %(message)-100s (%(filename)s:%(lineno)s)"
86
+ log_cli_format = "%(asctime)s [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)"
90
87
  log_cli_date_format = "%Y%m%d %H:%M:%S"
91
88
 
92
89
  [tool.black]
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.10"
@@ -6,7 +6,6 @@
6
6
  from __future__ import annotations
7
7
 
8
8
  import asyncio
9
- import logging
10
9
  import os
11
10
  import uuid
12
11
  from queue import Empty, Queue
@@ -18,33 +17,33 @@ from fastapi.middleware.gzip import GZipMiddleware
18
17
  from fastapi.responses import UJSONResponse
19
18
  from pydantic import BaseModel
20
19
 
20
+ from .__about__ import __version__
21
+ from .log import get_logger
21
22
  from .repeat import repeat_every
22
23
 
23
24
  load_dotenv()
24
- logger = logging.getLogger(__name__)
25
- logging.basicConfig(
26
- level=logging.DEBUG,
27
- format=(
28
- "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, %(thread)-5d) "
29
- "[%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)"
30
- ),
31
- handlers=[logging.StreamHandler()],
32
- datefmt="%Y-%m-%d %H:%M:%S",
33
- )
25
+ logger = get_logger("ddeutil.workflow")
34
26
 
35
27
 
36
- app = FastAPI()
28
+ app = FastAPI(
29
+ title="Workflow API",
30
+ description=(
31
+ "This is workflow FastAPI web application that use to manage manual "
32
+ "execute or schedule workflow via RestAPI."
33
+ ),
34
+ version=__version__,
35
+ )
37
36
  app.add_middleware(GZipMiddleware, minimum_size=1000)
38
37
  app.queue = Queue()
39
38
  app.output_dict = {}
40
- app.queue_limit = 2
39
+ app.queue_limit = 5
41
40
 
42
41
 
43
42
  @app.on_event("startup")
44
- @repeat_every(seconds=10, logger=logger)
43
+ @repeat_every(seconds=10)
45
44
  def broker_upper_messages():
46
45
  """Broker for receive message from the `/upper` path and change it to upper
47
- case.
46
+ case. This broker uses interval running in the background every 10 seconds.
48
47
  """
49
48
  for _ in range(app.queue_limit):
50
49
  try:
@@ -66,11 +65,12 @@ async def get_result(request_id):
66
65
  result = app.output_dict[request_id]
67
66
  del app.output_dict[request_id]
68
67
  return {"message": result}
69
- await asyncio.sleep(0.001)
68
+ await asyncio.sleep(0.0025)
70
69
 
71
70
 
72
71
  @app.post("/upper", response_class=UJSONResponse)
73
72
  async def message_upper(payload: Payload):
73
+ """Convert message from any case to the upper case."""
74
74
  request_id: str = str(uuid.uuid4())
75
75
  app.queue.put(
76
76
  {"text": payload.text, "request_id": request_id},
@@ -0,0 +1,134 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ import json
9
+ import os
10
+ from datetime import datetime
11
+ from enum import Enum
12
+ from typing import Annotated, Optional
13
+ from zoneinfo import ZoneInfo
14
+
15
+ from ddeutil.core import str2list
16
+ from typer import Argument, Option, Typer
17
+
18
+ from .log import get_logger
19
+
20
+ logger = get_logger("ddeutil.workflow")
21
+ cli: Typer = Typer()
22
+ cli_log: Typer = Typer()
23
+ cli.add_typer(
24
+ cli_log,
25
+ name="log",
26
+ help="Logging of workflow CLI",
27
+ )
28
+
29
+
30
+ @cli.command()
31
+ def run(
32
+ pipeline: Annotated[
33
+ str,
34
+ Argument(help="A pipeline name that want to run manually"),
35
+ ],
36
+ params: Annotated[
37
+ str,
38
+ Argument(
39
+ help="A json string for parameters of this pipeline execution."
40
+ ),
41
+ ],
42
+ ):
43
+ """Run pipeline workflow manually with an input custom parameters that able
44
+ to receive with pipeline params config.
45
+ """
46
+ logger.info(f"Running pipeline name: {pipeline}")
47
+ logger.info(f"... with Parameters: {json.dumps(json.loads(params))}")
48
+
49
+
50
+ @cli.command()
51
+ def schedule(
52
+ stop: Annotated[
53
+ Optional[datetime],
54
+ Argument(
55
+ formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S"],
56
+ help="A stopping datetime that want to stop on schedule app.",
57
+ ),
58
+ ] = None,
59
+ excluded: Annotated[
60
+ Optional[str],
61
+ Argument(help="A list of exclude workflow name in str."),
62
+ ] = None,
63
+ externals: Annotated[
64
+ Optional[str],
65
+ Argument(
66
+ help="A json string for parameters of this pipeline execution."
67
+ ),
68
+ ] = None,
69
+ ):
70
+ """Start workflow scheduler that will call workflow function from scheduler
71
+ module.
72
+ """
73
+ excluded: list[str] = str2list(excluded) if excluded else []
74
+ externals: str = externals or "{}"
75
+ if stop:
76
+ stop: datetime = stop.astimezone(
77
+ tz=ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
78
+ )
79
+
80
+ from .scheduler import workflow
81
+
82
+ # NOTE: Start running workflow scheduler application.
83
+ workflow_rs: list[str] = workflow(
84
+ stop=stop, excluded=excluded, externals=json.loads(externals)
85
+ )
86
+ logger.info(f"Application run success: {workflow_rs}")
87
+
88
+
89
+ @cli_log.command("pipeline-get")
90
+ def pipeline_log_get(
91
+ name: Annotated[
92
+ str,
93
+ Argument(help="A pipeline name that want to getting log"),
94
+ ],
95
+ limit: Annotated[
96
+ int,
97
+ Argument(help="A number of the limitation of logging"),
98
+ ] = 100,
99
+ desc: Annotated[
100
+ bool,
101
+ Option(
102
+ "--desc",
103
+ help="A descending flag that order by logging release datetime.",
104
+ ),
105
+ ] = True,
106
+ ):
107
+ logger.info(f"{name} : limit {limit} : desc: {desc}")
108
+ return [""]
109
+
110
+
111
+ class LogMode(str, Enum):
112
+ get = "get"
113
+ delete = "delete"
114
+
115
+
116
+ @cli_log.command("pipeline-delete")
117
+ def pipeline_log_delete(
118
+ mode: Annotated[
119
+ LogMode,
120
+ Argument(case_sensitive=True),
121
+ ]
122
+ ):
123
+ logger.info(mode)
124
+
125
+
126
+ @cli.callback()
127
+ def main():
128
+ """
129
+ Manage workflow with CLI.
130
+ """
131
+
132
+
133
+ if __name__ == "__main__":
134
+ cli()