ddeutil-workflow 0.0.6__tar.gz → 0.0.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/PKG-INFO +61 -14
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/README.md +55 -11
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/pyproject.toml +14 -6
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/__about__.py +1 -0
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/__init__.py +31 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/__types.py +11 -1
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/api.py +120 -0
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/app.py +41 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/exceptions.py +3 -0
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/log.py +30 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/pipeline.py +341 -105
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/repeat.py +134 -0
- ddeutil_workflow-0.0.7/src/ddeutil/workflow/route.py +78 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/stage.py +41 -12
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/utils.py +280 -56
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil_workflow.egg-info/PKG-INFO +61 -14
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil_workflow.egg-info/SOURCES.txt +6 -0
- ddeutil_workflow-0.0.7/src/ddeutil_workflow.egg-info/requires.txt +11 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test__regex.py +41 -5
- ddeutil_workflow-0.0.7/tests/test_pipeline_matrix.py +159 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline_task.py +16 -0
- ddeutil_workflow-0.0.7/tests/test_stage_trigger.py +32 -0
- ddeutil_workflow-0.0.7/tests/test_utils_param2template.py +71 -0
- ddeutil_workflow-0.0.6/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.6/src/ddeutil/workflow/__init__.py +0 -9
- ddeutil_workflow-0.0.6/src/ddeutil_workflow.egg-info/requires.txt +0 -7
- ddeutil_workflow-0.0.6/tests/test_pipeline_matrix.py +0 -87
- ddeutil_workflow-0.0.6/tests/test_stage_trigger.py +0 -10
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/LICENSE +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/loader.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/on.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil/workflow/scheduler.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test__conf_exist.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test__local_and_global.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_on.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline_desc.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline_if.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline_on.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline_params.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_pipeline_run.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_scheduler.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.6 → ddeutil_workflow-0.0.7}/tests/test_utils_result.py +0 -0
--- ddeutil_workflow-0.0.6/PKG-INFO
+++ ddeutil_workflow-0.0.7/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.6
+Version: 0.0.7
 Summary: Data Developer & Engineer Workflow Utility Objects
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -25,8 +25,11 @@ Requires-Dist: fmtutil
 Requires-Dist: ddeutil-io
 Requires-Dist: python-dotenv==1.0.1
 Provides-Extra: app
-Requires-Dist:
-
+Requires-Dist: schedule<2.0.0,==1.2.2; extra == "app"
+Provides-Extra: api
+Requires-Dist: fastapi[standard]==0.112.0; extra == "api"
+Requires-Dist: apscheduler[sqlalchemy]<4.0.0,==3.10.4; extra == "api"
+Requires-Dist: croniter==3.0.3; extra == "api"
 
 # Workflow
 
@@ -39,7 +42,6 @@ Requires-Dist: apscheduler[sqlalchemy]==3.10.4; extra == "app"
 
 - [Installation](#installation)
 - [Getting Started](#getting-started)
-- [Core Features](#core-features)
 - [On](#on)
 - [Pipeline](#pipeline)
 - [Usage](#usage)
@@ -50,12 +52,14 @@ Requires-Dist: apscheduler[sqlalchemy]==3.10.4; extra == "app"
 - [Deployment](#deployment)
 
 This **Workflow** objects was created for easy to make a simple metadata
-driven pipeline that able to **ETL, T, EL, or
+driven for data pipeline orchestration that able to use for **ETL, T, EL, or
+ELT** by a `.yaml` file template.
 
-I think
-write
-
-
+In my opinion, I think it should not create duplicate pipeline codes if I can
+write with dynamic input parameters on the one template pipeline that just change
+the input parameters per use-case instead.
+This way I can handle a lot of logical pipelines in our orgs with only metadata
+configuration. It called **Metadata Driven Data Pipeline**.
 
 Next, we should get some monitoring tools for manage logging that return from
 pipeline running. Because it not show us what is a use-case that running data
@@ -79,6 +83,10 @@ this package with application add-ons, you should add `app` in installation;
 pip install ddeutil-workflow[app]
 ```
 
+```shell
+pip install ddeutil-workflow[api]
+```
+
 ## Getting Started
 
 The first step, you should start create the connections and datasets for In and
@@ -240,6 +248,18 @@ pipe_el_pg_to_lake:
       endpoint: "/${{ params.name }}"
 ```
 
+Implement hook:
+
+```python
+from ddeutil.workflow.utils import tag
+
+@tag('polars', alias='postgres-to-delta')
+def postgres_to_delta(source, sink):
+    return {
+        "source": source, "sink": sink
+    }
+```
+
 ### Hook (Transform)
 
 ```yaml
@@ -265,12 +285,30 @@ pipeline_hook_mssql_proc:
       target: ${{ params.target_name }}
 ```
 
+Implement hook:
+
+```python
+from ddeutil.workflow.utils import tag
+
+@tag('odbc', alias='mssql-proc')
+def odbc_mssql_procedure(_exec: str, params: dict):
+    return {
+        "exec": _exec, "params": params
+    }
+```
+
 ## Configuration
 
 ```bash
 export WORKFLOW_ROOT_PATH=.
 export WORKFLOW_CORE_REGISTRY=ddeutil.workflow,tests.utils
+export WORKFLOW_CORE_REGISTRY_FILTER=ddeutil.workflow.utils
 export WORKFLOW_CORE_PATH_CONF=conf
+export WORKFLOW_CORE_TIMEZONE=Asia/Bangkok
+export WORKFLOW_CORE_DEFAULT_STAGE_ID=true
+
+export WORKFLOW_CORE_MAX_PIPELINE_POKING=4
+export WORKFLOW_CORE_MAX_JOB_PARALLEL=2
 ```
 
 Application config:
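Aside: since `python-dotenv==1.0.1` is a core dependency, the `WORKFLOW_*` variables above can also live in a `.env` file rather than the shell. A minimal sketch (variable names come from the hunk above; the fallback values are illustrative):

```python
import os

from dotenv import load_dotenv

# Pick up WORKFLOW_* variables from a .env file in the working directory.
load_dotenv()

timezone: str = os.getenv("WORKFLOW_CORE_TIMEZONE", "Asia/Bangkok")
max_poking: int = int(os.getenv("WORKFLOW_CORE_MAX_PIPELINE_POKING", "4"))
print(timezone, max_poking)
```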
@@ -283,12 +321,21 @@ export WORKFLOW_APP_INTERVAL=10
 ## Deployment
 
 This package able to run as a application service for receive manual trigger
-from the master node via RestAPI
+from the master node via RestAPI or use to be Scheduler background service
+like crontab job but via Python API.
+
+### Schedule Service
 
-
-
-
+```shell
+(venv) $ python src.ddeutil.workflow.app
+```
+
+### API Server
 
 ```shell
-(venv) $ workflow
+(venv) $ uvicorn src.ddeutil.workflow.api:app --host 0.0.0.0 --port 80 --reload
 ```
+
+> [!NOTE]
+> If this package already deploy, it able to use
+> `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80`
--- ddeutil_workflow-0.0.6/README.md
+++ ddeutil_workflow-0.0.7/README.md
@@ -9,7 +9,6 @@
 
 - [Installation](#installation)
 - [Getting Started](#getting-started)
-- [Core Features](#core-features)
 - [On](#on)
 - [Pipeline](#pipeline)
 - [Usage](#usage)
@@ -20,12 +19,14 @@
 - [Deployment](#deployment)
 
 This **Workflow** objects was created for easy to make a simple metadata
-driven pipeline that able to **ETL, T, EL, or
+driven for data pipeline orchestration that able to use for **ETL, T, EL, or
+ELT** by a `.yaml` file template.
 
-I think
-write
-
-
+In my opinion, I think it should not create duplicate pipeline codes if I can
+write with dynamic input parameters on the one template pipeline that just change
+the input parameters per use-case instead.
+This way I can handle a lot of logical pipelines in our orgs with only metadata
+configuration. It called **Metadata Driven Data Pipeline**.
 
 Next, we should get some monitoring tools for manage logging that return from
 pipeline running. Because it not show us what is a use-case that running data
@@ -49,6 +50,10 @@ this package with application add-ons, you should add `app` in installation;
 pip install ddeutil-workflow[app]
 ```
 
+```shell
+pip install ddeutil-workflow[api]
+```
+
 ## Getting Started
 
 The first step, you should start create the connections and datasets for In and
@@ -210,6 +215,18 @@ pipe_el_pg_to_lake:
       endpoint: "/${{ params.name }}"
 ```
 
+Implement hook:
+
+```python
+from ddeutil.workflow.utils import tag
+
+@tag('polars', alias='postgres-to-delta')
+def postgres_to_delta(source, sink):
+    return {
+        "source": source, "sink": sink
+    }
+```
+
 ### Hook (Transform)
 
 ```yaml
@@ -235,12 +252,30 @@ pipeline_hook_mssql_proc:
       target: ${{ params.target_name }}
 ```
 
+Implement hook:
+
+```python
+from ddeutil.workflow.utils import tag
+
+@tag('odbc', alias='mssql-proc')
+def odbc_mssql_procedure(_exec: str, params: dict):
+    return {
+        "exec": _exec, "params": params
+    }
+```
+
 ## Configuration
 
 ```bash
 export WORKFLOW_ROOT_PATH=.
 export WORKFLOW_CORE_REGISTRY=ddeutil.workflow,tests.utils
+export WORKFLOW_CORE_REGISTRY_FILTER=ddeutil.workflow.utils
 export WORKFLOW_CORE_PATH_CONF=conf
+export WORKFLOW_CORE_TIMEZONE=Asia/Bangkok
+export WORKFLOW_CORE_DEFAULT_STAGE_ID=true
+
+export WORKFLOW_CORE_MAX_PIPELINE_POKING=4
+export WORKFLOW_CORE_MAX_JOB_PARALLEL=2
 ```
 
 Application config:
@@ -253,12 +288,21 @@ export WORKFLOW_APP_INTERVAL=10
 ## Deployment
 
 This package able to run as a application service for receive manual trigger
-from the master node via RestAPI
+from the master node via RestAPI or use to be Scheduler background service
+like crontab job but via Python API.
+
+### Schedule Service
 
-
-
-
+```shell
+(venv) $ python src.ddeutil.workflow.app
+```
+
+### API Server
 
 ```shell
-(venv) $ workflow
+(venv) $ uvicorn src.ddeutil.workflow.api:app --host 0.0.0.0 --port 80 --reload
 ```
+
+> [!NOTE]
+> If this package already deploy, it able to use
+> `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80`
--- ddeutil_workflow-0.0.6/pyproject.toml
+++ ddeutil_workflow-0.0.7/pyproject.toml
@@ -33,8 +33,12 @@ dynamic = ["version"]
 
 [project.optional-dependencies]
 app = [
-    "
-
+    "schedule==1.2.2,<2.0.0",
+]
+api = [
+    "fastapi[standard]==0.112.0",
+    "apscheduler[sqlalchemy]==3.10.4,<4.0.0",
+    "croniter==3.0.3",
 ]
 
 [project.urls]
@@ -55,8 +59,12 @@ changelog = "CHANGELOG.md"
 branch = true
 relative_files = true
 concurrency = ["thread", "multiprocessing"]
-source = ["ddeutil", "tests"]
-omit = [
+source = ["ddeutil.workflow", "tests"]
+omit = [
+    "scripts/",
+    "tests/utils.py",
+    "tests/tasks/dummy.py",
+]
 
 [tool.coverage.report]
 exclude_lines = ["raise NotImplementedError"]
@@ -71,8 +79,8 @@ addopts = [
 filterwarnings = ["error"]
 log_cli = true
 log_cli_level = "DEBUG"
-log_cli_format = "%(asctime)s [%(levelname)7s] %(message)
-log_cli_date_format = "%Y
+log_cli_format = "%(asctime)s [%(levelname)-7s] %(message)-75s (%(filename)s:%(lineno)s)"
+log_cli_date_format = "%Y%m%d %H:%M:%S"
 
 [tool.black]
 line-length = 80
--- /dev/null
+++ ddeutil_workflow-0.0.7/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.7"
--- /dev/null
+++ ddeutil_workflow-0.0.7/src/ddeutil/workflow/__init__.py
@@ -0,0 +1,31 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from .exceptions import (
+    JobException,
+    ParamValueException,
+    PipelineException,
+    StageException,
+    UtilException,
+)
+from .on import AwsOn, On
+from .pipeline import Job, Pipeline
+from .stage import (
+    BashStage,
+    EmptyStage,
+    HookStage,
+    PyStage,
+    Stage,
+    TriggerStage,
+)
+from .utils import (
+    ChoiceParam,
+    DatetimeParam,
+    IntParam,
+    Param,
+    StrParam,
+    dash2underscore,
+    param2template,
+)
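With this new `__init__.py`, the public objects import from the package root. A quick equivalence check (all names come from the import list above):

```python
# Flattened imports exposed by the new __init__.py:
from ddeutil.workflow import Job, Pipeline, PyStage, StrParam

# The same objects via their defining submodules:
from ddeutil.workflow.pipeline import Job, Pipeline
from ddeutil.workflow.stage import PyStage
from ddeutil.workflow.utils import StrParam
```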
--- ddeutil_workflow-0.0.6/src/ddeutil/workflow/__types.py
+++ ddeutil_workflow-0.0.7/src/ddeutil/workflow/__types.py
@@ -27,12 +27,21 @@ class Re:
     """Regular expression config."""
 
     # NOTE: Search caller
+    #   \${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
     __re_caller: str = r"""
         \$
         {{
-        \s*
+        \s*
+        (?P<caller>
             [a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?
         )\s*
+        (?P<post_filters>
+            (?:
+                \|\s*
+                (?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]*)
+                \s*
+            )*
+        )
         }}
     """
     RE_CALLER: Pattern = re.compile(
@@ -40,6 +49,7 @@ class Re:
     )
 
     # NOTE: Search task
+    #   ^(?P<path>[^/@]+)/(?P<func>[^@]+)@(?P<tag>.+)$
     __re_task_fmt: str = r"""
         ^
         (?P<path>[^/@]+)
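The one-line comments added above each verbose pattern make the change easy to test in isolation. A standalone demo of the caller pattern showing what the new `post_filters` group captures (the `fmt(...)` filter name is illustrative, not a filter shipped in this diff):

```python
import re

# The single-line caller pattern added as a comment in __types.py.
RE_CALLER = re.compile(
    r"\${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*"
    r"(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}"
    r"[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}"
)

m = RE_CALLER.search("run at ${{ params.asat_dt | fmt('%Y%m%d') }}")
print(m.group("caller"))                # -> params.asat_dt
print(m.group("post_filters").strip())  # -> | fmt('%Y%m%d')
```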
--- /dev/null
+++ ddeutil_workflow-0.0.7/src/ddeutil/workflow/api.py
@@ -0,0 +1,120 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import asyncio
+import queue
+import time
+import uuid
+from contextlib import asynccontextmanager
+from datetime import datetime
+
+from apscheduler.executors.pool import ProcessPoolExecutor
+from apscheduler.jobstores.memory import MemoryJobStore
+from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+from fastapi import BackgroundTasks, FastAPI
+from fastapi.middleware.gzip import GZipMiddleware
+from fastapi.responses import UJSONResponse
+from pydantic import BaseModel
+
+from .log import get_logger
+from .repeat import repeat_every
+from .route import schedule_route, workflow_route
+
+logger = get_logger(__name__)
+
+
+def broker_upper_messages():
+    for _ in range(app.queue_limit):
+        try:
+            obj = app.queue.get_nowait()
+            app.output_dict[obj["request_id"]] = obj["text"].upper()
+            logger.info(f"Upper message: {app.output_dict}")
+        except queue.Empty:
+            pass
+
+
+jobstores = {
+    "default": MemoryJobStore(),
+    "sqlite": SQLAlchemyJobStore(url="sqlite:///jobs-store.sqlite"),
+}
+executors = {
+    "default": {"type": "threadpool", "max_workers": 5},
+    "processpool": ProcessPoolExecutor(max_workers=5),
+}
+scheduler = AsyncIOScheduler(
+    jobstores=jobstores,
+    executors=executors,
+    timezone="Asia/Bangkok",
+)
+
+
+@asynccontextmanager
+async def lifespan(_: FastAPI):
+    scheduler.start()
+    yield
+    scheduler.shutdown(wait=False)
+
+
+app = FastAPI(lifespan=lifespan)
+app.add_middleware(GZipMiddleware, minimum_size=1000)
+app.include_router(schedule_route)
+app.include_router(workflow_route)
+
+app.scheduler = scheduler
+app.scheduler.add_job(
+    broker_upper_messages,
+    "interval",
+    seconds=10,
+)
+app.queue = queue.Queue()
+app.output_dict = {}
+app.queue_limit = 2
+
+
+def write_pipeline(task_id: str, message=""):
+    logger.info(f"{task_id} : {message}")
+    time.sleep(5)
+    logger.info(f"{task_id} : run task successfully!!!")
+
+
+@app.post("/schedule/{name}", response_class=UJSONResponse)
+async def send_schedule(name: str, background_tasks: BackgroundTasks):
+    background_tasks.add_task(
+        write_pipeline,
+        name,
+        message=f"some message for {name}",
+    )
+    await fetch_current_time()
+    return {"message": f"Schedule sent {name!r} in the background"}
+
+
+@repeat_every(seconds=2, max_repetitions=3)
+async def fetch_current_time():
+    logger.info(f"Fetch: {datetime.now()}")
+
+
+class Payload(BaseModel):
+    text: str
+
+
+async def get_result(request_id):
+    while 1:
+        if request_id in app.output_dict:
+            result = app.output_dict[request_id]
+            del app.output_dict[request_id]
+            return {"message": result}
+        await asyncio.sleep(0.001)
+
+
+@app.post("/upper", response_class=UJSONResponse)
+async def message_upper(payload: Payload):
+    request_id: str = str(uuid.uuid4())
+    app.queue.put(
+        {"text": payload.text, "request_id": request_id},
+    )
+    return await get_result(request_id)
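A hypothetical smoke test for the new `/upper` endpoint, assuming the API server from the Deployment section is running locally on port 80 (`httpx` here is just a demo client, not a package dependency). The reply can take up to one 10-second interval of the `broker_upper_messages` job, because `/upper` only enqueues the text and `get_result` polls until that job has uppercased it:

```python
import httpx

resp = httpx.post(
    "http://localhost:80/upper",
    json={"text": "hello workflow"},
    timeout=15,  # allow one full broker interval plus overhead
)
print(resp.json())  # -> {'message': 'HELLO WORKFLOW'}
```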
--- /dev/null
+++ ddeutil_workflow-0.0.7/src/ddeutil/workflow/app.py
@@ -0,0 +1,41 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+import functools
+import time
+
+import schedule
+
+
+def catch_exceptions(cancel_on_failure=False):
+    def catch_exceptions_decorator(job_func):
+        @functools.wraps(job_func)
+        def wrapper(*args, **kwargs):
+            try:
+                return job_func(*args, **kwargs)
+            except Exception as err:
+                print(err)
+
+                if cancel_on_failure:
+                    return schedule.CancelJob
+
+        return wrapper
+
+    return catch_exceptions_decorator
+
+
+@catch_exceptions(cancel_on_failure=True)
+def bad_task():
+    return 1 / 0
+
+
+schedule.every(5).seconds.do(bad_task)
+
+if __name__ == "__main__":
+    while True:
+        schedule.run_pending()
+        time.sleep(1)
+        if not schedule.get_jobs():
+            break
--- /dev/null
+++ ddeutil_workflow-0.0.7/src/ddeutil/workflow/log.py
@@ -0,0 +1,30 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import logging
+from functools import lru_cache
+
+from rich.console import Console
+from rich.logging import RichHandler
+
+console = Console(color_system="256", width=200, style="blue")
+
+
+@lru_cache
+def get_logger(module_name):
+    logger = logging.getLogger(module_name)
+    handler = RichHandler(
+        rich_tracebacks=True, console=console, tracebacks_show_locals=True
+    )
+    handler.setFormatter(
+        logging.Formatter(
+            "[ %(threadName)s:%(funcName)s:%(process)d ] - %(message)s"
+        )
+    )
+    logger.addHandler(handler)
+    logger.setLevel(logging.DEBUG)
+    return logger