ddeutil-workflow 0.0.36__tar.gz → 0.0.37__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/PKG-INFO +5 -5
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/README.md +4 -4
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/pyproject.toml +3 -1
- ddeutil_workflow-0.0.37/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/api.py +3 -1
- ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/log.py +59 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/repeat.py +1 -1
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/job.py +1 -1
- ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/routes/logs.py +165 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/schedules.py +6 -6
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/workflows.py +9 -7
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/caller.py +3 -1
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/conf.py +0 -60
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/job.py +287 -233
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/scheduler.py +11 -4
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/stages.py +4 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/utils.py +33 -12
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/PKG-INFO +5 -5
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_call_tag.py +52 -2
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_job.py +9 -3
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_job_exec.py +12 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_job_exec_strategy.py +7 -7
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/logs.py +0 -64
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/LICENSE +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/audit.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/cron.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/logs.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/params.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/result.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/templates.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/workflow.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_audit.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_cron_on.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_logs.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_schedule_pending.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_schedule_workflow.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_scheduler_control.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_templates.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_templates_filter.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec_job.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec_poke.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec_release.py +0 -0
- {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/PKG-INFO
RENAMED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ddeutil-workflow
-Version: 0.0.36
+Version: 0.0.37
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -61,10 +61,10 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
 
-1. The Minimum frequency unit of scheduling is **1
-2. Can not re-run only failed stage and its pending downstream
-3. All parallel tasks inside workflow engine use Multi-Threading
-   (
+1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+2. **Can not** re-run only failed stage and its pending downstream ↩️
+3. All parallel tasks inside workflow engine use **Multi-Threading**
+   (Python 3.13 unlock GIL 🐍🔓)
 
 ---
 
```
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/README.md
RENAMED
```diff
@@ -27,10 +27,10 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
 
-1. The Minimum frequency unit of scheduling is **1
-2. Can not re-run only failed stage and its pending downstream
-3. All parallel tasks inside workflow engine use Multi-Threading
-   (
+1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+2. **Can not** re-run only failed stage and its pending downstream ↩️
+3. All parallel tasks inside workflow engine use **Multi-Threading**
+   (Python 3.13 unlock GIL 🐍🔓)
 
 ---
 
```
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/pyproject.toml
RENAMED
```diff
@@ -63,12 +63,13 @@ commit_prefix_force_fix = true
 branch = true
 relative_files = true
 concurrency = ["thread", "multiprocessing"]
-source = ["ddeutil.workflow"]
+source = ["src.ddeutil.workflow"]
 omit = [
     "src/ddeutil/workflow/__about__.py",
     "src/ddeutil/workflow/__cron.py",
     "src/ddeutil/workflow/api/__init__.py",
     "src/ddeutil/workflow/api/api.py",
+    "src/ddeutil/workflow/api/log.py",
     "src/ddeutil/workflow/api/repeat.py",
     "src/ddeutil/workflow/api/routes/__init__.py",
     "src/ddeutil/workflow/api/routes/job.py",
@@ -100,6 +101,7 @@ addopts = [
 filterwarnings = [
     "error",
     "ignore::DeprecationWarning",
+    "ignore::pytest.PytestUnraisableExceptionWarning",
 ]
 log_cli = true
 log_cli_level = "DEBUG"
```
ddeutil_workflow-0.0.37/src/ddeutil/workflow/__about__.py
ADDED
```diff
@@ -0,0 +1 @@
+__version__: str = "0.0.37"
```
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/api.py
RENAMED
```diff
@@ -27,7 +27,7 @@ from .repeat import repeat_at
 from .routes import job, log
 
 load_dotenv()
-logger = get_logger("
+logger = get_logger("uvicorn.error")
 
 
 class State(TypedDict):
@@ -151,6 +151,7 @@ if config.enable_route_schedule:
 async def validation_exception_handler(
     request: Request, exc: RequestValidationError
 ):
+    _ = request
     return UJSONResponse(
         status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
         content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
@@ -164,4 +165,5 @@ if __name__ == "__main__":
         app,
         host="0.0.0.0",
         port=80,
+        log_level="DEBUG",
     )
```
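The switch to `get_logger("uvicorn.error")` means the API module now writes through the logger that uvicorn itself configures, so application messages share the server's handlers and format. A minimal sketch of that effect (not part of the package):

```python
# Minimal sketch: logging.getLogger("uvicorn.error") returns the same logger
# object that uvicorn configures, so records emitted here flow through
# uvicorn's handlers (including the rotating-file handler added in the new
# api/log.py module shown further below).
import logging

logger = logging.getLogger("uvicorn.error")
logger.info("workflow API started")
```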
ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/log.py
ADDED
```diff
@@ -0,0 +1,59 @@
+from ..conf import config
+
+LOGGING_CONFIG = {  # pragma: no cov
+    "version": 1,
+    "disable_existing_loggers": False,
+    "formatters": {
+        "standard": {
+            "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+        },
+        "custom_formatter": {
+            "format": config.log_format,
+            "datefmt": config.log_datetime_format,
+        },
+    },
+    "root": {
+        "level": "DEBUG" if config.debug else "INFO",
+    },
+    "handlers": {
+        "default": {
+            "formatter": "standard",
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stderr",
+        },
+        "stream_handler": {
+            "formatter": "custom_formatter",
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stdout",
+        },
+        "file_handler": {
+            "formatter": "custom_formatter",
+            "class": "logging.handlers.RotatingFileHandler",
+            "filename": "logs/app.log",
+            "maxBytes": 1024 * 1024 * 1,
+            "backupCount": 3,
+        },
+    },
+    "loggers": {
+        "uvicorn": {
+            "handlers": ["default", "file_handler"],
+            "level": "DEBUG" if config.debug else "INFO",
+            "propagate": False,
+        },
+        "uvicorn.access": {
+            "handlers": ["stream_handler", "file_handler"],
+            "level": "DEBUG" if config.debug else "INFO",
+            "propagate": False,
+        },
+        "uvicorn.error": {
+            "handlers": ["stream_handler", "file_handler"],
+            "level": "DEBUG" if config.debug else "INFO",
+            "propagate": False,
+        },
+        # "uvicorn.asgi": {
+        #     "handlers": ["stream_handler", "file_handler"],
+        #     "level": "TRACE",
+        #     "propagate": False,
+        # },
+    },
+}
```
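The new `LOGGING_CONFIG` is a standard `logging` dictConfig mapping, so it can be handed to uvicorn's `log_config` parameter or applied with `logging.config.dictConfig`. A hedged usage sketch follows; the application import string is an assumption, and the `logs/` directory has to exist because the `file_handler` opens `logs/app.log`.

```python
# Hedged sketch, not part of the package: serve the API with the new logging
# configuration.  The "ddeutil.workflow.api.api:app" import string is an
# assumption for illustration.
from pathlib import Path

import uvicorn

from ddeutil.workflow.api.log import LOGGING_CONFIG

# RotatingFileHandler("logs/app.log") raises if the directory is missing.
Path("logs").mkdir(exist_ok=True)

uvicorn.run(
    "ddeutil.workflow.api.api:app",  # assumed application path
    host="0.0.0.0",
    port=80,
    log_config=LOGGING_CONFIG,
)
```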
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/repeat.py
RENAMED
```diff
@@ -15,7 +15,7 @@ from starlette.concurrency import run_in_threadpool
 from ..__cron import CronJob
 from ..conf import config, get_logger
 
-logger = get_logger("
+logger = get_logger("uvicorn.error")
 
 
 def get_cronjob_delta(cron: str) -> float:
```
ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/routes/logs.py
ADDED
```diff
@@ -0,0 +1,165 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+"""This route include audit and trace log paths."""
+from __future__ import annotations
+
+from fastapi import APIRouter
+from fastapi import status as st
+from fastapi.responses import UJSONResponse
+
+from ...audit import get_audit
+from ...logs import get_trace_obj
+
+log_route = APIRouter(
+    prefix="/logs",
+    tags=["logs"],
+    default_response_class=UJSONResponse,
+)
+
+
+@log_route.get(
+    path="/traces/",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+    summary="Read all trace logs.",
+    tags=["trace"],
+)
+async def get_traces():
+    """Return all trace logs from the current trace log path that config with
+    `WORKFLOW_LOG_PATH` environment variable name.
+    """
+    return {
+        "message": "Getting trace logs",
+        "traces": [
+            trace.model_dump(
+                by_alias=True,
+                exclude_none=True,
+                exclude_unset=True,
+                exclude_defaults=True,
+            )
+            for trace in get_trace_obj().find_logs()
+        ],
+    }
+
+
+@log_route.get(
+    path="/traces/{run_id}",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+    summary="Read trace log with specific running ID.",
+    tags=["trace"],
+)
+async def get_trace_with_id(run_id: str):
+    """Return trace log with specific running ID from the current trace log path
+    that config with `WORKFLOW_LOG_PATH` environment variable name.
+
+    - **run_id**: A running ID that want to search a trace log from the log
+      path.
+    """
+    return {
+        "message": f"Getting trace log with specific running ID: {run_id}",
+        "trace": (
+            get_trace_obj()
+            .find_log_with_id(run_id)
+            .model_dump(
+                by_alias=True,
+                exclude_none=True,
+                exclude_unset=True,
+                exclude_defaults=True,
+            )
+        ),
+    }
+
+
+@log_route.get(
+    path="/audits/",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+    summary="Read all audit logs.",
+    tags=["audit"],
+)
+async def get_audits():
+    """Return all audit logs from the current audit log path that config with
+    `WORKFLOW_AUDIT_PATH` environment variable name.
+    """
+    return {
+        "message": "Getting audit logs",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(
+    path="/audits/{workflow}/",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+    summary="Read all audit logs with specific workflow name.",
+    tags=["audit"],
+)
+async def get_audit_with_workflow(workflow: str):
+    """Return all audit logs with specific workflow name from the current audit
+    log path that config with `WORKFLOW_AUDIT_PATH` environment variable name.
+
+    - **workflow**: A specific workflow name that want to find audit logs.
+    """
+    return {
+        "message": f"Getting audit logs with workflow name {workflow}",
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(
+    path="/audits/{workflow}/{release}",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+    summary="Read all audit logs with specific workflow name and release date.",
+    tags=["audit"],
+)
+async def get_audit_with_workflow_release(workflow: str, release: str):
+    """Return all audit logs with specific workflow name and release date from
+    the current audit log path that config with `WORKFLOW_AUDIT_PATH`
+    environment variable name.
+
+    - **workflow**: A specific workflow name that want to find audit logs.
+    - **release**: A release date with a string format `%Y%m%d%H%M%S`.
+    """
+    return {
+        "message": (
+            f"Getting audit logs with workflow name {workflow} and release "
+            f"{release}"
+        ),
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
+
+
+@log_route.get(
+    path="/audits/{workflow}/{release}/{run_id}",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+    summary=(
+        "Read all audit logs with specific workflow name, release date "
+        "and running ID."
+    ),
+    tags=["audit"],
+)
+async def get_audit_with_workflow_release_run_id(
+    workflow: str, release: str, run_id: str
+):
+    """Return all audit logs with specific workflow name and release date from
+    the current audit log path that config with `WORKFLOW_AUDIT_PATH`
+    environment variable name.
+
+    - **workflow**: A specific workflow name that want to find audit logs.
+    - **release**: A release date with a string format `%Y%m%d%H%M%S`.
+    - **run_id**: A running ID that want to search audit log from this release
+      date.
+    """
+    return {
+        "message": (
+            f"Getting audit logs with workflow name {workflow}, release "
+            f"{release}, and running ID {run_id}"
+        ),
+        "audits": list(get_audit().find_audits(name="demo")),
+    }
```
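A hedged sketch of calling the new `/logs` endpoints with FastAPI's `TestClient`, assuming the `log_route` router is mounted on the application without an extra prefix (the api.py hunk above imports it via `from .routes import job, log`); the app import path, workflow name, and run ID are assumptions for illustration.

```python
# Hedged sketch: exercise the new trace/audit routes.
from fastapi.testclient import TestClient

from ddeutil.workflow.api.api import app  # assumed location of the FastAPI app

client = TestClient(app)

# All trace logs found under WORKFLOW_LOG_PATH.
print(client.get("/logs/traces/").json()["message"])

# Audit logs for one workflow and one release (release uses %Y%m%d%H%M%S).
print(client.get("/logs/audits/wf-demo/20240101010101").json()["message"])
```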
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/schedules.py
RENAMED
```diff
@@ -15,7 +15,7 @@ from fastapi.responses import UJSONResponse
 from ...conf import config, get_logger
 from ...scheduler import Schedule
 
-logger = get_logger("
+logger = get_logger("uvicorn.error")
 
 schedule_route = APIRouter(
     prefix="/schedules",
@@ -24,7 +24,7 @@ schedule_route = APIRouter(
 )
 
 
-@schedule_route.get(path="/{name}")
+@schedule_route.get(path="/{name}", status_code=st.HTTP_200_OK)
 async def get_schedules(name: str):
     """Get schedule object."""
     try:
@@ -42,13 +42,13 @@ async def get_schedules(name: str):
     )
 
 
-@schedule_route.get(path="/deploy/")
+@schedule_route.get(path="/deploy/", status_code=st.HTTP_200_OK)
 async def get_deploy_schedulers(request: Request):
     snapshot = copy.deepcopy(request.state.scheduler)
     return {"schedule": snapshot}
 
 
-@schedule_route.get(path="/deploy/{name}")
+@schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
         schedule = Schedule.from_loader(name)
@@ -76,7 +76,7 @@ async def get_deploy_scheduler(request: Request, name: str):
     )
 
 
-@schedule_route.post(path="/deploy/{name}")
+@schedule_route.post(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
 async def add_deploy_scheduler(request: Request, name: str):
     """Adding schedule name to application state store."""
     if name in request.state.scheduler:
@@ -116,7 +116,7 @@ async def add_deploy_scheduler(request: Request, name: str):
     }
 
 
-@schedule_route.delete(path="/deploy/{name}")
+@schedule_route.delete(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
 async def del_deploy_scheduler(request: Request, name: str):
     """Delete workflow task on the schedule listener."""
     if name in request.state.scheduler:
```
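With the explicit `status_code` arguments above, the success responses are now declared per route: reads answer 200, while deploying or deleting a schedule answers 202 Accepted. A small hedged sketch (the app import path and schedule name are assumptions):

```python
# Hedged sketch: the deploy route now declares 202 as its success status.
from fastapi.testclient import TestClient

from ddeutil.workflow.api.api import app  # assumed app location

client = TestClient(app)
resp = client.post("/schedules/deploy/schedule-wf-demo")  # hypothetical name
print(resp.status_code)  # 202 on the success path after this change
```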
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/workflows.py
RENAMED
```diff
@@ -20,7 +20,7 @@ from ...conf import Loader, get_logger
 from ...result import Result
 from ...workflow import Workflow
 
-logger = get_logger("
+logger = get_logger("uvicorn.error")
 
 workflow_route = APIRouter(
     prefix="/workflows",
@@ -29,7 +29,7 @@ workflow_route = APIRouter(
 )
 
 
-@workflow_route.get(path="/")
+@workflow_route.get(path="/", status_code=st.HTTP_200_OK)
 async def get_workflows() -> DictData:
     """Return all workflow workflows that exists in config path."""
     workflows: DictData = dict(Loader.finds(Workflow))
@@ -40,7 +40,7 @@ async def get_workflows() -> DictData:
     }
 
 
-@workflow_route.get(path="/{name}")
+@workflow_route.get(path="/{name}", status_code=st.HTTP_200_OK)
 async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
@@ -66,7 +66,7 @@ class ExecutePayload(BaseModel):
 
 
 @workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
-async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
+async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
         workflow: Workflow = Workflow.from_loader(name=name, externals={})
@@ -90,7 +90,7 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
     return asdict(result)
 
 
-@workflow_route.get(path="/{name}/audits")
+@workflow_route.get(path="/{name}/audits", status_code=st.HTTP_200_OK)
 async def get_workflow_audits(name: str):
     try:
         return {
@@ -112,11 +112,13 @@ async def get_workflow_audits(name: str):
     ) from None
 
 
-@workflow_route.get(path="/{name}/audits/{release}")
+@workflow_route.get(path="/{name}/audits/{release}", status_code=st.HTTP_200_OK)
 async def get_workflow_release_audit(name: str, release: str):
+    """Get Workflow audit log with an input release value."""
     try:
         audit: Audit = get_audit().find_audit_with_release(
-            name=name,
+            name=name,
+            release=datetime.strptime(release, "%Y%m%d%H%M%S"),
        )
     except FileNotFoundError:
         raise HTTPException(
```
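The new audit route parses its `{release}` path segment with `datetime.strptime(release, "%Y%m%d%H%M%S")`, so clients should format release timestamps the same way before building the URL. A short sketch (the workflow name is hypothetical):

```python
# Hedged sketch: build the audit URL for a given release datetime.
from datetime import datetime

release = datetime(2024, 1, 1, 1, 1, 1).strftime("%Y%m%d%H%M%S")  # "20240101010101"
url = f"/workflows/wf-demo/audits/{release}"  # "wf-demo" is a hypothetical name
print(url)
```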
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/caller.py
RENAMED
```diff
@@ -91,7 +91,9 @@ def make_registry(submodule: str) -> dict[str, Registry]:
     for fstr, func in inspect.getmembers(importer, inspect.isfunction):
         # NOTE: check function attribute that already set tag by
         # ``utils.tag`` decorator.
-        if not (
+        if not (
+            hasattr(func, "tag") and hasattr(func, "name")
+        ):  # pragma: no cov
             continue
 
         # NOTE: Define type of the func value.
```
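The registry filter above keeps only functions that carry both a `tag` and a `name` attribute, which the package's tag decorator attaches; plain functions are skipped. A minimal hedged sketch of what passes the check (the attributes are set by hand here instead of the real decorator):

```python
# Hedged sketch: a function only enters the registry when it has both
# attributes that the ``tag`` decorator would normally set.
def my_task() -> str:  # hypothetical task function
    return "ok"

my_task.tag = "v1"        # stand-ins for what the real decorator attaches
my_task.name = "my-task"

print(hasattr(my_task, "tag") and hasattr(my_task, "name"))  # True -> not skipped
```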
{ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/conf.py
RENAMED
```diff
@@ -31,7 +31,6 @@ def glob_files(path: Path) -> Iterator[Path]:  # pragma: no cov
 
 
 __all__: TupleStr = (
-    "LOGGING_CONFIG",
     "env",
     "get_logger",
     "Config",
@@ -422,62 +421,3 @@ def get_logger(name: str):
 
     logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
     return logger
-
-
-LOGGING_CONFIG = {  # pragma: no cov
-    "version": 1,
-    "disable_existing_loggers": False,
-    "formatters": {
-        "standard": {
-            "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
-        },
-        "custom_formatter": {
-            "format": config.log_format,
-            "datefmt": config.log_datetime_format,
-        },
-    },
-    "root": {
-        "level": "DEBUG" if config.debug else "INFO",
-    },
-    "handlers": {
-        "default": {
-            "formatter": "standard",
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stderr",
-        },
-        "stream_handler": {
-            "formatter": "custom_formatter",
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stdout",
-        },
-        "file_handler": {
-            "formatter": "custom_formatter",
-            "class": "logging.handlers.RotatingFileHandler",
-            "filename": "logs/app.log",
-            "maxBytes": 1024 * 1024 * 1,
-            "backupCount": 3,
-        },
-    },
-    "loggers": {
-        "uvicorn": {
-            "handlers": ["default", "file_handler"],
-            "level": "DEBUG" if config.debug else "INFO",
-            "propagate": False,
-        },
-        "uvicorn.access": {
-            "handlers": ["stream_handler", "file_handler"],
-            "level": "DEBUG" if config.debug else "INFO",
-            "propagate": False,
-        },
-        "uvicorn.error": {
-            "handlers": ["stream_handler", "file_handler"],
-            "level": "DEBUG" if config.debug else "INFO",
-            "propagate": False,
-        },
-        # "uvicorn.asgi": {
-        #     "handlers": ["stream_handler", "file_handler"],
-        #     "level": "TRACE",
-        #     "propagate": False,
-        # },
-    },
-}
```