ddeutil-workflow 0.0.36__tar.gz → 0.0.38__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/PKG-INFO +12 -6
  2. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/README.md +6 -4
  3. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/pyproject.toml +8 -1
  4. ddeutil_workflow-0.0.38/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/__init__.py +4 -1
  6. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/api.py +3 -1
  7. ddeutil_workflow-0.0.38/src/ddeutil/workflow/api/log.py +59 -0
  8. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/repeat.py +1 -1
  9. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/routes/job.py +4 -2
  10. ddeutil_workflow-0.0.38/src/ddeutil/workflow/api/routes/logs.py +173 -0
  11. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/routes/schedules.py +6 -6
  12. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/routes/workflows.py +9 -7
  13. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/caller.py +9 -3
  14. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/conf.py +0 -60
  15. ddeutil_workflow-0.0.38/src/ddeutil/workflow/context.py +59 -0
  16. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/exceptions.py +14 -1
  17. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/job.py +310 -277
  18. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/logs.py +6 -1
  19. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/result.py +1 -1
  20. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/scheduler.py +11 -4
  21. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/stages.py +368 -111
  22. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/utils.py +27 -49
  23. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/workflow.py +137 -72
  24. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil_workflow.egg-info/PKG-INFO +12 -6
  25. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil_workflow.egg-info/SOURCES.txt +3 -0
  26. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil_workflow.egg-info/requires.txt +4 -0
  27. ddeutil_workflow-0.0.38/tests/test_call_tag.py +153 -0
  28. ddeutil_workflow-0.0.38/tests/test_context.py +136 -0
  29. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_job.py +12 -9
  30. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_job_exec.py +36 -28
  31. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_job_exec_strategy.py +9 -11
  32. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_job_strategy.py +6 -0
  33. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_result.py +4 -1
  34. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_stage_handler_exec.py +165 -4
  35. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_utils.py +0 -9
  36. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_workflow.py +1 -2
  37. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_workflow_exec.py +38 -27
  38. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_workflow_exec_job.py +0 -1
  39. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_workflow_exec_poke.py +0 -1
  40. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_workflow_exec_release.py +0 -3
  41. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_workflow_task.py +0 -1
  42. ddeutil_workflow-0.0.36/src/ddeutil/workflow/__about__.py +0 -1
  43. ddeutil_workflow-0.0.36/src/ddeutil/workflow/api/routes/logs.py +0 -64
  44. ddeutil_workflow-0.0.36/tests/test_call_tag.py +0 -72
  45. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/LICENSE +0 -0
  46. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/setup.cfg +0 -0
  47. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/__cron.py +0 -0
  48. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/__types.py +0 -0
  49. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/__init__.py +0 -0
  50. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  51. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/audit.py +0 -0
  52. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/cron.py +0 -0
  53. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/params.py +0 -0
  54. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil/workflow/templates.py +0 -0
  55. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  56. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  57. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test__cron.py +0 -0
  58. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test__regex.py +0 -0
  59. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_audit.py +0 -0
  60. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_conf.py +0 -0
  61. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_cron_on.py +0 -0
  62. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_logs.py +0 -0
  63. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_params.py +0 -0
  64. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_release.py +0 -0
  65. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_release_queue.py +0 -0
  66. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_schedule.py +0 -0
  67. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_schedule_pending.py +0 -0
  68. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_schedule_tasks.py +0 -0
  69. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_schedule_workflow.py +0 -0
  70. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_scheduler_control.py +0 -0
  71. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_stage.py +0 -0
  72. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_templates.py +0 -0
  73. {ddeutil_workflow-0.0.36 → ddeutil_workflow-0.0.38}/tests/test_templates_filter.py +0 -0
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.36
+ Version: 0.0.38
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -31,6 +31,10 @@ Provides-Extra: api
  Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
  Requires-Dist: httpx; extra == "api"
  Requires-Dist: ujson; extra == "api"
+ Provides-Extra: async
+ Requires-Dist: aiofiles; extra == "async"
+ Requires-Dist: aiohttp; extra == "async"
+ Dynamic: license-file

  # Workflow Orchestration

@@ -61,10 +65,10 @@ configuration. It called **Metadata Driven Data Workflow**.

  **:pushpin: <u>Rules of This Workflow engine</u>**:

- 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
- 2. Can not re-run only failed stage and its pending downstream :rotating_light:
- 3. All parallel tasks inside workflow engine use Multi-Threading
- (🐍 Python 3.13 unlock GIL :unlock:)
+ 1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+ 2. **Can not** re-run only failed stage and its pending downstream ↩️
+ 3. All parallel tasks inside workflow engine use **Multi-Threading**
+ (Python 3.13 unlock GIL 🐍🔓)

  ---

@@ -165,6 +169,8 @@ run-py-local:
  run-date: datetime
  jobs:
  getting-api-data:
+ runs-on:
+ type: local
  stages:
  - name: "Retrieve API Data"
  id: retrieve-api
@@ -27,10 +27,10 @@ configuration. It called **Metadata Driven Data Workflow**.

  **:pushpin: <u>Rules of This Workflow engine</u>**:

- 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
- 2. Can not re-run only failed stage and its pending downstream :rotating_light:
- 3. All parallel tasks inside workflow engine use Multi-Threading
- (🐍 Python 3.13 unlock GIL :unlock:)
+ 1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+ 2. **Can not** re-run only failed stage and its pending downstream ↩️
+ 3. All parallel tasks inside workflow engine use **Multi-Threading**
+ (Python 3.13 unlock GIL 🐍🔓)

  ---

@@ -131,6 +131,8 @@ run-py-local:
  run-date: datetime
  jobs:
  getting-api-data:
+ runs-on:
+ type: local
  stages:
  - name: "Retrieve API Data"
  id: retrieve-api
@@ -40,6 +40,10 @@ api = [
  "httpx",
  "ujson",
  ]
+ async = [
+ "aiofiles",
+ "aiohttp",
+ ]

  [project.urls]
  Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
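The new optional dependency group above should install with pip's usual extras syntax, presumably `pip install ddeutil-workflow[async]`; the extra name and its `aiofiles`/`aiohttp` members come from this diff, while the install command itself is an assumption, not something shown here.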
@@ -63,12 +67,14 @@ commit_prefix_force_fix = true
  branch = true
  relative_files = true
  concurrency = ["thread", "multiprocessing"]
- source = ["ddeutil.workflow"]
+ source = ["src.ddeutil.workflow"]
  omit = [
  "src/ddeutil/workflow/__about__.py",
  "src/ddeutil/workflow/__cron.py",
+ "src/ddeutil/workflow/context.py",
  "src/ddeutil/workflow/api/__init__.py",
  "src/ddeutil/workflow/api/api.py",
+ "src/ddeutil/workflow/api/log.py",
  "src/ddeutil/workflow/api/repeat.py",
  "src/ddeutil/workflow/api/routes/__init__.py",
  "src/ddeutil/workflow/api/routes/job.py",
@@ -100,6 +106,7 @@ addopts = [
  filterwarnings = [
  "error",
  "ignore::DeprecationWarning",
+ "ignore::pytest.PytestUnraisableExceptionWarning",
  ]
  log_cli = true
  log_cli_level = "DEBUG"
@@ -0,0 +1 @@
+ __version__: str = "0.0.38"
@@ -39,6 +39,8 @@ from .job import (
  Job,
  RunsOn,
  Strategy,
+ local_execute,
+ local_execute_strategy,
  )
  from .logs import (
  TraceData,
@@ -69,6 +71,8 @@ from .stages import (
  BashStage,
  CallStage,
  EmptyStage,
+ ForEachStage,
+ ParallelStage,
  PyStage,
  Stage,
  TriggerStage,
@@ -89,7 +93,6 @@ from .templates import (
  from .utils import (
  batch,
  cross_product,
- dash2underscore,
  delay,
  filter_func,
  gen_id,
@@ -27,7 +27,7 @@ from .repeat import repeat_at
  from .routes import job, log

  load_dotenv()
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")


  class State(TypedDict):
@@ -151,6 +151,7 @@ if config.enable_route_schedule:
  async def validation_exception_handler(
  request: Request, exc: RequestValidationError
  ):
+ _ = request
  return UJSONResponse(
  status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
  content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
@@ -164,4 +165,5 @@ if __name__ == "__main__":
  app,
  host="0.0.0.0",
  port=80,
+ log_level="DEBUG",
  )
@@ -0,0 +1,59 @@
+ from ..conf import config
+
+ LOGGING_CONFIG = { # pragma: no cov
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {
+ "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+ },
+ "custom_formatter": {
+ "format": config.log_format,
+ "datefmt": config.log_datetime_format,
+ },
+ },
+ "root": {
+ "level": "DEBUG" if config.debug else "INFO",
+ },
+ "handlers": {
+ "default": {
+ "formatter": "standard",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stderr",
+ },
+ "stream_handler": {
+ "formatter": "custom_formatter",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
+ },
+ "file_handler": {
+ "formatter": "custom_formatter",
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": "logs/app.log",
+ "maxBytes": 1024 * 1024 * 1,
+ "backupCount": 3,
+ },
+ },
+ "loggers": {
+ "uvicorn": {
+ "handlers": ["default", "file_handler"],
+ "level": "DEBUG" if config.debug else "INFO",
+ "propagate": False,
+ },
+ "uvicorn.access": {
+ "handlers": ["stream_handler", "file_handler"],
+ "level": "DEBUG" if config.debug else "INFO",
+ "propagate": False,
+ },
+ "uvicorn.error": {
+ "handlers": ["stream_handler", "file_handler"],
+ "level": "DEBUG" if config.debug else "INFO",
+ "propagate": False,
+ },
+ # "uvicorn.asgi": {
+ # "handlers": ["stream_handler", "file_handler"],
+ # "level": "TRACE",
+ # "propagate": False,
+ # },
+ },
+ }
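The `LOGGING_CONFIG` mapping added above (and removed from `conf.py` at the end of this diff) follows the standard `logging.config.dictConfig` schema. A minimal usage sketch, assuming the import path implied by the new file location; this diff does not show how the package itself wires it up:

```python
# Hypothetical usage sketch, not part of the package source.
import logging.config
from pathlib import Path

from ddeutil.workflow.api.log import LOGGING_CONFIG

# The "file_handler" entry writes to logs/app.log; RotatingFileHandler creates
# the file but not the directory, so the directory has to exist beforehand.
Path("logs").mkdir(exist_ok=True)

logging.config.dictConfig(LOGGING_CONFIG)
logging.getLogger("uvicorn.error").info("logging configured")
```

uvicorn also accepts a mapping like this through its `log_config` argument, which would fit the routers now logging via `get_logger("uvicorn.error")`.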
@@ -15,7 +15,7 @@ from starlette.concurrency import run_in_threadpool
  from ..__cron import CronJob
  from ..conf import config, get_logger

- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")


  def get_cronjob_delta(cron: str) -> float:
@@ -17,7 +17,7 @@ from ...exceptions import JobException
  from ...job import Job
  from ...result import Result

- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")


  job_route = APIRouter(
@@ -45,6 +45,7 @@ async def job_execute(
  run_id=result.run_id,
  parent_run_id=result.parent_run_id,
  )
+ context: DictData = {}
  try:
  job.set_outputs(
  job.execute(
@@ -52,7 +53,7 @@ async def job_execute(
  run_id=rs.run_id,
  parent_run_id=rs.parent_run_id,
  ).context,
- to=params,
+ to=context,
  )
  except JobException as err:
  rs.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
@@ -70,4 +71,5 @@ async def job_execute(
  exclude_defaults=True,
  ),
  "params": params,
+ "context": context,
  }
@@ -0,0 +1,173 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ """This route include audit and trace log paths."""
+ from __future__ import annotations
+
+ from fastapi import APIRouter, Path, Query
+ from fastapi import status as st
+ from fastapi.responses import UJSONResponse
+
+ from ...audit import get_audit
+ from ...logs import get_trace_obj
+
+ log_route = APIRouter(
+ prefix="/logs",
+ tags=["logs"],
+ default_response_class=UJSONResponse,
+ )
+
+
+ @log_route.get(
+ path="/traces/",
+ response_class=UJSONResponse,
+ status_code=st.HTTP_200_OK,
+ summary="Read all trace logs.",
+ tags=["trace"],
+ )
+ async def get_traces(
+ offset: int = Query(default=0, gt=0),
+ limit: int = Query(default=100, gt=0),
+ ):
+ """Return all trace logs from the current trace log path that config with
+ `WORKFLOW_LOG_PATH` environment variable name.
+ """
+ return {
+ "message": (
+ f"Getting trace logs with offset: {offset} and limit: {limit}"
+ ),
+ "traces": [
+ trace.model_dump(
+ by_alias=True,
+ exclude_none=True,
+ exclude_unset=True,
+ exclude_defaults=True,
+ )
+ for trace in get_trace_obj().find_logs()
+ ],
+ }
+
+
+ @log_route.get(
+ path="/traces/{run_id}",
+ response_class=UJSONResponse,
+ status_code=st.HTTP_200_OK,
+ summary="Read trace log with specific running ID.",
+ tags=["trace"],
+ )
+ async def get_trace_with_id(run_id: str):
+ """Return trace log with specific running ID from the current trace log path
+ that config with `WORKFLOW_LOG_PATH` environment variable name.
+
+ - **run_id**: A running ID that want to search a trace log from the log
+ path.
+ """
+ return {
+ "message": f"Getting trace log with specific running ID: {run_id}",
+ "trace": (
+ get_trace_obj()
+ .find_log_with_id(run_id)
+ .model_dump(
+ by_alias=True,
+ exclude_none=True,
+ exclude_unset=True,
+ exclude_defaults=True,
+ )
+ ),
+ }
+
+
+ @log_route.get(
+ path="/audits/",
+ response_class=UJSONResponse,
+ status_code=st.HTTP_200_OK,
+ summary="Read all audit logs.",
+ tags=["audit"],
+ )
+ async def get_audits():
+ """Return all audit logs from the current audit log path that config with
+ `WORKFLOW_AUDIT_PATH` environment variable name.
+ """
+ return {
+ "message": "Getting audit logs",
+ "audits": list(get_audit().find_audits(name="demo")),
+ }
+
+
+ @log_route.get(
+ path="/audits/{workflow}/",
+ response_class=UJSONResponse,
+ status_code=st.HTTP_200_OK,
+ summary="Read all audit logs with specific workflow name.",
+ tags=["audit"],
+ )
+ async def get_audit_with_workflow(workflow: str):
+ """Return all audit logs with specific workflow name from the current audit
+ log path that config with `WORKFLOW_AUDIT_PATH` environment variable name.
+
+ - **workflow**: A specific workflow name that want to find audit logs.
+ """
+ return {
+ "message": f"Getting audit logs with workflow name {workflow}",
+ "audits": list(get_audit().find_audits(name="demo")),
+ }
+
+
+ @log_route.get(
+ path="/audits/{workflow}/{release}",
+ response_class=UJSONResponse,
+ status_code=st.HTTP_200_OK,
+ summary="Read all audit logs with specific workflow name and release date.",
+ tags=["audit"],
+ )
+ async def get_audit_with_workflow_release(
+ workflow: str = Path(...),
+ release: str = Path(...),
+ ):
+ """Return all audit logs with specific workflow name and release date from
+ the current audit log path that config with `WORKFLOW_AUDIT_PATH`
+ environment variable name.
+
+ - **workflow**: A specific workflow name that want to find audit logs.
+ - **release**: A release date with a string format `%Y%m%d%H%M%S`.
+ """
+ return {
+ "message": (
+ f"Getting audit logs with workflow name {workflow} and release "
+ f"{release}"
+ ),
+ "audits": list(get_audit().find_audits(name="demo")),
+ }
+
+
+ @log_route.get(
+ path="/audits/{workflow}/{release}/{run_id}",
+ response_class=UJSONResponse,
+ status_code=st.HTTP_200_OK,
+ summary=(
+ "Read all audit logs with specific workflow name, release date "
+ "and running ID."
+ ),
+ tags=["audit"],
+ )
+ async def get_audit_with_workflow_release_run_id(
+ workflow: str, release: str, run_id: str
+ ):
+ """Return all audit logs with specific workflow name and release date from
+ the current audit log path that config with `WORKFLOW_AUDIT_PATH`
+ environment variable name.
+
+ - **workflow**: A specific workflow name that want to find audit logs.
+ - **release**: A release date with a string format `%Y%m%d%H%M%S`.
+ - **run_id**: A running ID that want to search audit log from this release
+ date.
+ """
+ return {
+ "message": (
+ f"Getting audit logs with workflow name {workflow}, release "
+ f"{release}, and running ID {run_id}"
+ ),
+ "audits": list(get_audit().find_audits(name="demo")),
+ }
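The new `logs.py` router above exposes read-only trace and audit endpoints under the `/logs` prefix. A rough client-side sketch, assuming the router is mounted at the application root and the API is served on `localhost:80` as in `api.py`'s `__main__` block (neither is confirmed by this diff):

```python
# Hypothetical client sketch; the host, query values, and run ID are
# placeholders rather than values taken from this diff.
import httpx

BASE = "http://localhost:80"

# List trace logs (both query parameters are declared with gt=0).
resp = httpx.get(f"{BASE}/logs/traces/", params={"offset": 1, "limit": 10})
print(resp.json()["message"])

# Fetch a single trace by running ID.
resp = httpx.get(f"{BASE}/logs/traces/some-run-id")
print(resp.status_code)

# List audit logs.
resp = httpx.get(f"{BASE}/logs/audits/")
print(resp.json()["audits"])
```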
@@ -15,7 +15,7 @@ from fastapi.responses import UJSONResponse
  from ...conf import config, get_logger
  from ...scheduler import Schedule

- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")

  schedule_route = APIRouter(
  prefix="/schedules",
@@ -24,7 +24,7 @@ schedule_route = APIRouter(
  )


- @schedule_route.get(path="/{name}")
+ @schedule_route.get(path="/{name}", status_code=st.HTTP_200_OK)
  async def get_schedules(name: str):
  """Get schedule object."""
  try:
@@ -42,13 +42,13 @@ async def get_schedules(name: str):
  )


- @schedule_route.get(path="/deploy/")
+ @schedule_route.get(path="/deploy/", status_code=st.HTTP_200_OK)
  async def get_deploy_schedulers(request: Request):
  snapshot = copy.deepcopy(request.state.scheduler)
  return {"schedule": snapshot}


- @schedule_route.get(path="/deploy/{name}")
+ @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
  async def get_deploy_scheduler(request: Request, name: str):
  if name in request.state.scheduler:
  schedule = Schedule.from_loader(name)
@@ -76,7 +76,7 @@ async def get_deploy_scheduler(request: Request, name: str):
  )


- @schedule_route.post(path="/deploy/{name}")
+ @schedule_route.post(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
  async def add_deploy_scheduler(request: Request, name: str):
  """Adding schedule name to application state store."""
  if name in request.state.scheduler:
@@ -116,7 +116,7 @@ async def add_deploy_scheduler(request: Request, name: str):
  }


- @schedule_route.delete(path="/deploy/{name}")
+ @schedule_route.delete(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
  async def del_deploy_scheduler(request: Request, name: str):
  """Delete workflow task on the schedule listener."""
  if name in request.state.scheduler:
@@ -20,7 +20,7 @@ from ...conf import Loader, get_logger
  from ...result import Result
  from ...workflow import Workflow

- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")

  workflow_route = APIRouter(
  prefix="/workflows",
@@ -29,7 +29,7 @@ workflow_route = APIRouter(
  )


- @workflow_route.get(path="/")
+ @workflow_route.get(path="/", status_code=st.HTTP_200_OK)
  async def get_workflows() -> DictData:
  """Return all workflow workflows that exists in config path."""
  workflows: DictData = dict(Loader.finds(Workflow))
@@ -40,7 +40,7 @@ async def get_workflows() -> DictData:
  }


- @workflow_route.get(path="/{name}")
+ @workflow_route.get(path="/{name}", status_code=st.HTTP_200_OK)
  async def get_workflow_by_name(name: str) -> DictData:
  """Return model of workflow that passing an input workflow name."""
  try:
@@ -66,7 +66,7 @@ class ExecutePayload(BaseModel):


  @workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
- async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
+ async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
  """Return model of workflow that passing an input workflow name."""
  try:
  workflow: Workflow = Workflow.from_loader(name=name, externals={})
@@ -90,7 +90,7 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
  return asdict(result)


- @workflow_route.get(path="/{name}/audits")
+ @workflow_route.get(path="/{name}/audits", status_code=st.HTTP_200_OK)
  async def get_workflow_audits(name: str):
  try:
  return {
@@ -112,11 +112,13 @@ async def get_workflow_audits(name: str):
  ) from None


- @workflow_route.get(path="/{name}/audits/{release}")
+ @workflow_route.get(path="/{name}/audits/{release}", status_code=st.HTTP_200_OK)
  async def get_workflow_release_audit(name: str, release: str):
+ """Get Workflow audit log with an input release value."""
  try:
  audit: Audit = get_audit().find_audit_with_release(
- name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
+ name=name,
+ release=datetime.strptime(release, "%Y%m%d%H%M%S"),
  )
  except FileNotFoundError:
  raise HTTPException(
@@ -26,6 +26,7 @@ T = TypeVar("T")
  P = ParamSpec("P")

  logger = logging.getLogger("ddeutil.workflow")
+ logging.getLogger("asyncio").setLevel(logging.INFO)


  class TagFunc(Protocol):
@@ -60,10 +61,13 @@ def tag(

  @wraps(func)
  def wrapped(*args: P.args, **kwargs: P.kwargs) -> TagFunc:
- # NOTE: Able to do anything before calling the call function.
  return func(*args, **kwargs)

- return wrapped
+ @wraps(func)
+ async def awrapped(*args: P.args, **kwargs: P.kwargs) -> TagFunc:
+ return await func(*args, **kwargs)
+
+ return awrapped if inspect.iscoroutinefunction(func) else wrapped

  return func_internal

@@ -91,7 +95,9 @@ def make_registry(submodule: str) -> dict[str, Registry]:
  for fstr, func in inspect.getmembers(importer, inspect.isfunction):
  # NOTE: check function attribute that already set tag by
  # ``utils.tag`` decorator.
- if not (hasattr(func, "tag") and hasattr(func, "name")):
+ if not (
+ hasattr(func, "tag") and hasattr(func, "name")
+ ): # pragma: no cov
  continue

  # NOTE: Define type of the func value.
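With the `tag` change above, the decorator returns an `awrapped` coroutine wrapper whenever the decorated call function is itself a coroutine function, so async call functions get awaited instead of handing back a bare coroutine. A small registration sketch follows; the function names, arguments, and the `alias` keyword are illustrative assumptions about the decorator's signature, not taken from this diff:

```python
# Hypothetical registration sketch for sync and async call functions.
from ddeutil.workflow.caller import tag


@tag("demo", alias="get-items")
def get_items(source: str) -> dict:
    # Handled by the plain `wrapped` closure.
    return {"source": source, "records": 3}


@tag("demo", alias="aget-items")
async def aget_items(source: str) -> dict:
    # Handled by `awrapped`, which awaits the coroutine before returning.
    return {"source": source, "records": 3}
```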
@@ -31,7 +31,6 @@ def glob_files(path: Path) -> Iterator[Path]: # pragma: no cov


  __all__: TupleStr = (
- "LOGGING_CONFIG",
  "env",
  "get_logger",
  "Config",
@@ -422,62 +421,3 @@ def get_logger(name: str):

  logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
  return logger
-
-
- LOGGING_CONFIG = { # pragma: no cov
- "version": 1,
- "disable_existing_loggers": False,
- "formatters": {
- "standard": {
- "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
- },
- "custom_formatter": {
- "format": config.log_format,
- "datefmt": config.log_datetime_format,
- },
- },
- "root": {
- "level": "DEBUG" if config.debug else "INFO",
- },
- "handlers": {
- "default": {
- "formatter": "standard",
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stderr",
- },
- "stream_handler": {
- "formatter": "custom_formatter",
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stdout",
- },
- "file_handler": {
- "formatter": "custom_formatter",
- "class": "logging.handlers.RotatingFileHandler",
- "filename": "logs/app.log",
- "maxBytes": 1024 * 1024 * 1,
- "backupCount": 3,
- },
- },
- "loggers": {
- "uvicorn": {
- "handlers": ["default", "file_handler"],
- "level": "DEBUG" if config.debug else "INFO",
- "propagate": False,
- },
- "uvicorn.access": {
- "handlers": ["stream_handler", "file_handler"],
- "level": "DEBUG" if config.debug else "INFO",
- "propagate": False,
- },
- "uvicorn.error": {
- "handlers": ["stream_handler", "file_handler"],
- "level": "DEBUG" if config.debug else "INFO",
- "propagate": False,
- },
- # "uvicorn.asgi": {
- # "handlers": ["stream_handler", "file_handler"],
- # "level": "TRACE",
- # "propagate": False,
- # },
- },
- }