ddeutil-workflow 0.0.34__tar.gz → 0.0.35__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. {ddeutil_workflow-0.0.34/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.35}/PKG-INFO +22 -19
  2. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/README.md +21 -18
  3. ddeutil_workflow-0.0.35/src/ddeutil/workflow/__about__.py +1 -0
  4. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/__init__.py +6 -3
  5. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/api/api.py +13 -8
  6. ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/__init__.py +8 -0
  7. ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/logs.py +36 -0
  8. ddeutil_workflow-0.0.34/src/ddeutil/workflow/api/route.py → ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/schedules.py +2 -131
  9. ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/workflows.py +137 -0
  10. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/audit.py +3 -3
  11. ddeutil_workflow-0.0.34/src/ddeutil/workflow/call.py → ddeutil_workflow-0.0.35/src/ddeutil/workflow/caller.py +4 -4
  12. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/job.py +70 -21
  13. ddeutil_workflow-0.0.34/src/ddeutil/workflow/result.py → ddeutil_workflow-0.0.35/src/ddeutil/workflow/logs.py +68 -110
  14. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/params.py +40 -12
  15. ddeutil_workflow-0.0.35/src/ddeutil/workflow/result.py +134 -0
  16. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/scheduler.py +39 -32
  17. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/stages.py +7 -10
  18. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/workflow.py +6 -6
  19. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35/src/ddeutil_workflow.egg-info}/PKG-INFO +22 -19
  20. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/SOURCES.txt +7 -2
  21. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_call_tag.py +2 -2
  22. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_job.py +30 -2
  23. ddeutil_workflow-0.0.35/tests/test_logs.py +6 -0
  24. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_params.py +21 -0
  25. ddeutil_workflow-0.0.34/src/ddeutil/workflow/__about__.py +0 -1
  26. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/LICENSE +0 -0
  27. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/pyproject.toml +0 -0
  28. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/setup.cfg +0 -0
  29. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/__cron.py +0 -0
  30. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/__types.py +0 -0
  31. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/api/__init__.py +0 -0
  32. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/api/repeat.py +0 -0
  33. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/conf.py +0 -0
  34. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/cron.py +0 -0
  35. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/exceptions.py +0 -0
  36. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/templates.py +0 -0
  37. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil/workflow/utils.py +0 -0
  38. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  39. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  40. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  41. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test__cron.py +0 -0
  42. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test__regex.py +0 -0
  43. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_audit.py +0 -0
  44. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_conf.py +0 -0
  45. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_cron_on.py +0 -0
  46. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_job_exec.py +0 -0
  47. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_job_exec_strategy.py +0 -0
  48. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_job_strategy.py +0 -0
  49. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_release.py +0 -0
  50. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_release_queue.py +0 -0
  51. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_result.py +0 -0
  52. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_schedule.py +0 -0
  53. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_schedule_pending.py +0 -0
  54. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_schedule_tasks.py +0 -0
  55. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_schedule_workflow.py +0 -0
  56. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_scheduler_control.py +0 -0
  57. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_stage.py +0 -0
  58. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_stage_handler_exec.py +0 -0
  59. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_templates.py +0 -0
  60. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_templates_filter.py +0 -0
  61. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_utils.py +0 -0
  62. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_workflow.py +0 -0
  63. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec.py +0 -0
  64. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec_job.py +0 -0
  65. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec_poke.py +0 -0
  66. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_workflow_exec_release.py +0 -0
  67. {ddeutil_workflow-0.0.34 → ddeutil_workflow-0.0.35}/tests/test_workflow_task.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.34
3
+ Version: 0.0.35
4
4
  Summary: Lightweight workflow orchestration
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -62,7 +62,7 @@ configuration. It called **Metadata Driven Data Workflow**.
62
62
  1. The Minimum frequency unit of scheduling is **1 minute** :warning:
63
63
  2. Can not re-run only failed stage and its pending downstream :rotating_light:
64
64
  3. All parallel tasks inside workflow engine use Multi-Threading
65
- (Python 3.13 unlock GIL :unlock:)
65
+ (🐍 Python 3.13 unlock GIL :unlock:)
66
66
 
67
67
  ---
68
68
 
@@ -86,37 +86,40 @@ flowchart LR
86
86
  B@{ shape: rounded, label: "Workflow<br>Application" }
87
87
  end
88
88
 
89
- A <--->|action &<br>response| B
90
- B -....-> |response| G
91
- G -....-> |request| B
89
+ A <-->|action &<br>response| B
90
+ B -...-> |response| G
91
+ G -...-> |request| B
92
92
 
93
93
  subgraph Data Context
94
- D@{ shape: processes, label: "Logs" }
95
- E@{ shape: lin-cyl, label: "Audit<br>Logs" }
94
+ D@{ shape: processes, label: "Logs" }
95
+ E@{ shape: lin-cyl, label: "Audit<br>Logs" }
96
96
  end
97
97
 
98
98
  subgraph Git Context
99
- F@{ shape: tag-rect, label: "YAML<br>files" }
99
+ F@{ shape: tag-rect, label: "YAML<br>files" }
100
100
  end
101
101
 
102
- B --->|disable| F
103
- F --->|read| B
102
+ A ---> |push| H(Repo)
103
+ H -.-> |pull| F
104
104
 
105
- B --->|write| E
106
- E --->|read| B
107
- B --->|write| D
105
+ B <-->|disable &<br>read| F
106
+
107
+ B <-->|read &<br>write| E
108
+
109
+ B -->|write| D
108
110
 
109
111
  D -.->|read| G
110
112
  E -.->|read| G
111
113
  ```
112
114
 
113
115
  > [!WARNING]
114
- > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
115
- > with `.yml` files and all configs file from several data orchestration framework
116
- > tools from my experience on Data Engineer. :grimacing:
117
- >
118
- > Other workflow tools that I interest on them and pick some interested feature
119
- > implement to this package:
116
+ > _**Disclaimer**_: I took inspiration for the dynamic YAML statement from [**GitHub Actions**](https://github.com/features/actions),
117
+ > and for the config patterns from several data orchestration framework tools
118
+ > that I have used in my data engineering experience. :grimacing:
119
+
120
+ > [!NOTE]
121
+ > Other workflow orchestration tools that I find interesting, and which I picked
122
+ > as inspiration for some parts of this package:
120
123
  >
121
124
  > - [Google **Workflows**](https://cloud.google.com/workflows)
122
125
  > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -30,7 +30,7 @@ configuration. It called **Metadata Driven Data Workflow**.
30
30
  1. The Minimum frequency unit of scheduling is **1 minute** :warning:
31
31
  2. Can not re-run only failed stage and its pending downstream :rotating_light:
32
32
  3. All parallel tasks inside workflow engine use Multi-Threading
33
- (Python 3.13 unlock GIL :unlock:)
33
+ (🐍 Python 3.13 unlock GIL :unlock:)
34
34
 
35
35
  ---
36
36
 
@@ -54,37 +54,40 @@ flowchart LR
54
54
  B@{ shape: rounded, label: "Workflow<br>Application" }
55
55
  end
56
56
 
57
- A <--->|action &<br>response| B
58
- B -....-> |response| G
59
- G -....-> |request| B
57
+ A <-->|action &<br>response| B
58
+ B -...-> |response| G
59
+ G -...-> |request| B
60
60
 
61
61
  subgraph Data Context
62
- D@{ shape: processes, label: "Logs" }
63
- E@{ shape: lin-cyl, label: "Audit<br>Logs" }
62
+ D@{ shape: processes, label: "Logs" }
63
+ E@{ shape: lin-cyl, label: "Audit<br>Logs" }
64
64
  end
65
65
 
66
66
  subgraph Git Context
67
- F@{ shape: tag-rect, label: "YAML<br>files" }
67
+ F@{ shape: tag-rect, label: "YAML<br>files" }
68
68
  end
69
69
 
70
- B --->|disable| F
71
- F --->|read| B
70
+ A ---> |push| H(Repo)
71
+ H -.-> |pull| F
72
72
 
73
- B --->|write| E
74
- E --->|read| B
75
- B --->|write| D
73
+ B <-->|disable &<br>read| F
74
+
75
+ B <-->|read &<br>write| E
76
+
77
+ B -->|write| D
76
78
 
77
79
  D -.->|read| G
78
80
  E -.->|read| G
79
81
  ```
80
82
 
81
83
  > [!WARNING]
82
- > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
83
- > with `.yml` files and all configs file from several data orchestration framework
84
- > tools from my experience on Data Engineer. :grimacing:
85
- >
86
- > Other workflow tools that I interest on them and pick some interested feature
87
- > implement to this package:
84
+ > _**Disclaimer**_: I took inspiration for the dynamic YAML statement from [**GitHub Actions**](https://github.com/features/actions),
85
+ > and for the config patterns from several data orchestration framework tools
86
+ > that I have used in my data engineering experience. :grimacing:
87
+
88
+ > [!NOTE]
89
+ > Other workflow orchestration tools that I find interesting, and which I picked
90
+ > as inspiration for some parts of this package:
88
91
  >
89
92
  > - [Google **Workflows**](https://cloud.google.com/workflows)
90
93
  > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.35"
@@ -9,7 +9,7 @@ from .audit import (
9
9
  Audit,
10
10
  get_audit,
11
11
  )
12
- from .call import (
12
+ from .caller import (
13
13
  ReturnTagFunc,
14
14
  TagFunc,
15
15
  extract_call,
@@ -39,6 +39,11 @@ from .job import (
39
39
  Job,
40
40
  Strategy,
41
41
  )
42
+ from .logs import (
43
+ TraceLog,
44
+ get_dt_tznow,
45
+ get_trace,
46
+ )
42
47
  from .params import (
43
48
  ChoiceParam,
44
49
  DatetimeParam,
@@ -49,9 +54,7 @@ from .params import (
49
54
  from .result import (
50
55
  Result,
51
56
  Status,
52
- TraceLog,
53
57
  default_gen_id,
54
- get_dt_tznow,
55
58
  )
56
59
  from .scheduler import (
57
60
  Schedule,
@@ -20,6 +20,7 @@ from ..conf import config, get_logger
20
20
  from ..scheduler import ReleaseThread, ReleaseThreads
21
21
  from ..workflow import ReleaseQueue, WorkflowTask
22
22
  from .repeat import repeat_at
23
+ from .routes import log
23
24
 
24
25
  load_dotenv()
25
26
  logger = get_logger("ddeutil.workflow")
@@ -77,22 +78,26 @@ async def health():
77
78
  return {"message": "Workflow API already start up"}
78
79
 
79
80
 
80
- # NOTE: Enable the workflow route.
81
+ # NOTE Add the logs route by default.
82
+ app.include_router(log, prefix=config.prefix_path)
83
+
84
+
85
+ # NOTE: Enable the workflows route.
81
86
  if config.enable_route_workflow:
82
- from .route import workflow_route
87
+ from .routes import workflow
83
88
 
84
- app.include_router(workflow_route, prefix=config.prefix_path)
89
+ app.include_router(workflow, prefix=config.prefix_path)
85
90
 
86
91
 
87
- # NOTE: Enable the schedule route.
92
+ # NOTE: Enable the schedules route.
88
93
  if config.enable_route_schedule:
89
94
  from ..audit import get_audit
90
95
  from ..scheduler import schedule_task
91
- from .route import schedule_route
96
+ from .routes import schedule
92
97
 
93
- app.include_router(schedule_route, prefix=config.prefix_path)
98
+ app.include_router(schedule, prefix=config.prefix_path)
94
99
 
95
- @schedule_route.on_event("startup")
100
+ @schedule.on_event("startup")
96
101
  @repeat_at(cron="* * * * *", delay=2)
97
102
  def scheduler_listener():
98
103
  """Schedule broker every minute at 02 second."""
@@ -109,7 +114,7 @@ if config.enable_route_schedule:
109
114
  log=get_audit(),
110
115
  )
111
116
 
112
- @schedule_route.on_event("startup")
117
+ @schedule.on_event("startup")
113
118
  @repeat_at(cron="*/5 * * * *", delay=10)
114
119
  def monitoring():
115
120
  logger.debug("[MONITOR]: Start monitoring threading.")
@@ -0,0 +1,8 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from .logs import log_route as log
7
+ from .schedules import schedule_route as schedule
8
+ from .workflows import workflow_route as workflow
@@ -0,0 +1,36 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ from fastapi import APIRouter
9
+ from fastapi.responses import UJSONResponse
10
+
11
+ from ...conf import get_logger
12
+ from ...logs import get_trace_obj
13
+
14
+ logger = get_logger("ddeutil.workflow")
15
+
16
+
17
+ # NOTE: Start creating the log routes.
18
+ #
19
+ log_route = APIRouter(
20
+ prefix="/logs",
21
+ tags=["logs"],
22
+ default_response_class=UJSONResponse,
23
+ )
24
+
25
+
26
+ @log_route.get(path="/")
27
+ async def get_logs():
28
+ return {
29
+ "message": "Getting logs",
30
+ "audits": list(get_trace_obj().find_logs()),
31
+ }
32
+
33
+
34
+ @log_route.get(path="/{run_id}")
35
+ async def get_log_with_run_id(run_id: str):
36
+ return get_trace_obj().find_log_with_id(run_id)
@@ -6,30 +6,17 @@
6
6
  from __future__ import annotations
7
7
 
8
8
  import copy
9
- from dataclasses import asdict
10
9
  from datetime import datetime, timedelta
11
- from typing import Any
12
10
 
13
11
  from fastapi import APIRouter, HTTPException, Request
14
12
  from fastapi import status as st
15
13
  from fastapi.responses import UJSONResponse
16
- from pydantic import BaseModel
17
14
 
18
- from ..__types import DictData
19
- from ..audit import Audit, get_audit
20
- from ..conf import Loader, config, get_logger
21
- from ..result import Result
22
- from ..scheduler import Schedule
23
- from ..workflow import Workflow
15
+ from ...conf import config, get_logger
16
+ from ...scheduler import Schedule
24
17
 
25
18
  logger = get_logger("ddeutil.workflow")
26
19
 
27
- workflow_route = APIRouter(
28
- prefix="/workflows",
29
- tags=["workflows"],
30
- default_response_class=UJSONResponse,
31
- )
32
-
33
20
  schedule_route = APIRouter(
34
21
  prefix="/schedules",
35
22
  tags=["schedules"],
@@ -37,122 +24,6 @@ schedule_route = APIRouter(
37
24
  )
38
25
 
39
26
 
40
- @workflow_route.get(path="/")
41
- async def get_workflows() -> DictData:
42
- """Return all workflow workflows that exists in config path."""
43
- workflows: DictData = dict(Loader.finds(Workflow))
44
- return {
45
- "message": f"Getting all workflows: {len(workflows)}",
46
- "count": len(workflows),
47
- "workflows": workflows,
48
- }
49
-
50
-
51
- @workflow_route.get(path="/{name}")
52
- async def get_workflow_by_name(name: str) -> DictData:
53
- """Return model of workflow that passing an input workflow name."""
54
- try:
55
- workflow: Workflow = Workflow.from_loader(name=name, externals={})
56
- except ValueError as err:
57
- logger.exception(err)
58
- raise HTTPException(
59
- status_code=st.HTTP_404_NOT_FOUND,
60
- detail=(
61
- f"Workflow workflow name: {name!r} does not found in /conf path"
62
- ),
63
- ) from None
64
- return workflow.model_dump(
65
- by_alias=True,
66
- exclude_none=True,
67
- exclude_unset=True,
68
- exclude_defaults=True,
69
- )
70
-
71
-
72
- class ExecutePayload(BaseModel):
73
- params: dict[str, Any]
74
-
75
-
76
- @workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
77
- async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
78
- """Return model of workflow that passing an input workflow name."""
79
- try:
80
- workflow: Workflow = Workflow.from_loader(name=name, externals={})
81
- except ValueError:
82
- raise HTTPException(
83
- status_code=st.HTTP_404_NOT_FOUND,
84
- detail=(
85
- f"Workflow workflow name: {name!r} does not found in /conf path"
86
- ),
87
- ) from None
88
-
89
- # NOTE: Start execute manually
90
- try:
91
- result: Result = workflow.execute(params=payload.params)
92
- except Exception as err:
93
- raise HTTPException(
94
- status_code=st.HTTP_500_INTERNAL_SERVER_ERROR,
95
- detail=f"{type(err)}: {err}",
96
- ) from None
97
-
98
- return asdict(result)
99
-
100
-
101
- @workflow_route.get(path="/{name}/logs")
102
- async def get_workflow_logs(name: str):
103
- try:
104
- return {
105
- "message": f"Getting workflow {name!r} logs",
106
- "logs": [
107
- log.model_dump(
108
- by_alias=True,
109
- exclude_none=True,
110
- exclude_unset=True,
111
- exclude_defaults=True,
112
- )
113
- for log in get_audit().find_audits(name=name)
114
- ],
115
- }
116
- except FileNotFoundError:
117
- raise HTTPException(
118
- status_code=st.HTTP_404_NOT_FOUND,
119
- detail=f"Does not found log for workflow {name!r}",
120
- ) from None
121
-
122
-
123
- @workflow_route.get(path="/{name}/logs/{release}")
124
- async def get_workflow_release_log(name: str, release: str):
125
- try:
126
- log: Audit = get_audit().find_audit_with_release(
127
- name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
128
- )
129
- except FileNotFoundError:
130
- raise HTTPException(
131
- status_code=st.HTTP_404_NOT_FOUND,
132
- detail=(
133
- f"Does not found log for workflow {name!r} "
134
- f"with release {release!r}"
135
- ),
136
- ) from None
137
- return {
138
- "message": f"Getting workflow {name!r} log in release {release}",
139
- "log": log.model_dump(
140
- by_alias=True,
141
- exclude_none=True,
142
- exclude_unset=True,
143
- exclude_defaults=True,
144
- ),
145
- }
146
-
147
-
148
- @workflow_route.delete(
149
- path="/{name}/logs/{release}",
150
- status_code=st.HTTP_204_NO_CONTENT,
151
- )
152
- async def del_workflow_release_log(name: str, release: str):
153
- return {"message": f"Deleted workflow {name!r} log in release {release}"}
154
-
155
-
156
27
  @schedule_route.get(path="/{name}")
157
28
  async def get_schedules(name: str):
158
29
  try:
@@ -0,0 +1,137 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ from dataclasses import asdict
9
+ from datetime import datetime
10
+ from typing import Any
11
+
12
+ from fastapi import APIRouter, HTTPException
13
+ from fastapi import status as st
14
+ from fastapi.responses import UJSONResponse
15
+ from pydantic import BaseModel
16
+
17
+ from ...__types import DictData
18
+ from ...audit import Audit, get_audit
19
+ from ...conf import Loader, get_logger
20
+ from ...result import Result
21
+ from ...workflow import Workflow
22
+
23
+ logger = get_logger("ddeutil.workflow")
24
+
25
+ workflow_route = APIRouter(
26
+ prefix="/workflows",
27
+ tags=["workflows"],
28
+ default_response_class=UJSONResponse,
29
+ )
30
+
31
+
32
+ @workflow_route.get(path="/")
33
+ async def get_workflows() -> DictData:
34
+ """Return all workflow workflows that exists in config path."""
35
+ workflows: DictData = dict(Loader.finds(Workflow))
36
+ return {
37
+ "message": f"Getting all workflows: {len(workflows)}",
38
+ "count": len(workflows),
39
+ "workflows": workflows,
40
+ }
41
+
42
+
43
+ @workflow_route.get(path="/{name}")
44
+ async def get_workflow_by_name(name: str) -> DictData:
45
+ """Return model of workflow that passing an input workflow name."""
46
+ try:
47
+ workflow: Workflow = Workflow.from_loader(name=name, externals={})
48
+ except ValueError as err:
49
+ logger.exception(err)
50
+ raise HTTPException(
51
+ status_code=st.HTTP_404_NOT_FOUND,
52
+ detail=(
53
+ f"Workflow workflow name: {name!r} does not found in /conf path"
54
+ ),
55
+ ) from None
56
+ return workflow.model_dump(
57
+ by_alias=True,
58
+ exclude_none=True,
59
+ exclude_unset=True,
60
+ exclude_defaults=True,
61
+ )
62
+
63
+
64
+ class ExecutePayload(BaseModel):
65
+ params: dict[str, Any]
66
+
67
+
68
+ @workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
69
+ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
70
+ """Return model of workflow that passing an input workflow name."""
71
+ try:
72
+ workflow: Workflow = Workflow.from_loader(name=name, externals={})
73
+ except ValueError:
74
+ raise HTTPException(
75
+ status_code=st.HTTP_404_NOT_FOUND,
76
+ detail=(
77
+ f"Workflow workflow name: {name!r} does not found in /conf path"
78
+ ),
79
+ ) from None
80
+
81
+ # NOTE: Start execute manually
82
+ try:
83
+ result: Result = workflow.execute(params=payload.params)
84
+ except Exception as err:
85
+ raise HTTPException(
86
+ status_code=st.HTTP_500_INTERNAL_SERVER_ERROR,
87
+ detail=f"{type(err)}: {err}",
88
+ ) from None
89
+
90
+ return asdict(result)
91
+
92
+
93
+ @workflow_route.get(path="/{name}/audits")
94
+ async def get_workflow_audits(name: str):
95
+ try:
96
+ return {
97
+ "message": f"Getting workflow {name!r} audits",
98
+ "audits": [
99
+ audit.model_dump(
100
+ by_alias=True,
101
+ exclude_none=True,
102
+ exclude_unset=True,
103
+ exclude_defaults=True,
104
+ )
105
+ for audit in get_audit().find_audits(name=name)
106
+ ],
107
+ }
108
+ except FileNotFoundError:
109
+ raise HTTPException(
110
+ status_code=st.HTTP_404_NOT_FOUND,
111
+ detail=f"Does not found audit for workflow {name!r}",
112
+ ) from None
113
+
114
+
115
+ @workflow_route.get(path="/{name}/audits/{release}")
116
+ async def get_workflow_release_audit(name: str, release: str):
117
+ try:
118
+ audit: Audit = get_audit().find_audit_with_release(
119
+ name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
120
+ )
121
+ except FileNotFoundError:
122
+ raise HTTPException(
123
+ status_code=st.HTTP_404_NOT_FOUND,
124
+ detail=(
125
+ f"Does not found audit for workflow {name!r} "
126
+ f"with release {release!r}"
127
+ ),
128
+ ) from None
129
+ return {
130
+ "message": f"Getting workflow {name!r} audit in release {release}",
131
+ "audit": audit.model_dump(
132
+ by_alias=True,
133
+ exclude_none=True,
134
+ exclude_unset=True,
135
+ exclude_defaults=True,
136
+ ),
137
+ }
@@ -20,7 +20,7 @@ from typing_extensions import Self
20
20
 
21
21
  from .__types import DictData, TupleStr
22
22
  from .conf import config
23
- from .result import TraceLog
23
+ from .logs import TraceLog, get_trace
24
24
 
25
25
  __all__: TupleStr = (
26
26
  "get_audit",
@@ -174,7 +174,7 @@ class FileAudit(BaseAudit):
174
174
 
175
175
  :rtype: Self
176
176
  """
177
- trace: TraceLog = TraceLog(self.run_id, self.parent_run_id)
177
+ trace: TraceLog = get_trace(self.run_id, self.parent_run_id)
178
178
 
179
179
  # NOTE: Check environ variable was set for real writing.
180
180
  if not config.enable_write_audit:
@@ -214,7 +214,7 @@ class SQLiteAudit(BaseAudit): # pragma: no cov
214
214
  """Save logging data that receive a context data from a workflow
215
215
  execution result.
216
216
  """
217
- trace: TraceLog = TraceLog(self.run_id, self.parent_run_id)
217
+ trace: TraceLog = get_trace(self.run_id, self.parent_run_id)
218
218
 
219
219
  # NOTE: Check environ variable was set for real writing.
220
220
  if not config.enable_write_audit:
@@ -60,7 +60,7 @@ def tag(
60
60
 
61
61
  @wraps(func)
62
62
  def wrapped(*args: P.args, **kwargs: P.kwargs) -> TagFunc:
63
- # NOTE: Able to do anything before calling call function.
63
+ # NOTE: Able to do anything before calling the call function.
64
64
  return func(*args, **kwargs)
65
65
 
66
66
  return wrapped
@@ -150,7 +150,7 @@ def extract_call(call: str) -> Callable[[], TagFunc]:
150
150
  """
151
151
  if not (found := Re.RE_TASK_FMT.search(call)):
152
152
  raise ValueError(
153
- f"Call {call!r} does not match with call format regex."
153
+ f"Call {call!r} does not match with the call regex format."
154
154
  )
155
155
 
156
156
  # NOTE: Pass the searching call string to `path`, `func`, and `tag`.
@@ -160,13 +160,13 @@ def extract_call(call: str) -> Callable[[], TagFunc]:
160
160
  rgt: dict[str, Registry] = make_registry(f"{call.path}")
161
161
  if call.func not in rgt:
162
162
  raise NotImplementedError(
163
- f"``REGISTER-MODULES.{call.path}.registries`` does not "
163
+ f"`REGISTER-MODULES.{call.path}.registries` does not "
164
164
  f"implement registry: {call.func!r}."
165
165
  )
166
166
 
167
167
  if call.tag not in rgt[call.func]:
168
168
  raise NotImplementedError(
169
169
  f"tag: {call.tag!r} does not found on registry func: "
170
- f"``REGISTER-MODULES.{call.path}.registries.{call.func}``"
170
+ f"`REGISTER-MODULES.{call.path}.registries.{call.func}`"
171
171
  )
172
172
  return rgt[call.func][call.tag]