ddeutil-workflow 0.0.54__py3-none-any.whl → 0.0.55__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/api/__init__.py +170 -1
- ddeutil/workflow/api/routes/job.py +22 -21
- ddeutil/workflow/api/routes/schedules.py +0 -2
- ddeutil/workflow/api/routes/workflows.py +3 -4
- ddeutil/workflow/job.py +17 -13
- ddeutil/workflow/result.py +1 -0
- ddeutil/workflow/scheduler.py +1 -3
- ddeutil/workflow/stages.py +169 -116
- ddeutil/workflow/workflow.py +18 -33
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/METADATA +5 -7
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/RECORD +15 -16
- ddeutil/workflow/api/api.py +0 -170
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/top_level.txt +0 -0
ddeutil/workflow/workflow.py
CHANGED
@@ -166,6 +166,7 @@ class ReleaseQueue:
     extras: DictData = Field(
         default_factory=dict,
         description="An extra parameters that want to override config values.",
+        repr=False,
     )

     @classmethod
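The only change here is the new `repr=False` flag on the `extras` field, which is standard Pydantic behaviour: the mapping is left out of the generated `__repr__`, so queue objects log more compactly. A minimal sketch, assuming Pydantic v2 and a hypothetical stand-in model (not the real `ReleaseQueue`):

```python
# Minimal sketch, assuming Pydantic v2: Field(repr=False) drops a noisy
# mapping from the auto-generated __repr__. DemoQueue is a stand-in model.
from pydantic import BaseModel, Field


class DemoQueue(BaseModel):
    name: str = "wf-demo"
    extras: dict = Field(default_factory=dict, repr=False)


q = DemoQueue(extras={"registry": "./conf"})
print(repr(q))  # DemoQueue(name='wf-demo') -- extras is omitted
```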
@@ -213,15 +214,6 @@ class ReleaseQueue:
         """
         return len(self.queue) > 0

-    @property
-    def first_queue(self) -> Release:
-        """Check an input Release object is the first value of the
-        waiting queue.
-
-        :rtype: Release
-        """
-        return self.queue[0]
-
     def check_queue(self, value: Release | datetime) -> bool:
         """Check a Release value already exists in list of tracking
         queues.
@@ -240,16 +232,6 @@
             or (value in self.complete)
         )

-    def remove_running(self, value: Release) -> Self:
-        """Remove Release in the running queue if it exists.
-
-        :rtype: Self
-        """
-        if value in self.running:
-            self.running.remove(value)
-
-        return self
-
     def mark_complete(self, value: Release) -> Self:
         """Push Release to the complete queue.

@@ -702,10 +684,10 @@ class Workflow(BaseModel):
         if isinstance(release, datetime):
             release: Release = Release.from_dt(release, extras=self.extras)

-        result.trace.
+        result.trace.info(
             f"[RELEASE]: Start {name!r} : {release.date:%Y-%m-%d %H:%M:%S}"
         )
-        self.execute(
+        rs: Result = self.execute(
             params=param2template(
                 params,
                 params={
@@ -724,7 +706,7 @@
             parent_run_id=result.parent_run_id,
             timeout=timeout,
         )
-        result.trace.
+        result.trace.info(
             f"[RELEASE]: End {name!r} : {release.date:%Y-%m-%d %H:%M:%S}"
         )

@@ -745,11 +727,12 @@
         )

         if queue:
-            queue.
+            if release in queue.running:
+                queue.running.remove(release)
             queue.mark_complete(release)

         return result.catch(
-            status=
+            status=rs.status,
             context={
                 "params": params,
                 "release": {
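Taken together, `release()` now drops a finished `Release` from `running` inline (replacing the removed `remove_running` helper) and propagates the status of the inner `execute()` call instead of a fixed value. A hedged usage sketch; the `Workflow.from_conf` loader name and the exact `release()` call signature are assumptions based on the package docs and this diff, not verified against 0.0.55:

```python
# Hedged sketch: the Result returned by release() should now carry the status
# of the underlying execute() call. Workflow.from_conf and the positional
# release(datetime, params=...) call are assumptions for illustration.
from datetime import datetime

from ddeutil.workflow import Workflow

wf = Workflow.from_conf("wf-example")                       # assumed loader
rs = wf.release(datetime.now(), params={"source": "demo"})  # assumed signature
print(rs.status)  # status propagated from execute() rather than hard-coded
```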
@@ -1054,16 +1037,19 @@
         For example with non-strategy job, when I want to use the output
         from previous stage, I can access it with syntax:

-
-
+            ... ${job-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.stages.${stage-id}.errors.${key}

         But example for strategy job:

-
-
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.errors.${key}

-
-
+        This method already handle all exception class that can raise from
+        the job execution. It will warp that error and keep it in the key `errors`
+        at the result context.
+
+        :param params: A parameter data that will parameterize before execution.
         :param run_id: (str | None) A workflow running ID.
         :param parent_run_id: (str | None) A parent workflow running ID.
         :param result: (Result) A Result instance for return context and status.
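The expanded docstring spells out where job and stage results land in the execution context. A hedged sketch of reading those paths back; the concrete dictionary below is illustrative only, not copied from the package source:

```python
# Illustrative context shape following the documented paths
# ${job-id}.stages.${stage-id}.outputs.${key} and the strategies variant.
context = {
    "jobs": {
        "extract": {
            "stages": {"pull": {"outputs": {"records": 42}, "errors": {}}},
        },
        "transform": {
            "strategies": {
                "2150810470": {
                    "stages": {"clean": {"outputs": {"rows": 40}, "errors": {}}},
                },
            },
        },
    },
}

# ${job-id}.stages.${stage-id}.outputs.${key}
records = context["jobs"]["extract"]["stages"]["pull"]["outputs"]["records"]

# ${job-id}.strategies.${strategy-id}.stages.${stage-id}.outputs.${key}
rows = (
    context["jobs"]["transform"]["strategies"]["2150810470"]
    ["stages"]["clean"]["outputs"]["rows"]
)
print(records, rows)
```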
@@ -1116,7 +1102,6 @@
             ):
                 job_id: str = job_queue.get()
                 job: Job = self.job(name=job_id)
-
                 if (check := job.check_needs(context["jobs"])) == WAIT:
                     job_queue.task_done()
                     job_queue.put(job_id)
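The loop above pulls job IDs from a queue and, when a job's dependencies are not finished yet, puts it back instead of blocking the worker loop. A generic, self-contained sketch of that requeue pattern; `WAIT`, the job names, and the `needs` mapping are illustrative stand-ins, not the package's real objects:

```python
# Generic requeue pattern: a job whose needs are unfinished is pushed back
# onto the queue so other jobs can make progress first.
from queue import Queue

WAIT = "WAIT"
finished: set[str] = {"extract"}                 # jobs that already completed
needs = {"transform": ["extract"], "load": ["transform"]}

job_queue: Queue[str] = Queue()
for job_id in ("load", "transform"):
    job_queue.put(job_id)

while not job_queue.empty():
    job_id = job_queue.get()
    check = WAIT if any(n not in finished for n in needs.get(job_id, [])) else "READY"
    if check == WAIT:
        job_queue.task_done()
        job_queue.put(job_id)                    # retry after other jobs run
        continue
    finished.add(job_id)                         # pretend the job executed
    job_queue.task_done()

print(finished)  # {'extract', 'transform', 'load'}
```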
@@ -1227,8 +1212,8 @@ class WorkflowTask:
     :param alias: (str) An alias name of Workflow model.
     :param workflow: (Workflow) A Workflow model instance.
     :param runner: (CronRunner)
-    :param values:
-    :param extras:
+    :param values: A value data that want to parameterize.
+    :param extras: An extra parameter that use to override core config values.
     """

     alias: str
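The docstring now documents every `WorkflowTask` field. A purely illustrative stand-in mirroring that shape (the real class lives in `ddeutil.workflow.workflow` and carries scheduling behaviour on top of these fields):

```python
# Stand-in dataclass mirroring the documented WorkflowTask fields; workflow
# and runner are loosened to Any because the real Workflow and CronRunner
# models are not reproduced here.
from dataclasses import dataclass, field
from typing import Any


@dataclass
class WorkflowTaskShape:
    alias: str                                            # alias of the Workflow model
    workflow: Any                                         # Workflow model instance
    runner: Any                                           # CronRunner driving the schedule
    values: dict[str, Any] = field(default_factory=dict)  # data to parameterize
    extras: dict[str, Any] = field(default_factory=dict)  # core config overrides


task = WorkflowTaskShape(alias="wf-daily", workflow=None, runner=None)
print(task.alias, task.values)
```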
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.54
+Version: 0.0.55
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -121,12 +121,10 @@ flowchart LR

 > [!WARNING]
 > _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
-> and
->
-
->
-> Other workflow orchestration tools that I interest and pick them to be inspiration
-> some for this package:
+> and my experience of data framework configs pattern. :grimacing:
+>
+> Other workflow orchestration services that I interest and pick them to be
+> this project inspiration:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-ddeutil/workflow/__about__.py,sha256=
+ddeutil/workflow/__about__.py,sha256=kfTS6Gqr5xkMNDVKm-NTS4vmeIl2Zj4oMBQGmq19LHA,28
 ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
 ddeutil/workflow/__init__.py,sha256=noE8LNRcgq32m9OnIFcQqh0P7PXWdp-SGmvBCYIXgf4,1338
 ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -6,26 +6,25 @@ ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4
 ddeutil/workflow/conf.py,sha256=80rgmJKFU7BlH5xTLnghGzGhE8C6LFAQykd9mjHSjo8,12528
 ddeutil/workflow/cron.py,sha256=WS2MInn0Sp5DKlZDZH5VFZ5AA0Q3_AnBnYEU4lZSv4I,9779
 ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
-ddeutil/workflow/job.py,sha256=
+ddeutil/workflow/job.py,sha256=aVRWLMLv5vYFbckT6AKYrMu29FzXYESOEzDHhFIpUyo,34159
 ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
 ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
-ddeutil/workflow/result.py,sha256=
+ddeutil/workflow/result.py,sha256=rI0S8-HanFDk1l6_BsYRRamzSfzKUy7bkKJUae1w_aQ,5708
 ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
-ddeutil/workflow/scheduler.py,sha256=
-ddeutil/workflow/stages.py,sha256=
+ddeutil/workflow/scheduler.py,sha256=hk3-9R63DZH9J0PWbtBzaD8rqHyiOf03vTvAJDgsxTA,28279
+ddeutil/workflow/stages.py,sha256=E5XoMVijjcvm_YK8AbiA8xGAQUphCPTtGazW-oLAdeI,82543
 ddeutil/workflow/utils.py,sha256=NZPvPPP_5g4cigFcD7tHjIKLtKMeYAcb3oUhNyhTpJ0,7947
-ddeutil/workflow/workflow.py,sha256
-ddeutil/workflow/api/__init__.py,sha256=
-ddeutil/workflow/api/api.py,sha256=xLrQ8yD7iOn-MkzaSxG-BADbdkqLikDna630oW3YEmc,5243
+ddeutil/workflow/workflow.py,sha256=F3I_c0LHMoJFZIFt1a92sxFMlQVkE-cH--TLyX2rwuo,46799
+ddeutil/workflow/api/__init__.py,sha256=kY30dL8HPY8tY_GBmm7y_3OdoXzB1-EA2a96PLU0AQw,5278
 ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
 ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
 ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
-ddeutil/workflow/api/routes/job.py,sha256=
+ddeutil/workflow/api/routes/job.py,sha256=8X5VLDJH6PumyNIY6JGRNBsf2gWN0eG9DzxRPSh6n4I,2190
 ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
-ddeutil/workflow/api/routes/schedules.py,sha256=
-ddeutil/workflow/api/routes/workflows.py,sha256=
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
+ddeutil/workflow/api/routes/schedules.py,sha256=14RnaJKEGMSJtncI1H_QQVZNBe_jDS40PPRO6qFc3i0,4805
+ddeutil/workflow/api/routes/workflows.py,sha256=GJu5PiXEylswrXylEImpncySjeU9chrvrtjhiMCw2RQ,4529
+ddeutil_workflow-0.0.55.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.55.dist-info/METADATA,sha256=sTjAMsv4yIgw-CKKHyr-sg_yrgnnDT_0UMF3E2TP-Js,19008
+ddeutil_workflow-0.0.55.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.55.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.55.dist-info/RECORD,,
ddeutil/workflow/api/api.py
DELETED
@@ -1,170 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-import contextlib
-from collections.abc import AsyncIterator
-from datetime import datetime, timedelta
-from typing import TypedDict
-
-from dotenv import load_dotenv
-from fastapi import FastAPI, Request
-from fastapi import status as st
-from fastapi.encoders import jsonable_encoder
-from fastapi.exceptions import RequestValidationError
-from fastapi.middleware.cors import CORSMiddleware
-from fastapi.middleware.gzip import GZipMiddleware
-from fastapi.responses import UJSONResponse
-
-from ..__about__ import __version__
-from ..conf import api_config, config
-from ..logs import get_logger
-from ..scheduler import ReleaseThread, ReleaseThreads
-from ..workflow import ReleaseQueue, WorkflowTask
-from .routes import job, log
-from .utils import repeat_at
-
-load_dotenv()
-logger = get_logger("uvicorn.error")
-
-
-class State(TypedDict):
-    """TypeDict for State of FastAPI application."""
-
-    scheduler: list[str]
-    workflow_threads: ReleaseThreads
-    workflow_tasks: list[WorkflowTask]
-    workflow_queue: dict[str, ReleaseQueue]
-
-
-@contextlib.asynccontextmanager
-async def lifespan(a: FastAPI) -> AsyncIterator[State]:
-    """Lifespan function for the FastAPI application."""
-    a.state.scheduler = []
-    a.state.workflow_threads = {}
-    a.state.workflow_tasks = []
-    a.state.workflow_queue = {}
-
-    yield {
-        # NOTE: Scheduler value should be contained a key of workflow and
-        #   list of datetime of queue and running.
-        #
-        #   ... {
-        #   ...     '<workflow-name>': (
-        #   ...         [<running-datetime>, ...], [<queue-datetime>, ...]
-        #   ...     )
-        #   ... }
-        #
-        "scheduler": a.state.scheduler,
-        "workflow_queue": a.state.workflow_queue,
-        "workflow_threads": a.state.workflow_threads,
-        "workflow_tasks": a.state.workflow_tasks,
-    }
-
-
-app = FastAPI(
-    titile="Workflow",
-    description=(
-        "This is a workflow FastAPI application that use to manage manual "
-        "execute, logging, and schedule workflow via RestAPI."
-    ),
-    version=__version__,
-    lifespan=lifespan,
-    default_response_class=UJSONResponse,
-)
-app.add_middleware(GZipMiddleware, minimum_size=1000)
-origins: list[str] = [
-    "http://localhost",
-    "http://localhost:88",
-    "http://localhost:80",
-]
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=origins,
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
-
-
-@app.get("/")
-async def health():
-    """Index view that not return any template without json status."""
-    return {"message": "Workflow already start up with healthy status."}
-
-
-# NOTE Add the jobs and logs routes by default.
-app.include_router(job, prefix=api_config.prefix_path)
-app.include_router(log, prefix=api_config.prefix_path)
-
-
-# NOTE: Enable the workflows route.
-if api_config.enable_route_workflow:
-    from .routes import workflow
-
-    app.include_router(workflow, prefix=api_config.prefix_path)
-
-
-# NOTE: Enable the schedules route.
-if api_config.enable_route_schedule:
-    from ..logs import get_audit
-    from ..scheduler import schedule_task
-    from .routes import schedule
-
-    app.include_router(schedule, prefix=api_config.prefix_path)
-
-    @schedule.on_event("startup")
-    @repeat_at(cron="* * * * *", delay=2)
-    def scheduler_listener():
-        """Schedule broker every minute at 02 second."""
-        logger.debug(
-            f"[SCHEDULER]: Start listening schedule from queue "
-            f"{app.state.scheduler}"
-        )
-        if app.state.workflow_tasks:
-            schedule_task(
-                app.state.workflow_tasks,
-                stop=datetime.now(config.tz) + timedelta(minutes=1),
-                queue=app.state.workflow_queue,
-                threads=app.state.workflow_threads,
-                audit=get_audit(),
-            )
-
-    @schedule.on_event("startup")
-    @repeat_at(cron="*/5 * * * *", delay=10)
-    def monitoring():
-        """Monitoring workflow thread that running in the background."""
-        logger.debug("[MONITOR]: Start monitoring threading.")
-        snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
-        for t_name in snapshot_threads:
-
-            thread_release: ReleaseThread = app.state.workflow_threads[t_name]
-
-            # NOTE: remove the thread that running success.
-            if not thread_release["thread"].is_alive():
-                app.state.workflow_threads.pop(t_name)
-
-
-@app.exception_handler(RequestValidationError)
-async def validation_exception_handler(
-    request: Request, exc: RequestValidationError
-):
-    _ = request
-    return UJSONResponse(
-        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
-        content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
-    )
-
-
-if __name__ == "__main__":
-    import uvicorn
-
-    uvicorn.run(
-        app,
-        host="0.0.0.0",
-        port=80,
-        log_level="DEBUG",
-    )
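With `api/api.py` removed and `api/__init__.py` growing by roughly the same 170 lines (see the summary above), the FastAPI application appears to have been relocated rather than dropped. A hedged sketch of serving it from the assumed new import path; the module target and object name are inferred from the file summary, not confirmed against the 0.0.55 source:

```python
# Hedged sketch: run the relocated app with uvicorn. "ddeutil.workflow.api:app"
# is an assumed import path based on the api/__init__.py change; note that
# uvicorn expects lowercase log level names ("debug"), unlike the deleted file.
import uvicorn

if __name__ == "__main__":
    uvicorn.run(
        "ddeutil.workflow.api:app",
        host="0.0.0.0",
        port=80,
        log_level="debug",
    )
```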
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/WHEEL
File without changes
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/licenses/LICENSE
File without changes
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.55.dist-info}/top_level.txt
File without changes