ddeutil-workflow 0.0.68__py3-none-any.whl → 0.0.70__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +14 -12
- ddeutil/workflow/api/log_conf.py +16 -29
- ddeutil/workflow/api/routes/logs.py +1 -1
- ddeutil/workflow/api/routes/workflows.py +3 -3
- ddeutil/workflow/audits.py +374 -0
- ddeutil/workflow/cli.py +70 -6
- ddeutil/workflow/conf.py +4 -51
- ddeutil/workflow/errors.py +7 -1
- ddeutil/workflow/event.py +2 -2
- ddeutil/workflow/job.py +9 -3
- ddeutil/workflow/result.py +10 -1
- ddeutil/workflow/reusables.py +1 -1
- ddeutil/workflow/{logs.py → traces.py} +224 -409
- ddeutil/workflow/utils.py +19 -11
- ddeutil/workflow/workflow.py +226 -18
- {ddeutil_workflow-0.0.68.dist-info → ddeutil_workflow-0.0.70.dist-info}/METADATA +29 -27
- ddeutil_workflow-0.0.70.dist-info/RECORD +30 -0
- ddeutil_workflow-0.0.68.dist-info/RECORD +0 -29
- {ddeutil_workflow-0.0.68.dist-info → ddeutil_workflow-0.0.70.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.68.dist-info → ddeutil_workflow-0.0.70.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.68.dist-info → ddeutil_workflow-0.0.70.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.68.dist-info → ddeutil_workflow-0.0.70.dist-info}/top_level.txt +0 -0
ddeutil/workflow/utils.py
CHANGED
```diff
@@ -163,21 +163,23 @@ def gen_id(
     extras: DictData | None = None,
 ) -> str:
     """Generate running ID for able to tracking. This generates process use
-
+    ``md5`` algorithm function if ``WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE`` set
     to false. But it will cut this hashing value length to 10 it the setting
     value set to true.

     Simple Mode:

-        ... 0000 00 00 00 00 00 000000
-        ... year month day hour minute second microsecond
+        ... 0000 00 00 00 00 00 000000 T 0000000000
+        ... year month day hour minute second microsecond sep simple-id

     :param value: A value that want to add to prefix before hashing with md5.
-    :param sensitive: A flag that convert the value to lower
-
-
-
-    :param
+    :param sensitive: (bool) A flag that enable to convert the value to lower
+        case before hashing that value before generate ID.
+    :param unique: (bool) A flag that add timestamp at microsecond level to
+        value before hashing.
+    :param simple_mode: (bool | None) A flag for generate ID by simple mode.
+    :param extras: (DictData) An extra parameter that use for override config
+        value.

     :rtype: str
     """
```
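The updated docstring documents the simple-mode layout as a microsecond timestamp, a `T` separator, then a 10-character hash. A minimal sketch of an ID in that documented shape, assuming only the standard library; `sketch_simple_id` is a hypothetical helper, not the package's `gen_id`:

```python
import hashlib
from datetime import datetime


def sketch_simple_id(value: str) -> str:
    """Illustrate the documented layout: <YYYYmmddHHMMSS><microsec>T<10-char md5>."""
    now = datetime.now()
    stamp = now.strftime("%Y%m%d%H%M%S") + f"{now.microsecond:06d}"
    digest = hashlib.md5(value.encode()).hexdigest()[:10]  # hash cut to length 10
    return f"{stamp}T{digest}"


print(sketch_simple_id("demo-workflow"))
```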
```diff
@@ -212,7 +214,8 @@ def default_gen_id() -> str:
 def make_exec(path: Union[Path, str]) -> None:
     """Change mode of file to be executable file.

-    :param path: A file path that want to make executable
+    :param path: (Path | str) A file path that want to make executable
+        permission.
     """
     f: Path = Path(path) if isinstance(path, str) else path
     f.chmod(f.stat().st_mode | stat.S_IEXEC)
```
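Only the docstring changes here; the body still ORs the execute bit into the file's current mode. The same pattern in isolation, with a hypothetical local script path:

```python
import stat
from pathlib import Path

script = Path("run.sh")  # hypothetical file for illustration
script.write_text("#!/bin/sh\necho ok\n")
# Same chmod idiom as make_exec: keep existing mode bits, add owner-execute.
script.chmod(script.stat().st_mode | stat.S_IEXEC)
```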
```diff
@@ -285,9 +288,14 @@ def dump_all(value: T, by_alias: bool = False) -> T: ... # pragma: no cov


 def dump_all(
-    value: Union[T, BaseModel],
+    value: Union[T, BaseModel],
+    by_alias: bool = False,
 ) -> Union[T, DictData]:
-    """Dump all BaseModel object to dict.
+    """Dump all nested BaseModel object to dict object.
+
+    :param value: (T | BaseModel)
+    :param by_alias: (bool)
+    """
     if isinstance(value, dict):
         return {k: dump_all(value[k], by_alias=by_alias) for k in value}
     elif isinstance(value, (list, tuple, set)):
```
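`dump_all` now takes `by_alias` explicitly and recurses through dicts and list/tuple/set containers. A standalone sketch of that recursion pattern; the `BaseModel` and fall-through branches are assumptions based on the visible `isinstance` checks, not the package's exact code:

```python
from typing import Any

from pydantic import BaseModel


def dump_all_sketch(value: Any, by_alias: bool = False) -> Any:
    """Recursively turn nested BaseModel instances into plain dicts."""
    if isinstance(value, dict):
        return {k: dump_all_sketch(v, by_alias=by_alias) for k, v in value.items()}
    if isinstance(value, (list, tuple, set)):
        return type(value)(dump_all_sketch(v, by_alias=by_alias) for v in value)
    if isinstance(value, BaseModel):
        return value.model_dump(by_alias=by_alias)
    return value


class User(BaseModel):
    name: str


print(dump_all_sketch({"users": [User(name="a"), User(name="b")]}))
# -> {'users': [{'name': 'a'}, {'name': 'b'}]}
```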
ddeutil/workflow/workflow.py
CHANGED
```diff
@@ -27,17 +27,17 @@ from threading import Event
 from typing import Any, Optional
 from zoneinfo import ZoneInfo

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

 from . import get_status_from_error
 from .__types import DictData
-from .
+from .audits import Audit, get_audit
+from .conf import YamlParser, dynamic
 from .errors import WorkflowCancelError, WorkflowError, WorkflowTimeoutError
 from .event import Crontab
 from .job import Job
-from .logs import Audit, get_audit
 from .params import Param
 from .result import (
     CANCEL,
```
```diff
@@ -112,7 +112,6 @@ class Workflow(BaseModel):
         *,
         path: Optional[Path] = None,
         extras: DictData | None = None,
-        loader: type[Loader] = None,
     ) -> Self:
         """Create Workflow instance from the Loader object that only receive
         an input workflow name. The loader object will use this workflow name to
```
```diff
@@ -122,14 +121,12 @@ class Workflow(BaseModel):
         :param path: (Path) An override config path.
         :param extras: (DictData) An extra parameters that want to override core
             config values.
-        :param loader: A loader class for override default loader object.

         :raise ValueError: If the type does not match with current object.

         :rtype: Self
         """
-
-        load: Loader = loader(name, path=path, extras=extras)
+        load: YamlParser = YamlParser(name, path=path, extras=extras)

         # NOTE: Validate the config type match with current connection model
         if load.type != cls.__name__:
```
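With the `loader` keyword dropped, this classmethod always loads through `YamlParser`. Under that signature, a call now looks roughly like this; the classmethod name `from_conf`, the top-level re-export, and the workflow/config names are assumptions for illustration:

```python
from pathlib import Path

from ddeutil.workflow import Workflow  # assumes the package-root re-export

# "wf-example" is a placeholder workflow name defined in a ./conf/*.yaml template.
# Passing the former `loader=` keyword would now raise a TypeError.
wf = Workflow.from_conf("wf-example", path=Path("./conf"))
```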
```diff
@@ -141,7 +138,7 @@ class Workflow(BaseModel):
         if extras:
             data["extras"] = extras

-        cls.__bypass_on__(data, path=load.path, extras=extras
+        cls.__bypass_on__(data, path=load.path, extras=extras)
         return cls.model_validate(obj=data)

     @classmethod
```
```diff
@@ -150,7 +147,6 @@ class Workflow(BaseModel):
         data: DictData,
         path: Path,
         extras: DictData | None = None,
-        loader: type[Loader] = None,
     ) -> DictData:
         """Bypass the on data to loaded config data.

```
```diff
@@ -171,7 +167,7 @@ class Workflow(BaseModel):
         #   field.
         data["on"] = [
             (
-                (
+                YamlParser(n, path=path, extras=extras).data
                 if isinstance(n, str)
                 else n
             )
```
```diff
@@ -206,7 +202,6 @@ class Workflow(BaseModel):
     def __on_no_dup_and_reach_limit__(
         cls,
         value: list[Crontab],
-        info: ValidationInfo,
     ) -> list[Crontab]:
         """Validate the on fields should not contain duplicate values and if it
         contains the every minute value more than one value, it will remove to
```
```diff
@@ -237,12 +232,9 @@ class Workflow(BaseModel):
                 f"{list(set_tz)}."
             )

-
-        if len(set_ons) > (
-            conf := dynamic("max_cron_per_workflow", extras=extras)
-        ):
+        if len(set_ons) > 10:
             raise ValueError(
-
+                "The number of the on should not more than 10 crontabs."
             )
         return value

```
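The validator no longer reads a configurable `max_cron_per_workflow` value; it hard-caps the `on` field at 10 crontab entries. A toy Pydantic model sketching the same guard (hypothetical class, not the `Workflow` model itself):

```python
from pydantic import BaseModel, field_validator


class OnLimited(BaseModel):
    """Toy stand-in mirroring the hard limit of 10 `on` entries."""

    on: list[str] = []

    @field_validator("on")
    @classmethod
    def limit_on(cls, value: list[str]) -> list[str]:
        if len(set(value)) > 10:
            raise ValueError(
                "The number of the on should not more than 10 crontabs."
            )
        return value


OnLimited(on=[f"{i} * * * *" for i in range(5)])     # validates fine
# OnLimited(on=[f"{i} * * * *" for i in range(11)])  # would raise a ValidationError
```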
```diff
@@ -265,8 +257,10 @@ class Workflow(BaseModel):
                     f"{self.name!r}."
                 )

-            # NOTE:
-            self.jobs[job].
+            # NOTE: Copy the job model and set job ID to the job model.
+            job_model = self.jobs[job].model_copy()
+            job_model.id = job
+            self.jobs[job] = job_model

         # VALIDATE: Validate workflow name should not dynamic with params
         #   template.
```
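The validator now copies each job model and stamps the mapping key onto its `id` instead of mutating the original entry in place. The same copy-then-assign idiom on a plain Pydantic model, as a sketch with a toy `Job` stand-in:

```python
from typing import Optional

from pydantic import BaseModel


class Job(BaseModel):  # toy stand-in, not ddeutil.workflow.job.Job
    id: Optional[str] = None


jobs = {"first-job": Job()}
for name in jobs:
    job_model = jobs[name].model_copy()  # keep the original model untouched
    job_model.id = name                  # stamp the dict key as the job ID
    jobs[name] = job_model

print(jobs["first-job"].id)  # -> "first-job"
```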
```diff
@@ -779,3 +773,217 @@
                 ).to_dict(),
             },
         )
+
+    def rerun(
+        self,
+        context: DictData,
+        *,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
+        timeout: float = 3600,
+        max_job_parallel: int = 2,
+    ) -> Result:
+        """Re-Execute workflow with passing the error context data.
+
+        :param context: A context result that get the failed status.
+        :param parent_run_id: (Optional[str]) A parent workflow running ID.
+        :param event: (Event) An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
+        :param timeout: (float) A workflow execution time out in second unit
+            that use for limit time of execution and waiting job dependency.
+            This value does not force stop the task that still running more than
+            this limit time. (Default: 60 * 60 seconds)
+        :param max_job_parallel: (int) The maximum workers that use for job
+            execution in `ThreadPoolExecutor` object. (Default: 2 workers)
+
+        :rtype: Result
+        """
+        ts: float = time.monotonic()
+
+        result: Result = Result.construct_with_rs_or_id(
+            parent_run_id=parent_run_id,
+            id_logic=self.name,
+            extras=self.extras,
+        )
+        if context["status"] == SUCCESS:
+            result.trace.info(
+                "[WORKFLOW]: Does not rerun because it already executed with "
+                "success status."
+            )
+            return result.catch(status=SUCCESS, context=context)
+
+        err = context["errors"]
+        result.trace.info(f"[WORKFLOW]: Previous error: {err}")
+
+        event: Event = event or Event()
+        max_job_parallel: int = dynamic(
+            "max_job_parallel", f=max_job_parallel, extras=self.extras
+        )
+        result.trace.info(
+            f"[WORKFLOW]: Execute: {self.name!r} ("
+            f"{'parallel' if max_job_parallel > 1 else 'sequential'} jobs)"
+        )
+        if not self.jobs:
+            result.trace.warning(f"[WORKFLOW]: {self.name!r} does not set jobs")
+            return result.catch(status=SUCCESS, context=context)
+
+        # NOTE: Prepare the new context for rerun process.
+        jobs: DictData = context.get("jobs")
+        new_context: DictData = {
+            "params": context["params"].copy(),
+            "jobs": {j: jobs[j] for j in jobs if jobs[j]["status"] == SUCCESS},
+        }
+
+        total_job: int = 0
+        job_queue: Queue = Queue()
+        for job_id in self.jobs:
+
+            if job_id in new_context["jobs"]:
+                continue
+
+            job_queue.put(job_id)
+            total_job += 1
+
+        if total_job == 0:
+            result.trace.warning("[WORKFLOW]: It does not have job to rerun.")
+            return result.catch(status=SUCCESS, context=context)
+
+        not_timeout_flag: bool = True
+        statuses: list[Status] = [WAIT] * total_job
+        skip_count: int = 0
+        sequence_statuses: list[Status] = []
+        timeout: float = dynamic(
+            "max_job_exec_timeout", f=timeout, extras=self.extras
+        )
+
+        result.catch(status=WAIT, context=new_context)
+        if event and event.is_set():
+            return result.catch(
+                status=CANCEL,
+                context={
+                    "errors": WorkflowCancelError(
+                        "Execution was canceled from the event was set before "
+                        "workflow execution."
+                    ).to_dict(),
+                },
+            )
+
+        with ThreadPoolExecutor(max_job_parallel, "wf") as executor:
+            futures: list[Future] = []
+
+            while not job_queue.empty() and (
+                not_timeout_flag := ((time.monotonic() - ts) < timeout)
+            ):
+                job_id: str = job_queue.get()
+                job: Job = self.job(name=job_id)
+                if (check := job.check_needs(new_context["jobs"])) == WAIT:
+                    job_queue.task_done()
+                    job_queue.put(job_id)
+                    time.sleep(0.15)
+                    continue
+                elif check == FAILED:  # pragma: no cov
+                    return result.catch(
+                        status=FAILED,
+                        context={
+                            "status": FAILED,
+                            "errors": WorkflowError(
+                                f"Validate job trigger rule was failed with "
+                                f"{job.trigger_rule.value!r}."
+                            ).to_dict(),
+                        },
+                    )
+                elif check == SKIP:  # pragma: no cov
+                    result.trace.info(
+                        f"[JOB]: Skip job: {job_id!r} from trigger rule."
+                    )
+                    job.set_outputs(output={"status": SKIP}, to=new_context)
+                    job_queue.task_done()
+                    skip_count += 1
+                    continue
+
+                if max_job_parallel > 1:
+                    futures.append(
+                        executor.submit(
+                            self.execute_job,
+                            job=job,
+                            params=new_context,
+                            result=result,
+                            event=event,
+                        ),
+                    )
+                    job_queue.task_done()
+                    continue
+
+                if len(futures) < 1:
+                    futures.append(
+                        executor.submit(
+                            self.execute_job,
+                            job=job,
+                            params=new_context,
+                            result=result,
+                            event=event,
+                        )
+                    )
+                elif (future := futures.pop(0)).done():
+                    if e := future.exception():
+                        sequence_statuses.append(get_status_from_error(e))
+                    else:
+                        st, _ = future.result()
+                        sequence_statuses.append(st)
+                    job_queue.put(job_id)
+                elif future.cancelled():
+                    sequence_statuses.append(CANCEL)
+                    job_queue.put(job_id)
+                elif future.running() or "state=pending" in str(future):
+                    futures.insert(0, future)
+                    job_queue.put(job_id)
+                else:  # pragma: no cov
+                    job_queue.put(job_id)
+                    futures.insert(0, future)
+                    result.trace.warning(
+                        f"[WORKFLOW]: ... Execution non-threading not "
+                        f"handle: {future}."
+                    )
+
+                job_queue.task_done()
+
+            if not_timeout_flag:
+                job_queue.join()
+                for total, future in enumerate(as_completed(futures), start=0):
+                    try:
+                        statuses[total], _ = future.result()
+                    except WorkflowError as e:
+                        statuses[total] = get_status_from_error(e)
+
+                # NOTE: Update skipped status from the job trigger.
+                for i in range(skip_count):
+                    statuses[total + 1 + i] = SKIP
+
+                # NOTE: Update status from none-parallel job execution.
+                for i, s in enumerate(sequence_statuses, start=0):
+                    statuses[total + 1 + skip_count + i] = s
+
+                return result.catch(
+                    status=validate_statuses(statuses), context=new_context
+                )
+
+            event.set()
+            for future in futures:
+                future.cancel()
+
+            result.trace.error(
+                f"[WORKFLOW]: {self.name!r} was timeout because it use exec "
+                f"time more than {timeout} seconds."
+            )
+
+            time.sleep(0.0025)
+
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": WorkflowTimeoutError(
+                    f"{self.name!r} was timeout because it use exec time more "
+                    f"than {timeout} seconds."
+                ).to_dict(),
+            },
+        )
```
{ddeutil_workflow-0.0.68.dist-info → ddeutil_workflow-0.0.70.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.68
+Version: 0.0.70
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
```
```diff
@@ -61,12 +61,13 @@ by a `.yaml` template.

 ---

-**:pushpin: <u>Rules of This Workflow
+**:pushpin: <u>Rules of This Workflow</u>**:

 1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
 3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)
+4. Recommend to pass a **Secret Value** with environment variable in YAML template 🔐

 ---

```
```diff
@@ -230,14 +231,17 @@ class RestAuth(BaseModel):

 @tag("requests", alias="get-api-with-oauth-to-s3")
 def get_api_with_oauth_to_s3(
-
-
-
-
-
-
-
+    method: str,
+    url: str,
+    body: dict[str, str],
+    auth: RestAuth,
+    writing_node: str,
+    aws: AwsCredential,
+    result: Result,
 ) -> dict[str, int]:
+    """Get the data from RestAPI via Authenticate with OAuth and then store to
+    AWS S3 service.
+    """
     result.trace.info("[CALLER]: Start get data via RestAPI to S3.")
     result.trace.info(f"... {method}: {url}")
     if method != "post":
```
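The README example now spells out the full caller signature: the data arguments, the typed credential models, and the injected `result: Result`. A trimmed sketch of registering a caller with `@tag`; the import path for `Result` and `tag` and the module that `REGISTRY_CALLER` should point at are assumptions for illustration:

```python
from ddeutil.workflow import Result, tag  # assumes both are re-exported at the package root


@tag("demo", alias="say-hello")
def say_hello(name: str, result: Result) -> dict[str, str]:
    """Minimal caller: log through the injected Result and return an output dict."""
    result.trace.info(f"[CALLER]: Hello {name}.")
    return {"greeting": f"hello {name}"}
```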
```diff
@@ -269,24 +273,22 @@ it will use default value and do not raise any error to you.
 > The config value that you will set on the environment should combine with
 > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

-| Name | Component | Default | Description
-
-| **REGISTRY_CALLER** |
-| **REGISTRY_FILTER** |
-| **CONF_PATH** |
-| **TIMEZONE** |
-| **STAGE_DEFAULT_ID** |
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **
-| **AUDIT_PATH** | Log | `./audits` | |
-| **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
+| Name | Component | Default | Description |
+|:-----------------------------|:---------:|:--------|:------------|
+| **REGISTRY_CALLER** | CORE | `.` | List of importable string for the call stage. |
+| **REGISTRY_FILTER** | CORE | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+| **CONF_PATH** | CORE | `./conf` | The config path that keep all template `.yaml` files. |
+| **TIMEZONE** | CORE | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+| **STAGE_DEFAULT_ID** | CORE | `false` | A flag that enable default stage ID that use for catch an execution output. |
+| **GENERATE_ID_SIMPLE_MODE** | CORE | `true` | A flog that enable generating ID with `md5` algorithm. |
+| **DEBUG_MODE** | LOG | `true` | A flag that enable logging with debug level mode. |
+| **FORMAT** | LOG | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | A trace message console format. |
+| **FORMAT_FILE** | LOG | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | A trace message format that use to write to target pointer. |
+| **DATETIME_FORMAT** | LOG | `%Y-%m-%d %H:%M:%S` | A datetime format of the trace log. |
+| **TRACE_PATH** | LOG | `./logs` | A pointer of trace log that use to store. |
+| **TRACE_ENABLE_WRITE** | LOG | `false` | A flag that enable writing trace log. |
+| **AUDIT_PATH** | LOG | `./audits` | A pointer of audit log that use to store. |
+| **AUDIT_ENABLE_WRITE** | LOG | `true` | A flag that enable writing audit log after end execution in the workflow release step. |

 ## :rocket: Deployment

```
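All of these settings resolve from environment variables named `WORKFLOW_{component}_{name}`. For example, overriding the config path, timezone, and trace writing before the package is imported could look like this; the values themselves are illustrative:

```python
import os

# Component CORE + name CONF_PATH  ->  WORKFLOW_CORE_CONF_PATH
os.environ["WORKFLOW_CORE_CONF_PATH"] = "./conf"
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"

# Component LOG + name TRACE_ENABLE_WRITE  ->  WORKFLOW_LOG_TRACE_ENABLE_WRITE
os.environ["WORKFLOW_LOG_TRACE_ENABLE_WRITE"] = "true"

from ddeutil.workflow import Workflow  # import after the environment is prepared
```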
ddeutil_workflow-0.0.70.dist-info/RECORD
ADDED
```diff
@@ -0,0 +1,30 @@
+ddeutil/workflow/__about__.py,sha256=hpI1C5z35dcwfGMtSFmLCLv6edKv0tP9m0z-1ytAu0Y,28
+ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
+ddeutil/workflow/__init__.py,sha256=HUy9XkBe7ttpUupJS4JDuj3aGp2QmJZfz8m2kHAIwdw,927
+ddeutil/workflow/__main__.py,sha256=Qd-f8z2Q2vpiEP2x6PBFsJrpACWDVxFKQk820MhFmHo,59
+ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
+ddeutil/workflow/audits.py,sha256=1pg4a5wdZCAKOqMr1Z_ofzRAFsDarN1BIJenWwn9xkg,11435
+ddeutil/workflow/cli.py,sha256=pFmSmkdQUIpmCbFcoyoBub2LAj3rtlFM3oToJq0C0Ac,5149
+ddeutil/workflow/conf.py,sha256=KcvOlU0zzS53iK44X_T64qHSX9nr4EHGjGX31dsRiUE,13609
+ddeutil/workflow/errors.py,sha256=O5rq80Sqj0QMeIsWXpRUhiFLTq0o8bwm5BQ4kuq6xmI,3013
+ddeutil/workflow/event.py,sha256=e3xcECfMvH6K8Tff9cjCXIItVJjOmlonAQ0l253l6T0,11110
+ddeutil/workflow/job.py,sha256=kviOQeSUsx0Z7CL0foblTulg2m_l6a3M3SMRxg9RWeg,39151
+ddeutil/workflow/params.py,sha256=Pco3DyjptC5Jkx53dhLL9xlIQdJvNAZs4FLzMUfXpbQ,12402
+ddeutil/workflow/result.py,sha256=ctxNSaY9tZPHEAUgvDkjWWu2APeTmlZCf1Hb0XVbbFo,8173
+ddeutil/workflow/reusables.py,sha256=LSn0XTkzGHf4ulOmWub29F0JZHt0NEyzrFd4ZFx_g_k,21622
+ddeutil/workflow/stages.py,sha256=kzMEMRTEuG52EOw51zyVO6LE-oiiqTIRUCk_OMcWZTM,106506
+ddeutil/workflow/traces.py,sha256=DrKzxgp9FJUJd_oWYHiMtR1z8R7HeweAL4_Y8q0_pz8,25048
+ddeutil/workflow/utils.py,sha256=65aMH2JKeeG7GLLoD0D5_0Cv55XV5EUd1Gn-Zz21hwo,9339
+ddeutil/workflow/workflow.py,sha256=nPjNbn0UUKlUwkh7lHFRb5BSLARBOTZ0iv0TU_SMOKc,36205
+ddeutil/workflow/api/__init__.py,sha256=W3fe6_NLHSUzr4Tsu79w3pmvrYjpLeP3zBk4mtpPyqg,2843
+ddeutil/workflow/api/log_conf.py,sha256=WfS3udDLSyrP-C80lWOvxxmhd_XWKvQPkwDqKblcH3E,1834
+ddeutil/workflow/api/routes/__init__.py,sha256=JRaJZB0D6mgR17MbZo8yLtdYDtD62AA8MdKlFqhG84M,420
+ddeutil/workflow/api/routes/job.py,sha256=x809G5gCbJS257txj9eLLTbCbFK8ercXWzPDLuv5gEM,2953
+ddeutil/workflow/api/routes/logs.py,sha256=HiXw93PeIiaK_xJjM8lbD2ED1Il-W1iM51085nc7qmg,5286
+ddeutil/workflow/api/routes/workflows.py,sha256=D76cdLb2_9Dkfe2_8xt06CvPhAyJMqxyYkUgAs8Qlnw,4402
+ddeutil_workflow-0.0.70.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.70.dist-info/METADATA,sha256=EddcIOkUz36vyIOG4GEOnBt2Zy_K8qI7_Gld5KTbeLs,15207
+ddeutil_workflow-0.0.70.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ddeutil_workflow-0.0.70.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.70.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.70.dist-info/RECORD,,
```
ddeutil_workflow-0.0.68.dist-info/RECORD
REMOVED
```diff
@@ -1,29 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=sX7SVH5YIxA4EqrZ30bmKtzuaxvEoxEAs2Bc7wFHXgY,28
-ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
-ddeutil/workflow/__init__.py,sha256=JfFZlPRDgR2J0rb0SRejt1OSrOrD3GGv9Um14z8MMfs,901
-ddeutil/workflow/__main__.py,sha256=Qd-f8z2Q2vpiEP2x6PBFsJrpACWDVxFKQk820MhFmHo,59
-ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
-ddeutil/workflow/cli.py,sha256=6cxKS3U9oyGTbCI--4x3aiv536PJ5k2KyLRjAQHE6qA,3136
-ddeutil/workflow/conf.py,sha256=p5QLqRo67nivoHDEj7Rs5P5-qdBvtDa3c9Zaylug6p0,14768
-ddeutil/workflow/errors.py,sha256=4DaKnyUm8RrUyQA5qakgW0ycSQLO7j-owyoh79LWQ5c,2893
-ddeutil/workflow/event.py,sha256=PMGXu0wH9ld-nq670QsGcpHBCNr0Yu5Y3hA_EAPIKyo,11104
-ddeutil/workflow/job.py,sha256=qcbKSOa39256nfJHL0vKJsHrelcRujX5KET2IEGS8dw,38995
-ddeutil/workflow/logs.py,sha256=4jufDRXTgtU9GZ1fVXL37tnihupD69YFSkD-mNQTr_g,31657
-ddeutil/workflow/params.py,sha256=Pco3DyjptC5Jkx53dhLL9xlIQdJvNAZs4FLzMUfXpbQ,12402
-ddeutil/workflow/result.py,sha256=GU84psZFiJ4LRf_HXgz-R98YN4lOUkER0VR7x9DDdOU,7922
-ddeutil/workflow/reusables.py,sha256=jPrOCbxagqRvRFGXJzIyDa1wKV5AZ4crZyJ10cldQP0,21620
-ddeutil/workflow/stages.py,sha256=kzMEMRTEuG52EOw51zyVO6LE-oiiqTIRUCk_OMcWZTM,106506
-ddeutil/workflow/utils.py,sha256=oKrhB-HOogeaO9RGXbe2vAs30A3rMMQxUd2B5pOw8zg,9131
-ddeutil/workflow/workflow.py,sha256=AcSGqsH1N4LqWhYIcCPy9CoV_AGlXUrBgjpl-gniv6g,28267
-ddeutil/workflow/api/__init__.py,sha256=W3fe6_NLHSUzr4Tsu79w3pmvrYjpLeP3zBk4mtpPyqg,2843
-ddeutil/workflow/api/log_conf.py,sha256=P1_a5kjB0dWjSyJvmeKUU8KZdaiNd3UELoL42SiKmzU,2269
-ddeutil/workflow/api/routes/__init__.py,sha256=JRaJZB0D6mgR17MbZo8yLtdYDtD62AA8MdKlFqhG84M,420
-ddeutil/workflow/api/routes/job.py,sha256=x809G5gCbJS257txj9eLLTbCbFK8ercXWzPDLuv5gEM,2953
-ddeutil/workflow/api/routes/logs.py,sha256=ElfXNJmwpeR_nE99RcCOUm2BRfQhly6RE9kZ7xVC530,5284
-ddeutil/workflow/api/routes/workflows.py,sha256=08p2r7xoKrW1tUMKkGCaffBckdV1VvaF3OKCjFygJdE,4392
-ddeutil_workflow-0.0.68.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.68.dist-info/METADATA,sha256=o3fWmIKifoP9HHQEKiELp10WI2XEUvfLCn5hFw4Ol1M,16072
-ddeutil_workflow-0.0.68.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ddeutil_workflow-0.0.68.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
-ddeutil_workflow-0.0.68.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.68.dist-info/RECORD,,
```
The remaining dist-info files — WHEEL, entry_points.txt, licenses/LICENSE, and top_level.txt — are files without changes; only their directory moves from ddeutil_workflow-0.0.68.dist-info to ddeutil_workflow-0.0.70.dist-info.