ddeutil-workflow 0.0.40__py3-none-any.whl → 0.0.42__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in the public registry.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +21 -25
- ddeutil/workflow/api/api.py +8 -8
- ddeutil/workflow/api/routes/logs.py +1 -2
- ddeutil/workflow/api/routes/schedules.py +5 -5
- ddeutil/workflow/api/routes/workflows.py +3 -3
- ddeutil/workflow/conf.py +50 -29
- ddeutil/workflow/cron.py +12 -13
- ddeutil/workflow/job.py +68 -9
- ddeutil/workflow/logs.py +358 -69
- ddeutil/workflow/params.py +1 -0
- ddeutil/workflow/result.py +6 -15
- ddeutil/workflow/{templates.py → reusables.py} +199 -10
- ddeutil/workflow/scheduler.py +27 -29
- ddeutil/workflow/stages.py +423 -64
- ddeutil/workflow/utils.py +10 -0
- ddeutil/workflow/workflow.py +119 -74
- {ddeutil_workflow-0.0.40.dist-info → ddeutil_workflow-0.0.42.dist-info}/METADATA +12 -9
- ddeutil_workflow-0.0.42.dist-info/RECORD +30 -0
- ddeutil/workflow/audit.py +0 -257
- ddeutil/workflow/caller.py +0 -179
- ddeutil/workflow/context.py +0 -61
- ddeutil_workflow-0.0.40.dist-info/RECORD +0 -33
- {ddeutil_workflow-0.0.40.dist-info → ddeutil_workflow-0.0.42.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.40.dist-info → ddeutil_workflow-0.0.42.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.40.dist-info → ddeutil_workflow-0.0.42.dist-info}/top_level.txt +0 -0
ddeutil/workflow/logs.py
CHANGED
@@ -3,7 +3,8 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-
+# [x] Use fix config
+"""A Logs module contain TraceLog dataclass and AuditLog model.
 """
 from __future__ import annotations
 
@@ -19,9 +20,10 @@ from typing import ClassVar, Literal, Optional, Union
 
 from pydantic import BaseModel, Field
 from pydantic.dataclasses import dataclass
+from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
-from .__types import DictStr, TupleStr
+from .__types import DictData, DictStr, TupleStr
 from .conf import config, get_logger
 from .utils import cut_id, get_dt_now
 
@@ -36,6 +38,10 @@ __all__: TupleStr = (
     "get_dt_tznow",
     "get_trace",
     "get_trace_obj",
+    "get_audit",
+    "FileAudit",
+    "SQLiteAudit",
+    "Audit",
 )
 
 
@@ -47,6 +53,66 @@ def get_dt_tznow() -> datetime:  # pragma: no cov
     return get_dt_now(tz=config.tz)
 
 
+class TraceMeda(BaseModel):  # pragma: no cov
+    mode: Literal["stdout", "stderr"]
+    datetime: str
+    process: int
+    thread: int
+    message: str
+    filename: str
+    lineno: int
+
+    @classmethod
+    def make(cls, mode: Literal["stdout", "stderr"], message: str) -> Self:
+        """Make a TraceMeda instance."""
+        frame_info: Traceback = getframeinfo(
+            currentframe().f_back.f_back.f_back
+        )
+        return cls(
+            mode=mode,
+            datetime=get_dt_tznow().strftime(config.log_datetime_format),
+            process=os.getpid(),
+            thread=get_ident(),
+            message=message,
+            filename=frame_info.filename.split(os.path.sep)[-1],
+            lineno=frame_info.lineno,
+        )
+
+
+class TraceData(BaseModel):  # pragma: no cov
+    stdout: str = Field(description="A standard output trace data.")
+    stderr: str = Field(description="A standard error trace data.")
+    meta: list[TraceMeda] = Field(
+        default_factory=list,
+        description=(
+            "A metadata mapping of this output and error before making it to "
+            "standard value."
+        ),
+    )
+
+    @classmethod
+    def from_path(cls, file: Path) -> Self:
+        data: DictStr = {"stdout": "", "stderr": "", "meta": []}
+
+        if (file / "stdout.txt").exists():
+            data["stdout"] = (file / "stdout.txt").read_text(encoding="utf-8")
+
+        if (file / "stderr.txt").exists():
+            data["stderr"] = (file / "stderr.txt").read_text(encoding="utf-8")
+
+        if (file / "metadata.json").exists():
+            data["meta"] = [
+                json.loads(line)
+                for line in (
+                    (file / "metadata.json")
+                    .read_text(encoding="utf-8")
+                    .splitlines()
+                )
+            ]
+
+        return cls.model_validate(data)
+
+
 @dataclass(frozen=True)
 class BaseTraceLog(ABC):  # pragma: no cov
     """Base Trace Log dataclass object."""
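Note on the hunk above: TraceMeda and TraceData give the file-based trace a structured on-disk layout (stdout.txt, stderr.txt, and a metadata.json written as JSON lines). A minimal read-back sketch, assuming a trace directory already produced by FileTraceLog; the directory name below is hypothetical:

from pathlib import Path

from ddeutil.workflow.logs import TraceData

# Hypothetical pointer directory written by FileTraceLog; it holds
# stdout.txt, stderr.txt, and metadata.json for one run.
pointer = Path("./logs/run_id=example-run")

trace_data = TraceData.from_path(pointer)
print(trace_data.stdout)              # raw stdout trace text
for meta in trace_data.meta:          # one TraceMeda per metadata.json line
    print(meta.datetime, meta.mode, meta.message)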
@@ -67,6 +133,14 @@ class BaseTraceLog(ABC):  # pragma: no cov
             "Create writer logic for this trace object before using."
         )
 
+    @abstractmethod
+    async def awriter(self, message: str, is_err: bool = False) -> None:
+        """Async Write a trace message after making to target pointer object.
+
+        :param message:
+        :param is_err:
+        """
+
     @abstractmethod
     def make_message(self, message: str) -> str:
         """Prepare and Make a message before write and log processes.
@@ -87,7 +161,6 @@ class BaseTraceLog(ABC):  # pragma: no cov
         """
         msg: str = self.make_message(message)
 
-        # NOTE: Write file if debug mode was enabled.
         if config.debug:
             self.writer(msg)
 
@@ -133,49 +206,31 @@ class BaseTraceLog(ABC):  # pragma: no cov
         self.writer(msg, is_err=True)
         logger.exception(msg, stacklevel=2)
 
+    async def adebug(self, message: str) -> None:  # pragma: no cov
+        msg: str = self.make_message(message)
+        if config.debug:
+            await self.awriter(msg)
+        logger.info(msg, stacklevel=2)
 
-
-
-
-
-    thread: int
-    message: str
-    filename: str
-    lineno: int
-
-
-class TraceData(BaseModel):  # pragma: no cov
-    stdout: str = Field(description="A standard output trace data.")
-    stderr: str = Field(description="A standard error trace data.")
-    meta: list[TraceMeda] = Field(
-        default_factory=list,
-        description=(
-            "A metadata mapping of this output and error before making it to "
-            "standard value."
-        ),
-    )
-
-    @classmethod
-    def from_path(cls, file: Path) -> Self:
-        data: DictStr = {"stdout": "", "stderr": "", "meta": []}
-
-        if (file / "stdout.txt").exists():
-            data["stdout"] = (file / "stdout.txt").read_text(encoding="utf-8")
+    async def ainfo(self, message: str) -> None:  # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg)
+        logger.info(msg, stacklevel=2)
 
-
-
+    async def awarning(self, message: str) -> None:  # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg)
+        logger.warning(msg, stacklevel=2)
 
-
-
-
-
-                    (file / "metadata.json")
-                    .read_text(encoding="utf-8")
-                    .splitlines()
-                )
-            ]
+    async def aerror(self, message: str) -> None:  # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg, is_err=True)
+        logger.error(msg, stacklevel=2)
 
-
+    async def aexception(self, message: str) -> None:  # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg, is_err=True)
+        logger.exception(msg, stacklevel=2)
 
 
 class FileTraceLog(BaseTraceLog):  # pragma: no cov
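The adebug/ainfo/awarning/aerror/aexception coroutines added above mirror the existing synchronous methods but route writes through awriter. A usage sketch, assuming get_trace is called with a run ID and parent run ID the way this module itself calls it (the IDs here are made up):

import asyncio

from ddeutil.workflow.logs import get_trace


async def main() -> None:
    # get_trace picks FileTraceLog or SQLiteTraceLog from the config log path.
    trace = get_trace("example-run-id", "example-parent-run-id")
    await trace.ainfo("Start async step")
    await trace.awarning("Retrying an upstream call")
    await trace.aerror("Upstream call failed")


asyncio.run(main())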
@@ -227,7 +282,7 @@ class FileTraceLog(BaseTraceLog):  # pragma: no cov
     def make_message(self, message: str) -> str:
         """Prepare and Make a message before write and log processes.
 
-        :param message: A message that want to prepare and make before.
+        :param message: (str) A message that want to prepare and make before.
 
         :rtype: str
         """
@@ -249,40 +304,46 @@ class FileTraceLog(BaseTraceLog):  # pragma: no cov
         if not config.enable_write_log:
             return
 
-
-
-        lineno: int = frame_info.lineno
-
-        # NOTE: set process and thread IDs.
-        process: int = os.getpid()
-        thread: int = get_ident()
-
-        write_file: str = "stderr.txt" if is_err else "stdout.txt"
-        write_data: dict[str, Union[str, int]] = {
-            "datetime": get_dt_tznow().strftime(config.log_datetime_format),
-            "process": process,
-            "thread": thread,
-            "message": message,
-            "filename": filename,
-            "lineno": lineno,
-        }
-
-        with (self.pointer / write_file).open(mode="at", encoding="utf-8") as f:
-            msg_fmt: str = f"{config.log_format_file}\n"
-            f.write(msg_fmt.format(**write_data))
+        write_file: str = "stderr" if is_err else "stdout"
+        trace_meta: TraceMeda = TraceMeda.make(mode=write_file, message=message)
 
-        with (self.pointer / "
+        with (self.pointer / f"{write_file}.txt").open(
             mode="at", encoding="utf-8"
         ) as f:
             f.write(
-
-                + "\n"
+                f"{config.log_format_file}\n".format(**trace_meta.model_dump())
             )
 
+        with (self.pointer / "metadata.json").open(
+            mode="at", encoding="utf-8"
+        ) as f:
+            f.write(trace_meta.model_dump_json() + "\n")
+
     async def awriter(
         self, message: str, is_err: bool = False
-    ):  # pragma: no cov
-
+    ) -> None:  # pragma: no cov
+        if not config.enable_write_log:
+            return
+
+        try:
+            import aiofiles
+        except ImportError as e:
+            raise ImportError("Async mode need aiofiles package") from e
+
+        write_file: str = "stderr" if is_err else "stdout"
+        trace_meta: TraceMeda = TraceMeda.make(mode=write_file, message=message)
+
+        async with aiofiles.open(
+            self.pointer / f"{write_file}.txt", mode="at", encoding="utf-8"
+        ) as f:
+            await f.write(
+                f"{config.log_format_file}\n".format(**trace_meta.model_dump())
+            )
+
+        async with aiofiles.open(
+            self.pointer / "metadata.json", mode="at", encoding="utf-8"
+        ) as f:
+            await f.write(trace_meta.model_dump_json() + "\n")
 
 
 class SQLiteTraceLog(BaseTraceLog):  # pragma: no cov
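Both writers now append one JSON object per call to metadata.json next to the formatted stdout.txt/stderr.txt files, and the async path imports aiofiles lazily, so the package only needs to be installed when awriter is actually used. A sketch of parsing one such metadata line; the values are illustrative, and the datetime string follows config.log_datetime_format:

import json

# Illustrative metadata.json line; each writer/awriter call appends one
# object whose keys match the TraceMeda fields above.
line = (
    '{"mode": "stdout", "datetime": "2024-01-01 00:00:00", "process": 1234, '
    '"thread": 140001, "message": "Start execute workflow", '
    '"filename": "workflow.py", "lineno": 321}'
)
meta = json.loads(line)
assert meta["mode"] in ("stdout", "stderr")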
@@ -309,6 +370,8 @@ class SQLiteTraceLog(BaseTraceLog):  # pragma: no cov
 
     def writer(self, message: str, is_err: bool = False) -> None: ...
 
+    def awriter(self, message: str, is_err: bool = False) -> None: ...
+
 
 TraceLog = Union[
     FileTraceLog,
@@ -329,3 +392,229 @@ def get_trace_obj() -> type[TraceLog]:  # pragma: no cov
     if config.log_path.is_file():
         return SQLiteTraceLog
     return FileTraceLog
+
+
+class BaseAudit(BaseModel, ABC):
+    """Base Audit Pydantic Model with abstraction class property that implement
+    only model fields. This model should to use with inherit to logging
+    subclass like file, sqlite, etc.
+    """
+
+    name: str = Field(description="A workflow name.")
+    release: datetime = Field(description="A release datetime.")
+    type: str = Field(description="A running type before logging.")
+    context: DictData = Field(
+        default_factory=dict,
+        description="A context that receive from a workflow execution result.",
+    )
+    parent_run_id: Optional[str] = Field(
+        default=None, description="A parent running ID."
+    )
+    run_id: str = Field(description="A running ID")
+    update: datetime = Field(default_factory=get_dt_tznow)
+    execution_time: float = Field(default=0, description="An execution time.")
+
+    @model_validator(mode="after")
+    def __model_action(self) -> Self:
+        """Do before the Audit action with WORKFLOW_AUDIT_ENABLE_WRITE env variable.
+
+        :rtype: Self
+        """
+        if config.enable_write_audit:
+            self.do_before()
+        return self
+
+    def do_before(self) -> None:  # pragma: no cov
+        """To something before end up of initial log model."""
+
+    @abstractmethod
+    def save(self, excluded: list[str] | None) -> None:  # pragma: no cov
+        """Save this model logging to target logging store."""
+        raise NotImplementedError("Audit should implement ``save`` method.")
+
+
+class FileAudit(BaseAudit):
+    """File Audit Pydantic Model that use to saving log data from result of
+    workflow execution. It inherits from BaseAudit model that implement the
+    ``self.save`` method for file.
+    """
+
+    filename_fmt: ClassVar[str] = (
+        "workflow={name}/release={release:%Y%m%d%H%M%S}"
+    )
+
+    def do_before(self) -> None:
+        """Create directory of release before saving log file."""
+        self.pointer().mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def find_audits(cls, name: str) -> Iterator[Self]:
+        """Generate the audit data that found from logs path with specific a
+        workflow name.
+
+        :param name: A workflow name that want to search release logging data.
+
+        :rtype: Iterator[Self]
+        """
+        pointer: Path = config.audit_path / f"workflow={name}"
+        if not pointer.exists():
+            raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
+
+        for file in pointer.glob("./release=*/*.log"):
+            with file.open(mode="r", encoding="utf-8") as f:
+                yield cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def find_audit_with_release(
+        cls,
+        name: str,
+        release: datetime | None = None,
+    ) -> Self:
+        """Return the audit data that found from logs path with specific
+        workflow name and release values. If a release does not pass to an input
+        argument, it will return the latest release from the current log path.
+
+        :param name: A workflow name that want to search log.
+        :param release: A release datetime that want to search log.
+
+        :raise FileNotFoundError:
+        :raise NotImplementedError: If an input release does not pass to this
+            method. Because this method does not implement latest log.
+
+        :rtype: Self
+        """
+        if release is None:
+            raise NotImplementedError("Find latest log does not implement yet.")
+
+        pointer: Path = (
+            config.audit_path
+            / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
+        )
+        if not pointer.exists():
+            raise FileNotFoundError(
+                f"Pointer: ./logs/workflow={name}/"
+                f"release={release:%Y%m%d%H%M%S} does not found."
+            )
+
+        latest_file: Path = max(pointer.glob("./*.log"), key=os.path.getctime)
+        with latest_file.open(mode="r", encoding="utf-8") as f:
+            return cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def is_pointed(cls, name: str, release: datetime) -> bool:
+        """Check the release log already pointed or created at the destination
+        log path.
+
+        :param name: A workflow name.
+        :param release: A release datetime.
+
+        :rtype: bool
+        :return: Return False if the release log was not pointed or created.
+        """
+        # NOTE: Return False if enable writing log flag does not set.
+        if not config.enable_write_audit:
+            return False
+
+        # NOTE: create pointer path that use the same logic of pointer method.
+        pointer: Path = config.audit_path / cls.filename_fmt.format(
+            name=name, release=release
+        )
+
+        return pointer.exists()
+
+    def pointer(self) -> Path:
+        """Return release directory path that was generated from model data.
+
+        :rtype: Path
+        """
+        return config.audit_path / self.filename_fmt.format(
+            name=self.name, release=self.release
+        )
+
+    def save(self, excluded: list[str] | None) -> Self:
+        """Save logging data that receive a context data from a workflow
+        execution result.
+
+        :param excluded: An excluded list of key name that want to pass in the
+            model_dump method.
+
+        :rtype: Self
+        """
+        trace: TraceLog = get_trace(self.run_id, self.parent_run_id)
+
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_audit:
+            trace.debug("[LOG]: Skip writing log cause config was set")
+            return self
+
+        log_file: Path = (
+            self.pointer() / f"{self.parent_run_id or self.run_id}.log"
+        )
+        log_file.write_text(
+            json.dumps(
+                self.model_dump(exclude=excluded),
+                default=str,
+                indent=2,
+            ),
+            encoding="utf-8",
+        )
+        return self
+
+
+class SQLiteAudit(BaseAudit):  # pragma: no cov
+    """SQLite Audit Pydantic Model."""
+
+    table_name: ClassVar[str] = "audits"
+    schemas: ClassVar[
+        str
+    ] = """
+        workflow str,
+        release int,
+        type str,
+        context json,
+        parent_run_id int,
+        run_id int,
+        update datetime
+        primary key ( run_id )
+        """
+
+    def save(self, excluded: list[str] | None) -> SQLiteAudit:
+        """Save logging data that receive a context data from a workflow
+        execution result.
+        """
+        trace: TraceLog = get_trace(self.run_id, self.parent_run_id)
+
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_audit:
+            trace.debug("[LOG]: Skip writing log cause config was set")
+            return self
+
+        raise NotImplementedError("SQLiteAudit does not implement yet.")
+
+
+class RemoteFileAudit(FileAudit):  # pragma: no cov
+    """Remote File Audit Pydantic Model."""
+
+    def save(self, excluded: list[str] | None) -> RemoteFileAudit: ...
+
+
+class RedisAudit(BaseAudit):  # pragma: no cov
+    """Redis Audit Pydantic Model."""
+
+    def save(self, excluded: list[str] | None) -> RedisAudit: ...
+
+
+Audit = Union[
+    FileAudit,
+    SQLiteAudit,
+]
+
+
+def get_audit() -> type[Audit]:  # pragma: no cov
+    """Get an audit class that dynamic base on the config audit path value.
+
+    :rtype: type[Audit]
+    """
+    if config.audit_path.is_file():
+        return SQLiteAudit
+    return FileAudit
ddeutil/workflow/params.py
CHANGED
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+# [ ] Use config
 """This module include all Param Pydantic Models that use for parsing an
 incoming parameters that was passed to the Workflow and Schedule objects before
 execution or release methods.
ddeutil/workflow/result.py
CHANGED
@@ -11,7 +11,6 @@ from __future__ import annotations
 from dataclasses import field
 from datetime import datetime
 from enum import IntEnum
-from threading import Event
 from typing import Optional
 
 from pydantic import ConfigDict
@@ -21,30 +20,21 @@ from typing_extensions import Self
 
 from .__types import DictData, TupleStr
 from .logs import TraceLog, get_dt_tznow, get_trace
-from .utils import gen_id
+from .utils import default_gen_id, gen_id
 
 __all__: TupleStr = (
     "Result",
     "Status",
-    "default_gen_id",
 )
 
 
-def default_gen_id() -> str:
-    """Return running ID which use for making default ID for the Result model if
-    a run_id field initializes at the first time.
-
-    :rtype: str
-    """
-    return gen_id("manual", unique=True)
-
-
 class Status(IntEnum):
     """Status Int Enum object."""
 
     SUCCESS: int = 0
     FAILED: int = 1
     WAIT: int = 2
+    SKIP: int = 3
 
 
 @dataclass(
@@ -65,7 +55,6 @@ class Result:
     parent_run_id: Optional[str] = field(default=None, compare=False)
     ts: datetime = field(default_factory=get_dt_tznow, compare=False)
 
-    event: Event = field(default_factory=Event, compare=False, repr=False)
     trace: Optional[TraceLog] = field(default=None, compare=False, repr=False)
 
     @classmethod
@@ -90,10 +79,12 @@ class Result:
 
     @model_validator(mode="after")
     def __prepare_trace(self) -> Self:
-        """Prepare trace field that want to pass after its initialize step.
+        """Prepare trace field that want to pass after its initialize step.
+
+        :rtype: Self
+        """
         if self.trace is None:  # pragma: no cov
             self.trace: TraceLog = get_trace(self.run_id, self.parent_run_id)
-
         return self
 
     def set_parent_run_id(self, running_id: str) -> Self:
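On the result side, default_gen_id moves out of result.py and is now imported from the utils module (presumably part of the +10 lines added to ddeutil/workflow/utils.py in the file list), the threading Event field is dropped from Result, and Status gains a SKIP member. A small sketch, assuming Result still accepts run_id and parent_run_id as keyword arguments:

from ddeutil.workflow.result import Result, Status
from ddeutil.workflow.utils import default_gen_id

# default_gen_id now lives in utils but still produces a default run ID.
result = Result(run_id=default_gen_id(), parent_run_id=None)
print(result.run_id, result.trace)  # trace is filled in by __prepare_trace

# SKIP joins SUCCESS/FAILED/WAIT, e.g. for work that is skipped rather than run.
assert Status.SKIP == 3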
|