ddeutil-workflow 0.0.58__py3-none-any.whl → 0.0.59__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +3 -3
- ddeutil/workflow/conf.py +6 -4
- ddeutil/workflow/event.py +2 -2
- ddeutil/workflow/exceptions.py +3 -3
- ddeutil/workflow/job.py +35 -34
- ddeutil/workflow/logs.py +78 -51
- ddeutil/workflow/params.py +9 -5
- ddeutil/workflow/result.py +18 -18
- ddeutil/workflow/reusables.py +9 -9
- ddeutil/workflow/scheduler.py +8 -8
- ddeutil/workflow/stages.py +70 -70
- ddeutil/workflow/utils.py +6 -6
- ddeutil/workflow/workflow.py +31 -31
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.59.dist-info}/METADATA +6 -3
- ddeutil_workflow-0.0.59.dist-info/RECORD +31 -0
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.59.dist-info}/WHEEL +1 -1
- ddeutil_workflow-0.0.58.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.59.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.59.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.58.dist-info → ddeutil_workflow-0.0.59.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.58"
+__version__: str = "0.0.59"
ddeutil/workflow/__cron.py
CHANGED
@@ -699,9 +699,9 @@ class CronJob:

     def schedule(
         self,
-        date: datetime
+        date: Optional[datetime] = None,
         *,
-        tz: str
+        tz: Optional[str] = None,
     ) -> CronRunner:
         """Returns CronRunner instance that be datetime runner with this
         cronjob. It can use `next`, `prev`, or `reset` methods to generate
@@ -766,7 +766,7 @@ class CronRunner:

     def __init__(
         self,
         cron: CronJob | CronJobYear,
-        date: datetime
+        date: Optional[datetime] = None,
         *,
         tz: str | ZoneInfo | None = None,
     ) -> None:
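Both `schedule` signatures now take optional arguments with `None` defaults. A tiny standalone sketch of what that shape allows callers to do (the fall-back-to-now behaviour here is an assumption for illustration, not taken from the package):

    from datetime import datetime
    from typing import Optional
    from zoneinfo import ZoneInfo

    def schedule(date: Optional[datetime] = None, *, tz: Optional[str] = None) -> datetime:
        zone = ZoneInfo(tz) if tz else None
        # Assumed behaviour: omitting date means "start from the current time in tz".
        return date if date is not None else datetime.now(tz=zone)

    print(schedule(tz="UTC"))              # both arguments may now be omitted
    print(schedule(datetime(2025, 1, 1)))  # or supplied explicitly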
ddeutil/workflow/conf.py
CHANGED
@@ -5,6 +5,7 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+import copy
 import json
 import os
 from abc import ABC, abstractmethod
@@ -26,13 +27,13 @@ T = TypeVar("T")
 PREFIX: Final[str] = "WORKFLOW"


-def env(var: str, default: str
+def env(var: str, default: Optional[str] = None) -> Optional[str]:
     """Get environment variable with uppercase and adding prefix string.

     :param var: (str) A env variable name.
-    :param default: (str
+    :param default: (Optional[str]) A default value if an env var does not set.

-    :rtype: str
+    :rtype: Optional[str]
     """
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)

@@ -298,6 +299,7 @@ class FileLoad(BaseLoad):
                 f"Multi-config paths does not support for type: {type(paths)}"
             )
         else:
+            paths: list[Path] = copy.deepcopy(paths)
             paths.append(path)

         all_data: list[tuple[float, DictData]] = []
@@ -398,7 +400,7 @@ class FileLoad(BaseLoad):
         return is_ignored(file, read_ignore(path / ignore_filename))

     @classmethod
-    def filter_yaml(cls, file: Path, name: str
+    def filter_yaml(cls, file: Path, name: Optional[str] = None) -> DictData:
         """Read a YAML file context from an input file path and specific name.

         :param file: (Path) A file path that want to extract YAML context.
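The new `copy.deepcopy(paths)` line guards against appending to the list object the caller passed in. A minimal sketch of that pattern (hypothetical function name, not the package source):

    import copy
    from pathlib import Path
    from typing import Optional

    def collect_paths(extra: Optional[list[Path]] = None) -> list[Path]:
        # Work on a copy so the caller's list is never mutated in place.
        paths: list[Path] = copy.deepcopy(extra) if extra is not None else []
        paths.append(Path("."))
        return paths

    user_paths = [Path("conf")]
    collect_paths(user_paths)
    assert user_paths == [Path("conf")]  # unchanged for the caller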
ddeutil/workflow/event.py
CHANGED
@@ -10,7 +10,7 @@ from __future__ import annotations

 from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Any, Literal, Union
+from typing import Annotated, Any, Literal, Optional, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -28,7 +28,7 @@ Interval = Literal["daily", "weekly", "monthly"]
 def interval2crontab(
     interval: Interval,
     *,
-    day: str
+    day: Optional[str] = None,
     time: str = "00:00",
 ) -> str:
     """Return the crontab string that was generated from specific values.
ddeutil/workflow/exceptions.py
CHANGED
@@ -9,7 +9,7 @@ annotate for handle error only.
 """
 from __future__ import annotations

-from typing import Literal, TypedDict, overload
+from typing import Literal, Optional, TypedDict, overload


 class ErrorData(TypedDict):
@@ -39,9 +39,9 @@ class BaseWorkflowException(Exception):
     making an error context to the result context.
     """

-    def __init__(self, message: str, *, refs: str
+    def __init__(self, message: str, *, refs: Optional[str] = None):
         super().__init__(message)
-        self.refs: str
+        self.refs: Optional[str] = refs

     @overload
     def to_dict(
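The same `Optional[...] = None` tightening runs through the whole release. A condensed, standalone sketch of the exception change above (illustrative only, not the package source):

    from typing import Optional

    class BaseWorkflowException(Exception):
        """Keep an optional reference ID alongside the error message."""

        def __init__(self, message: str, *, refs: Optional[str] = None):
            super().__init__(message)
            # None explicitly means "no reference"; type checkers now see that.
            self.refs: Optional[str] = refs

    err = BaseWorkflowException("stage failed", refs="stage-01")
    print(err.refs)  # -> stage-01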
ddeutil/workflow/job.py
CHANGED
@@ -19,6 +19,7 @@ from __future__ import annotations

 import copy
 import time
+from collections.abc import Iterator
 from concurrent.futures import (
     FIRST_EXCEPTION,
     CancelledError,
@@ -67,8 +68,8 @@ def make(

     :param matrix: (Matrix) A matrix values that want to cross product to
         possible parallelism values.
-    :param include:
-    :param exclude:
+    :param include: A list of additional matrix that want to adds-in.
+    :param exclude: A list of exclude matrix that want to filter-out.

     :rtype: list[DictStr]
     """
@@ -191,22 +192,22 @@ class Strategy(BaseModel):
 class Rule(str, Enum):
     """Rule enum object for assign trigger option."""

-    ALL_SUCCESS
-    ALL_FAILED
-    ALL_DONE
-    ONE_FAILED
-    ONE_SUCCESS
-    NONE_FAILED
-    NONE_SKIPPED
+    ALL_SUCCESS = "all_success"
+    ALL_FAILED = "all_failed"
+    ALL_DONE = "all_done"
+    ONE_FAILED = "one_failed"
+    ONE_SUCCESS = "one_success"
+    NONE_FAILED = "none_failed"
+    NONE_SKIPPED = "none_skipped"


 class RunsOn(str, Enum):
     """Runs-On enum object."""

-    LOCAL
-    SELF_HOSTED
-    AZ_BATCH
-    DOCKER
+    LOCAL = "local"
+    SELF_HOSTED = "self_hosted"
+    AZ_BATCH = "azure_batch"
+    DOCKER = "docker"


 class BaseRunsOn(BaseModel): # pragma: no cov
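Giving the `Rule` and `RunsOn` members explicit string values makes them interchangeable with plain strings. A small standalone sketch of that behaviour (values copied from the diff above):

    from enum import Enum

    class Rule(str, Enum):
        """Trigger rule with explicit string values."""
        ALL_SUCCESS = "all_success"
        ONE_FAILED = "one_failed"

    # A str-based Enum compares equal to its raw value, so strings coming
    # from YAML or JSON configs can be matched or parsed directly.
    assert Rule.ALL_SUCCESS == "all_success"
    assert Rule("one_failed") is Rule.ONE_FAILED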
@@ -566,7 +567,7 @@ class Job(BaseModel):
         :param output: (DictData) A result data context that want to extract
             and transfer to the `strategies` key in receive context.
         :param to: (DictData) A received context data.
-        :param job_id: (str
+        :param job_id: (Optional[str]) A job ID if the `id` field does not set.

         :rtype: DictData
         """
@@ -606,9 +607,9 @@
         self,
         params: DictData,
         *,
-        run_id: str
-        parent_run_id: str
-        event: Event
+        run_id: Optional[str] = None,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
@@ -676,8 +677,8 @@ def local_execute_strategy(
     strategy: DictData,
     params: DictData,
     *,
-    result: Result
-    event: Event
+    result: Optional[Result] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local strategy execution with passing dynamic parameters from the
     job execution and strategy matrix.
@@ -799,9 +800,9 @@ def local_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution or directly. It will generate matrix values at the first
@@ -874,10 +875,10 @@ def local_execute(
     status: Status = SUCCESS

     if not fail_fast_flag:
-        done:
+        done: Iterator[Future] = as_completed(futures)
     else:
         done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
-        if len(done) != len(futures):
+        if len(list(done)) != len(futures):
             result.trace.warning(
                 "[JOB]: Handler Fail-Fast: Got exception and set event."
             )
@@ -895,7 +896,7 @@ def local_execute(
             else ""
         )
         result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
-        done:
+        done: Iterator[Future] = as_completed(futures)

     for future in done:
         try:
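The fail-fast handling above is standard `concurrent.futures` usage: wait for the first exception, warn if not everything finished, then still drain every future through `as_completed`. A self-contained sketch of that control flow (illustrative only, not the package's code):

    from collections.abc import Iterator
    from concurrent.futures import (
        FIRST_EXCEPTION,
        Future,
        ThreadPoolExecutor,
        as_completed,
        wait,
    )

    def run_all(tasks, fail_fast: bool = False) -> list:
        results = []
        with ThreadPoolExecutor(max_workers=4) as pool:
            futures: list[Future] = [pool.submit(t) for t in tasks]
            if not fail_fast:
                done: Iterator[Future] = as_completed(futures)
            else:
                # Stop waiting as soon as any future raises ...
                done_set, _ = wait(futures, return_when=FIRST_EXCEPTION)
                if len(done_set) != len(futures):
                    print("fail-fast: an exception occurred before all futures finished")
                # ... but still drain every future to collect results and errors.
                done = as_completed(futures)
            for future in done:
                try:
                    results.append(future.result())
                except Exception as exc:
                    results.append(exc)
        return results

    print(run_all([lambda: 1, lambda: 2]))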
@@ -918,9 +919,9 @@ def self_hosted_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result: # pragma: no cov
     """Self-Hosted job execution with passing dynamic parameters from the
     workflow execution or itself execution. It will make request to the
@@ -981,9 +982,9 @@ def azure_batch_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result: # pragma: no cov
     """Azure Batch job execution that will run all job's stages on the Azure
     Batch Node and extract the result file to be returning context result.
@@ -1035,9 +1036,9 @@ def docker_execution(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ): # pragma: no cov
     """Docker job execution.

ddeutil/workflow/logs.py
CHANGED
@@ -3,7 +3,6 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-# [x] Use dynamic config
 # [x] Use fix config for `get_logger`, and Model initialize step.
 """A Logs module contain Trace dataclass and Audit Pydantic model.
 """
@@ -14,7 +13,6 @@ import logging
 import os
 from abc import ABC, abstractmethod
 from collections.abc import Iterator
-from dataclasses import field
 from datetime import datetime
 from functools import lru_cache
 from inspect import Traceback, currentframe, getframeinfo
@@ -22,12 +20,11 @@ from pathlib import Path
 from threading import get_ident
 from typing import ClassVar, Literal, Optional, TypeVar, Union

-from pydantic import BaseModel, Field
-from pydantic.dataclasses import dataclass
+from pydantic import BaseModel, ConfigDict, Field
 from pydantic.functional_validators import model_validator
 from typing_extensions import Self

-from .__types import DictData
+from .__types import DictData
 from .conf import config, dynamic
 from .utils import cut_id, get_dt_now, prepare_newline
@@ -86,6 +83,7 @@ class TraceMeta(BaseModel): # pragma: no cov
     """

     mode: Literal["stdout", "stderr"] = Field(description="A meta mode.")
+    level: str = Field(description="A log level.")
     datetime: str = Field(description="A datetime in string format.")
     process: int = Field(description="A process ID.")
     thread: int = Field(description="A thread ID.")
@@ -98,13 +96,15 @@ class TraceMeta(BaseModel): # pragma: no cov
         cls,
         mode: Literal["stdout", "stderr"],
         message: str,
+        level: str,
         *,
         extras: Optional[DictData] = None,
     ) -> Self:
         """Make the current TraceMeta instance that catching local state.

-        :param mode: A metadata mode.
-        :param message: A message.
+        :param mode: (Literal["stdout", "stderr"]) A metadata mode.
+        :param message: (str) A message.
+        :param level: (str) A log level.
         :param extras: (DictData) An extra parameter that want to override core
             config values.

@@ -116,6 +116,7 @@ class TraceMeta(BaseModel): # pragma: no cov
         extras: DictData = extras or {}
         return cls(
             mode=mode,
+            level=level,
             datetime=(
                 get_dt_now(tz=dynamic("tz", extras=extras)).strftime(
                     dynamic("log_datetime_format", extras=extras)
@@ -150,7 +151,7 @@ class TraceData(BaseModel): # pragma: no cov

         :rtype: Self
         """
-        data:
+        data: DictData = {"stdout": "", "stderr": "", "meta": []}

         for mode in ("stdout", "stderr"):
             if (file / f"{mode}.txt").exists():
@@ -169,19 +170,28 @@ class TraceData(BaseModel): # pragma: no cov
         return cls.model_validate(data)


-
-
-    """Base Trace dataclass with abstraction class property."""
+class BaseTrace(BaseModel, ABC): # pragma: no cov
+    """Base Trace model with abstraction class property."""

-
-
-
+    model_config = ConfigDict(frozen=True)
+
+    run_id: str = Field(default="A running ID")
+    parent_run_id: Optional[str] = Field(
+        default=None, description="A parent running ID"
+    )
+    extras: DictData = Field(
+        default_factory=dict,
+        description=(
+            "An extra parameter that want to override on the core config "
+            "values."
+        ),
+    )

     @classmethod
     @abstractmethod
     def find_traces(
         cls,
-        path: Path
+        path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> Iterator[TraceData]: # pragma: no cov
         raise NotImplementedError(
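The notable structural change here is that `BaseTrace` moves from a Pydantic dataclass to a frozen Pydantic `BaseModel` that still declares abstract methods. A condensed, standalone sketch of that pattern (assuming pydantic v2; the concrete subclass and its print behaviour are hypothetical):

    from abc import ABC, abstractmethod
    from typing import Any, Optional

    from pydantic import BaseModel, ConfigDict, Field

    class BaseTrace(BaseModel, ABC):
        """Frozen model: fields are immutable after construction."""

        model_config = ConfigDict(frozen=True)

        run_id: str
        parent_run_id: Optional[str] = Field(default=None)
        extras: dict[str, Any] = Field(default_factory=dict)

        @abstractmethod
        def writer(self, message: str, level: str, is_err: bool = False) -> None: ...

    class ConsoleTrace(BaseTrace):
        def writer(self, message: str, level: str, is_err: bool = False) -> None:
            stream = "stderr" if is_err else "stdout"
            print(f"[{level}] ({self.run_id}) -> {stream}: {message}")

    ConsoleTrace(run_id="demo-3735").writer("job started", level="info")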
@@ -195,7 +205,7 @@ class BaseTrace(ABC): # pragma: no cov
         run_id: str,
         force_raise: bool = True,
         *,
-        path: Path
+        path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> TraceData:
         raise NotImplementedError(
@@ -204,24 +214,30 @@ class BaseTrace(ABC): # pragma: no cov
         )

     @abstractmethod
-    def writer(self, message: str, is_err: bool = False) -> None:
+    def writer(self, message: str, level: str, is_err: bool = False) -> None:
         """Write a trace message after making to target pointer object. The
         target can be anything be inherited this class and overwrite this method
         such as file, console, or database.

-        :param message: A message after making.
-        :param
+        :param message: (str) A message after making.
+        :param level: (str) A log level.
+        :param is_err: (bool) A flag for writing with an error trace or not.
+            (Default be False)
         """
         raise NotImplementedError(
             "Create writer logic for this trace object before using."
         )

     @abstractmethod
-    async def awriter(
+    async def awriter(
+        self, message: str, level: str, is_err: bool = False
+    ) -> None:
         """Async Write a trace message after making to target pointer object.

-        :param message:
-        :param
+        :param message: (str) A message after making.
+        :param level: (str) A log level.
+        :param is_err: (bool) A flag for writing with an error trace or not.
+            (Default be False)
         """
         raise NotImplementedError(
             "Create async writer logic for this trace object before using."
@@ -252,7 +268,7 @@ class BaseTrace(ABC): # pragma: no cov
         if mode != "debug" or (
             mode == "debug" and dynamic("debug", extras=self.extras)
         ):
-            self.writer(msg, is_err=is_err)
+            self.writer(msg, level=mode, is_err=is_err)

         getattr(logger, mode)(msg, stacklevel=3)

@@ -309,7 +325,7 @@ class BaseTrace(ABC): # pragma: no cov
         if mode != "debug" or (
             mode == "debug" and dynamic("debug", extras=self.extras)
         ):
-            await self.awriter(msg, is_err=is_err)
+            await self.awriter(msg, level=mode, is_err=is_err)

         getattr(logger, mode)(msg, stacklevel=3)

@@ -360,7 +376,7 @@ class FileTrace(BaseTrace): # pragma: no cov
     @classmethod
     def find_traces(
         cls,
-        path: Path
+        path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> Iterator[TraceData]: # pragma: no cov
         """Find trace logs.
@@ -380,7 +396,7 @@ class FileTrace(BaseTrace): # pragma: no cov
         run_id: str,
         *,
         force_raise: bool = True,
-        path: Path
+        path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> TraceData:
         """Find trace log with an input specific run ID.
@@ -399,7 +415,7 @@ class FileTrace(BaseTrace): # pragma: no cov
                 f"Trace log on path {base_path}, does not found trace "
                 f"'run_id={run_id}'."
             )
-        return
+        return TraceData(stdout="", stderr="")

     @property
     def pointer(self) -> Path:
@@ -433,7 +449,7 @@ class FileTrace(BaseTrace): # pragma: no cov
         """
         return f"({self.cut_id}) {message}"

-    def writer(self, message: str, is_err: bool = False) -> None:
+    def writer(self, message: str, level: str, is_err: bool = False) -> None:
         """Write a trace message after making to target file and write metadata
         in the same path of standard files.

@@ -443,16 +459,19 @@ class FileTrace(BaseTrace): # pragma: no cov
         ... ./logs/run_id=<run-id>/stdout.txt
         ... ./logs/run_id=<run-id>/stderr.txt

-        :param message: A message after making.
+        :param message: (str) A message after making.
+        :param level: (str) A log level.
         :param is_err: A flag for writing with an error trace or not.
         """
         if not dynamic("enable_write_log", extras=self.extras):
             return

-
-        trace_meta: TraceMeta = TraceMeta.make(
+        mode: Literal["stdout", "stderr"] = "stderr" if is_err else "stdout"
+        trace_meta: TraceMeta = TraceMeta.make(
+            mode=mode, level=level, message=message
+        )

-        with (self.pointer / f"{
+        with (self.pointer / f"{mode}.txt").open(
             mode="at", encoding="utf-8"
         ) as f:
             fmt: str = dynamic("log_format_file", extras=self.extras)
@@ -464,7 +483,7 @@ class FileTrace(BaseTrace): # pragma: no cov
             f.write(trace_meta.model_dump_json() + "\n")

     async def awriter(
-        self, message: str, is_err: bool = False
+        self, message: str, level: str, is_err: bool = False
     ) -> None: # pragma: no cov
         """Write with async mode."""
         if not dynamic("enable_write_log", extras=self.extras):
@@ -475,11 +494,13 @@ class FileTrace(BaseTrace): # pragma: no cov
         except ImportError as e:
             raise ImportError("Async mode need aiofiles package") from e

-
-        trace_meta: TraceMeta = TraceMeta.make(
+        mode: Literal["stdout", "stderr"] = "stderr" if is_err else "stdout"
+        trace_meta: TraceMeta = TraceMeta.make(
+            mode=mode, level=level, message=message
+        )

         async with aiofiles.open(
-            self.pointer / f"{
+            self.pointer / f"{mode}.txt", mode="at", encoding="utf-8"
         ) as f:
             fmt: str = dynamic("log_format_file", extras=self.extras)
             await f.write(f"{fmt}\n".format(**trace_meta.model_dump()))
@@ -507,7 +528,7 @@ class SQLiteTrace(BaseTrace): # pragma: no cov
     @classmethod
     def find_traces(
         cls,
-        path: Path
+        path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> Iterator[TraceData]: ...

@@ -517,15 +538,19 @@ class SQLiteTrace(BaseTrace): # pragma: no cov
         run_id: str,
         force_raise: bool = True,
         *,
-        path: Path
+        path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> TraceData: ...

     def make_message(self, message: str) -> str: ...

-    def writer(
+    def writer(
+        self, message: str, level: str, is_err: bool = False
+    ) -> None: ...

-    def awriter(
+    def awriter(
+        self, message: str, level: str, is_err: bool = False
+    ) -> None: ...


 Trace = TypeVar("Trace", bound=BaseTrace)
@@ -538,7 +563,7 @@ TraceModel = Union[
 def get_trace(
     run_id: str,
     *,
-    parent_run_id: str
+    parent_run_id: Optional[str] = None,
     extras: Optional[DictData] = None,
 ) -> TraceModel: # pragma: no cov
     """Get dynamic Trace instance from the core config (it can override by an
@@ -553,9 +578,11 @@ def get_trace(
     """
     if dynamic("trace_path", extras=extras).is_file():
         return SQLiteTrace(
-            run_id, parent_run_id=parent_run_id, extras=(extras or {})
+            run_id=run_id, parent_run_id=parent_run_id, extras=(extras or {})
         )
-    return FileTrace(
+    return FileTrace(
+        run_id=run_id, parent_run_id=parent_run_id, extras=(extras or {})
+    )


 class BaseAudit(BaseModel, ABC):
@@ -619,7 +646,7 @@ class BaseAudit(BaseModel, ABC):
     def find_audit_with_release(
         cls,
         name: str,
-        release: datetime
+        release: Optional[datetime] = None,
         *,
         extras: Optional[DictData] = None,
     ) -> Self:
@@ -631,7 +658,7 @@ class BaseAudit(BaseModel, ABC):
         """To something before end up of initial log model."""

     @abstractmethod
-    def save(self, excluded: list[str]
+    def save(self, excluded: Optional[list[str]]) -> None: # pragma: no cov
         """Save this model logging to target logging store."""
         raise NotImplementedError("Audit should implement ``save`` method.")

@@ -676,7 +703,7 @@ class FileAudit(BaseAudit):
     def find_audit_with_release(
         cls,
         name: str,
-        release: datetime
+        release: Optional[datetime] = None,
         *,
         extras: Optional[DictData] = None,
     ) -> Self:
@@ -749,7 +776,7 @@ class FileAudit(BaseAudit):
             "audit_path", extras=self.extras
         ) / self.filename_fmt.format(name=self.name, release=self.release)

-    def save(self, excluded: list[str]
+    def save(self, excluded: Optional[list[str]]) -> Self:
         """Save logging data that receive a context data from a workflow
         execution result.

@@ -758,7 +785,7 @@ class FileAudit(BaseAudit):

         :rtype: Self
         """
-        trace:
+        trace: TraceModel = get_trace(
             self.run_id,
             parent_run_id=self.parent_run_id,
             extras=self.extras,
@@ -818,16 +845,16 @@ class SQLiteAudit(BaseAudit): # pragma: no cov
     def find_audit_with_release(
         cls,
         name: str,
-        release: datetime
+        release: Optional[datetime] = None,
         *,
         extras: Optional[DictData] = None,
     ) -> Self: ...

-    def save(self, excluded: list[str]
+    def save(self, excluded: Optional[list[str]]) -> SQLiteAudit:
         """Save logging data that receive a context data from a workflow
         execution result.
         """
-        trace:
+        trace: TraceModel = get_trace(
             self.run_id,
             parent_run_id=self.parent_run_id,
             extras=self.extras,
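Functionally, every trace write now carries an explicit log level, and the target stream is derived from `is_err` instead of being passed around. A minimal standalone sketch of that metadata shape (assuming pydantic v2; the `write` helper is hypothetical):

    from typing import Literal

    from pydantic import BaseModel, Field

    class TraceMeta(BaseModel):
        mode: Literal["stdout", "stderr"] = Field(description="A meta mode.")
        level: str = Field(description="A log level.")
        message: str

    def write(message: str, level: str, is_err: bool = False) -> str:
        # The stream is chosen from the error flag; the level is recorded as-is.
        mode: Literal["stdout", "stderr"] = "stderr" if is_err else "stdout"
        return TraceMeta(mode=mode, level=level, message=message).model_dump_json()

    print(write("job started", level="info"))
    print(write("job failed", level="error", is_err=True))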
ddeutil/workflow/params.py
CHANGED
@@ -82,7 +82,9 @@ class DateParam(DefaultParam): # pragma: no cov
         description="A default date that make from the current date func.",
     )

-    def receive(
+    def receive(
+        self, value: Optional[Union[str, datetime, date]] = None
+    ) -> date:
         """Receive value that match with date. If an input value pass with
         None, it will use default value instead.

@@ -121,7 +123,9 @@ class DatetimeParam(DefaultParam):
         ),
     )

-    def receive(
+    def receive(
+        self, value: Optional[Union[str, datetime, date]] = None
+    ) -> datetime:
         """Receive value that match with datetime. If an input value pass with
         None, it will use default value instead.

@@ -155,11 +159,11 @@ class StrParam(DefaultParam):

     type: Literal["str"] = "str"

-    def receive(self, value: str
+    def receive(self, value: Optional[str] = None) -> Optional[str]:
         """Receive value that match with str.

         :param value: A value that want to validate with string parameter type.
-        :rtype: str
+        :rtype: Optional[str]
         """
         if value is None:
             return self.default
@@ -171,7 +175,7 @@ class IntParam(DefaultParam):

     type: Literal["int"] = "int"

-    def receive(self, value: int
+    def receive(self, value: Optional[int] = None) -> Optional[int]:
         """Receive value that match with int.

         :param value: A value that want to validate with integer parameter type.
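The parameter classes follow the same convention: `receive(None)` falls back to the declared default. A reduced standalone sketch (assuming pydantic v2; `DefaultParam` and the package's validation logic are omitted):

    from typing import Literal, Optional

    from pydantic import BaseModel

    class StrParam(BaseModel):
        type: Literal["str"] = "str"
        default: Optional[str] = None

        def receive(self, value: Optional[str] = None) -> Optional[str]:
            # A missing value resolves to the declared default.
            if value is None:
                return self.default
            return str(value)

    print(StrParam(default="dev").receive())        # dev
    print(StrParam(default="dev").receive("prod"))  # prod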
|