ddeutil-workflow 0.0.16__py3-none-any.whl → 0.0.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/{cron.py → __cron.py} +12 -6
- ddeutil/workflow/__init__.py +1 -0
- ddeutil/workflow/api.py +1 -2
- ddeutil/workflow/cli.py +1 -2
- ddeutil/workflow/conf.py +273 -108
- ddeutil/workflow/job.py +79 -39
- ddeutil/workflow/on.py +10 -7
- ddeutil/workflow/repeat.py +1 -2
- ddeutil/workflow/route.py +1 -2
- ddeutil/workflow/scheduler.py +8 -9
- ddeutil/workflow/stage.py +50 -19
- ddeutil/workflow/utils.py +21 -21
- {ddeutil_workflow-0.0.16.dist-info → ddeutil_workflow-0.0.17.dist-info}/METADATA +26 -23
- ddeutil_workflow-0.0.17.dist-info/RECORD +21 -0
- ddeutil/workflow/log.py +0 -195
- ddeutil_workflow-0.0.16.dist-info/RECORD +0 -22
- {ddeutil_workflow-0.0.16.dist-info → ddeutil_workflow-0.0.17.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.16.dist-info → ddeutil_workflow-0.0.17.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.16.dist-info → ddeutil_workflow-0.0.17.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.16.dist-info → ddeutil_workflow-0.0.17.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.16"
+__version__: str = "0.0.17"
ddeutil/workflow/{cron.py → __cron.py}
RENAMED
@@ -18,10 +18,7 @@ from ddeutil.core import (
     isinstance_check,
     must_split,
 )
-from ddeutil.core.dtutils import (
-    next_date,
-    replace_date,
-)
+from ddeutil.core.dtutils import next_date, replace_date

 WEEKDAYS: dict[str, int] = {
     "Sun": 0,
@@ -37,7 +34,7 @@ WEEKDAYS: dict[str, int] = {
 class CronYearLimit(Exception): ...


-def str2cron(value: str) -> str:
+def str2cron(value: str) -> str:  # pragma: no cov
     """Convert Special String with the @ prefix to Crontab value.

     :param value: A string value that want to convert to cron value.
@@ -69,6 +66,8 @@ def str2cron(value: str) -> str:

 @dataclass(frozen=True)
 class Unit:
+    """Unit dataclass for CronPart object."""
+
     name: str
     range: partial
     min: int
@@ -85,6 +84,8 @@ class Unit:

 @dataclass
 class Options:
+    """Options dataclass for config CronPart object."""
+
     output_weekday_names: bool = False
     output_month_names: bool = False
     output_hashes: bool = False
@@ -158,7 +159,12 @@ CRON_UNITS_YEAR: tuple[Unit, ...] = CRON_UNITS + (

 @total_ordering
 class CronPart:
-    """Part of Cron object that represent a collection of positive integers.
+    """Part of Cron object that represent a collection of positive integers.
+
+    :param unit: A Unit dataclass object.
+    :param values: A crontab values that want to validate
+    :param options: A Options dataclass object.
+    """

     __slots__: tuple[str, ...] = (
         "unit",
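Note that the module is renamed from cron.py to the private __cron.py, so any external import of the old path breaks in 0.0.17. A minimal sketch of the adjusted import; the @daily expansion shown is the conventional crontab alias and is an assumption here, since the alias table itself is outside these hunks:

    from ddeutil.workflow.__cron import str2cron

    # Assumed conventional alias expansion; verify against the module source.
    str2cron("@daily")  # e.g. "0 0 * * *"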
ddeutil/workflow/__init__.py
CHANGED
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .conf import Config, FileLog, Loader
 from .exceptions import (
     JobException,
     ParamValueException,
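With this re-export, downstream code can import the config and logging models straight from the package root, a small sketch:

    from ddeutil.workflow import Config, FileLog, Loader

    conf = Config()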
ddeutil/workflow/api.py
CHANGED
@@ -21,8 +21,7 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel

 from .__about__ import __version__
-from .conf import config
-from .log import get_logger
+from .conf import config, get_logger
 from .repeat import repeat_at, repeat_every
 from .scheduler import WorkflowTaskData

ddeutil/workflow/cli.py
CHANGED
@@ -13,8 +13,7 @@ from typing import Annotated, Optional
 from ddeutil.core import str2list
 from typer import Argument, Option, Typer

-from .conf import config
-from .log import get_logger
+from .conf import config, get_logger

 logger = get_logger("ddeutil.workflow")
 cli: Typer = Typer()
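Both api.py and cli.py pick up get_logger from the consolidated conf module, matching the removal of log.py shown in the file list above. Downstream code needs the same one-line change, sketched here:

    # 0.0.16
    from ddeutil.workflow.log import get_logger
    # 0.0.17
    from ddeutil.workflow.conf import get_logger

    logger = get_logger("ddeutil.workflow")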
ddeutil/workflow/conf.py
CHANGED
@@ -6,26 +6,32 @@
 from __future__ import annotations

 import json
+import logging
 import os
+from abc import ABC, abstractmethod
 from collections.abc import Iterator
-from datetime import timedelta
-from functools import cached_property
+from datetime import datetime, timedelta
+from functools import cached_property, lru_cache
 from pathlib import Path
-from typing import
+from typing import ClassVar, Optional, TypeVar, Union
 from zoneinfo import ZoneInfo

 from ddeutil.core import import_string, str2bool
-from ddeutil.io import
+from ddeutil.io import PathSearch, YamlFlResolve
 from dotenv import load_dotenv
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import model_validator
+from typing_extensions import Self
+
+from .__types import DictData

-load_dotenv()
-env = os.getenv
-DictData = dict[str, Any]
 AnyModel = TypeVar("AnyModel", bound=BaseModel)
 AnyModelType = type[AnyModel]

+load_dotenv()
+
+env = os.getenv
+

 class Config:
     """Config object for keeping application configuration on current session
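DictData is no longer a local dict alias but now comes from the package's __types module, so code that used the old alias from conf should import it from there, a sketch (note the module is private by convention):

    from ddeutil.workflow.__types import DictData

    data: DictData = {"type": "Workflow"}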
@@ -39,6 +45,14 @@ class Config:
         os.getenv("WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE", "true")
     )

+    # NOTE: Register
+    regis_hook_str: str = os.getenv(
+        "WORKFLOW_CORE_REGISTRY", "ddeutil.workflow"
+    )
+    regis_filter_str: str = os.getenv(
+        "WORKFLOW_CORE_REGISTRY_FILTER", "ddeutil.workflow.utils"
+    )
+
     # NOTE: Logging
     debug: bool = str2bool(os.getenv("WORKFLOW_LOG_DEBUG_MODE", "true"))
     enable_write_log: bool = str2bool(
@@ -54,6 +68,9 @@ class Config:
     )

     # NOTE: Job
+    job_raise_error: bool = str2bool(
+        env("WORKFLOW_CORE_JOB_RAISE_ERROR", "true")
+    )
     job_default_id: bool = str2bool(
         env("WORKFLOW_CORE_JOB_DEFAULT_ID", "false")
     )
@@ -69,7 +86,7 @@ class Config:
     max_schedule_per_process: int = int(
         env("WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS", "100")
    )
-
+    stop_boundary_delta_str: str = env(
         "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
     )

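The stop boundary is configured as a JSON object whose keys become timedelta keyword arguments (the parsing appears in the next hunk). A self-contained sketch of that parsing with the default value:

    import json
    from datetime import timedelta

    raw = '{"minutes": 5, "seconds": 20}'   # the documented default
    delta = timedelta(**json.loads(raw))    # -> 0:05:20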
@@ -89,93 +106,37 @@ class Config:
         )
         try:
             self.stop_boundary_delta: timedelta = timedelta(
-                **json.loads(self.
+                **json.loads(self.stop_boundary_delta_str)
             )
         except Exception as err:
             raise ValueError(
                 "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
-                f"timedelta with {self.
+                f"timedelta with {self.stop_boundary_delta_str}."
             ) from err

-    def refresh_dotenv(self):
+    def refresh_dotenv(self) -> Self:
         """Reload environment variables from the current stage."""
         self.tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
         self.stage_raise_error: bool = str2bool(
             env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
         )
+        return self

+    @property
+    def conf_path(self) -> Path:
+        """Config path that use root_path class argument for this construction.

-
-    """Engine Pydantic Model for keeping application path."""
-
-    paths: Paths = Field(default_factory=Paths)
-    registry: list[str] = Field(
-        default_factory=lambda: ["ddeutil.workflow"],  # pragma: no cover
-    )
-    registry_filter: list[str] = Field(
-        default_factory=lambda: ["ddeutil.workflow.utils"],  # pragma: no cover
-    )
-
-    @model_validator(mode="before")
-    def __prepare_registry(cls, values: DictData) -> DictData:
-        """Prepare registry value that passing with string type. It convert the
-        string type to list of string.
+        :rtype: Path
         """
-
-        values["registry"] = [_regis]
-        if (_regis_filter := values.get("registry_filter")) and isinstance(
-            _regis_filter, str
-        ):
-            values["registry_filter"] = [_regis_filter]
-        return values
-
-
-class ConfParams(BaseModel):
-    """Params Model"""
-
-    engine: Engine = Field(
-        default_factory=Engine,
-        description="A engine mapping values.",
-    )
-
-
-def load_config() -> ConfParams:
-    """Load Config data from ``workflows-conf.yaml`` file.
+        return self.root_path / os.getenv("WORKFLOW_CORE_PATH_CONF", "conf")

-
-
-
-    :var engine.registry_filter:
-    :var paths.root:
-    :var paths.conf:
-    """
-    root_path: str = config.root_path
+    @property
+    def regis_hook(self) -> list[str]:
+        return [r.strip() for r in self.regis_hook_str.split(",")]

-
-
-
-
-    regis_filter: list[str] = ["ddeutil.workflow.utils"]
-    if regis_filter_env := os.getenv("WORKFLOW_CORE_REGISTRY_FILTER"):
-        regis_filter = [r.strip() for r in regis_filter_env.split(",")]
-
-    conf_path: str = (
-        f"{root_path}/{conf_env}"
-        if (conf_env := os.getenv("WORKFLOW_CORE_PATH_CONF"))
-        else None
-    )
-    return ConfParams.model_validate(
-        obj={
-            "engine": {
-                "registry": regis,
-                "registry_filter": regis_filter,
-                "paths": {
-                    "root": root_path,
-                    "conf": conf_path,
-                },
-            },
-        }
-    )
+    @property
+    def regis_filter(self) -> list[str]:
+        return [r.strip() for r in self.regis_filter_str.split(",")]


 class SimLoad:
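A sketch of the env-driven registry lookup introduced above; the extra module name is hypothetical, and the variable must be set before the conf module is imported because the class attribute reads it at import time:

    import os

    os.environ["WORKFLOW_CORE_REGISTRY"] = "ddeutil.workflow,my_project.tasks"

    from ddeutil.workflow.conf import Config

    Config().regis_hook  # -> ['ddeutil.workflow', 'my_project.tasks']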
@@ -183,7 +144,7 @@ class SimLoad:
     value like name of workflow or on.

     :param name: A name of config data that will read by Yaml Loader object.
-    :param
+    :param conf: A Params model object.
     :param externals: An external parameters

     Noted:
@@ -201,21 +162,19 @@ class SimLoad:
     def __init__(
         self,
         name: str,
-
+        conf: Config,
         externals: DictData | None = None,
     ) -> None:
         self.data: DictData = {}
-        for file in PathSearch(
-            if
-                data := YamlFlResolve(file).read().get(name, {})
-            ):
+        for file in PathSearch(conf.conf_path).files:
+            if data := self.filter_suffix(file, name):
                 self.data = data

         # VALIDATE: check the data that reading should not empty.
         if not self.data:
             raise ValueError(f"Config {name!r} does not found on conf path")

-        self.
+        self.conf: Config = conf
         self.externals: DictData = externals or {}
         self.data.update(self.externals)
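SimLoad now receives the Config object directly instead of the removed ConfParams model. A usage sketch with a hypothetical config name:

    from ddeutil.workflow.conf import Config, SimLoad

    loader = SimLoad("my-workflow", conf=Config(), externals={"run_mode": "dev"})
    loader.type  # resolved from the 'type' key of the matched YAML document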
@@ -223,7 +182,7 @@ class SimLoad:
     def finds(
         cls,
         obj: object,
-
+        conf: Config,
         *,
         include: list[str] | None = None,
         exclude: list[str] | None = None,
@@ -233,23 +192,31 @@ class SimLoad:
         adds-on.

         :param obj: A object that want to validate matching before return.
-        :param
+        :param conf: A config object.
         :param include:
         :param exclude:
         :rtype: Iterator[tuple[str, DictData]]
         """
         exclude: list[str] = exclude or []
-        for file in PathSearch(
-
-
-
-
-
-
-
-
-
+        for file in PathSearch(conf.conf_path).files:
+            for key, data in cls.filter_suffix(file).items():
+
+                if key in exclude:
+                    continue
+
+                if issubclass(get_type(data["type"], conf), obj):
+                    yield key, (
+                        {k: data[k] for k in data if k in include}
+                        if include
+                        else data
+                    )
+
+    @classmethod
+    def filter_suffix(cls, file: Path, name: str | None = None) -> DictData:
+        if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
+            values: DictData = YamlFlResolve(file).read()
+            return values.get(name, {}) if name else values
+        return {}

     @cached_property
     def type(self) -> AnyModelType:
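The new filter_suffix helper only opens files ending in .yml or .yaml and returns an empty dict for everything else, so stray files in the conf path are skipped cheaply, a sketch:

    from pathlib import Path
    from ddeutil.workflow.conf import SimLoad

    SimLoad.filter_suffix(Path("conf/demo.json"))  # -> {} (file is never read)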
@@ -258,11 +225,11 @@ class SimLoad:

         :rtype: AnyModelType
         """
-        if
-
-
-
-
+        if _typ := self.data.get("type"):
+            return get_type(_typ, self.conf)
+        raise ValueError(
+            f"the 'type' value: {_typ} does not exists in config data."
+        )


 class Loader(SimLoad):
@@ -288,14 +255,14 @@ class Loader(SimLoad):
         :param exclude:
         """
         return super().finds(
-            obj=obj,
+            obj=obj, conf=Config(), include=include, exclude=exclude
         )

     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name,
+        super().__init__(name, conf=Config(), externals=externals)


-def get_type(t: str, params:
+def get_type(t: str, params: Config) -> AnyModelType:
     """Return import type from string importable value in the type key.

     :param t: A importable type string.
@@ -307,7 +274,7 @@ def get_type(t: str, params: ConfParams) -> AnyModelType:
         # NOTE: Auto adding module prefix if it does not set
         return import_string(f"ddeutil.workflow.{t}")
     except ModuleNotFoundError:
-        for registry in params.
+        for registry in params.regis_hook:
             try:
                 return import_string(f"{registry}.{t}")
             except ModuleNotFoundError:
@@ -316,3 +283,201 @@ def get_type(t: str, params: ConfParams) -> AnyModelType:


 config = Config()
+
+
+@lru_cache
+def get_logger(name: str):
+    """Return logger object with an input module name.
+
+    :param name: A module name that want to log.
+    """
+    logger = logging.getLogger(name)
+    formatter = logging.Formatter(
+        fmt=(
+            "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
+            "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
+            "(%(filename)s:%(lineno)s)"
+        ),
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+    stream = logging.StreamHandler()
+    stream.setFormatter(formatter)
+    logger.addHandler(stream)
+
+    logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
+    return logger
+
+
+class BaseLog(BaseModel, ABC):
+    """Base Log Pydantic Model with abstraction class property that implement
+    only model fields. This model should to use with inherit to logging
+    sub-class like file, sqlite, etc.
+    """
+
+    name: str = Field(description="A workflow name.")
+    on: str = Field(description="A cronjob string of this piepline schedule.")
+    release: datetime = Field(description="A release datetime.")
+    context: DictData = Field(
+        default_factory=dict,
+        description=(
+            "A context data that receive from a workflow execution result.",
+        ),
+    )
+    parent_run_id: Optional[str] = Field(default=None)
+    run_id: str
+    update: datetime = Field(default_factory=datetime.now)
+
+    @model_validator(mode="after")
+    def __model_action(self) -> Self:
+        """Do before the Log action with WORKFLOW_LOG_ENABLE_WRITE env variable.
+
+        :rtype: Self
+        """
+        if config.enable_write_log:
+            self.do_before()
+        return self
+
+    def do_before(self) -> None:  # pragma: no cov
+        """To something before end up of initial log model."""
+
+    @abstractmethod
+    def save(self, excluded: list[str] | None) -> None:  # pragma: no cov
+        """Save this model logging to target logging store."""
+        raise NotImplementedError("Log should implement ``save`` method.")
+
+
+class FileLog(BaseLog):
+    """File Log Pydantic Model that use to saving log data from result of
+    workflow execution. It inherit from BaseLog model that implement the
+    ``self.save`` method for file.
+    """
+
+    filename_fmt: ClassVar[str] = (
+        "./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
+    )
+
+    def do_before(self) -> None:
+        """Create directory of release before saving log file."""
+        self.pointer().mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def find_logs(cls, name: str) -> Iterator[Self]:
+        """Generate the logging data that found from logs path with specific a
+        workflow name.
+
+        :param name: A workflow name that want to search release logging data.
+        """
+        pointer: Path = config.root_path / f"./logs/workflow={name}"
+        if not pointer.exists():
+            raise FileNotFoundError(
+                f"Pointer: ./logs/workflow={name} does not found."
+            )
+
+        for file in pointer.glob("./release=*/*.log"):
+            with file.open(mode="r", encoding="utf-8") as f:
+                yield cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def find_log_latest(
+        cls,
+        name: str,
+        release: datetime | None = None,
+    ) -> Self:
+        """Return the logging data that found from logs path with specific
+        workflow name and release values. If a release does not pass to an input
+        argument, it will return the latest release from the current log path.
+
+        :raise FileNotFoundError:
+        :raise NotImplementedError:
+
+        :rtype: Self
+        """
+        if release is None:
+            raise NotImplementedError("Find latest log does not implement yet.")
+
+        pointer: Path = (
+            config.root_path
+            / f"./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
+        )
+        if not pointer.exists():
+            raise FileNotFoundError(
+                f"Pointer: ./logs/workflow={name}/"
+                f"release={release:%Y%m%d%H%M%S} does not found."
+            )
+
+        with max(pointer.glob("./*.log"), key=os.path.getctime).open(
+            mode="r", encoding="utf-8"
+        ) as f:
+            return cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def is_pointed(
+        cls,
+        name: str,
+        release: datetime,
+        *,
+        queue: list[datetime] | None = None,
+    ) -> bool:
+        """Check this log already point in the destination.
+
+        :param name: A workflow name.
+        :param release: A release datetime.
+        :param queue: A list of queue of datetime that already run in the
+            future.
+        """
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            return False
+
+        # NOTE: create pointer path that use the same logic of pointer method.
+        pointer: Path = config.root_path / cls.filename_fmt.format(
+            name=name, release=release
+        )
+
+        if not queue:
+            return pointer.exists()
+        return pointer.exists() or (release in queue)
+
+    def pointer(self) -> Path:
+        """Return release directory path that was generated from model data.
+
+        :rtype: Path
+        """
+        return config.root_path / self.filename_fmt.format(
+            name=self.name, release=self.release
+        )
+
+    def save(self, excluded: list[str] | None) -> Self:
+        """Save logging data that receive a context data from a workflow
+        execution result.
+
+        :param excluded: An excluded list of key name that want to pass in the
+            model_dump method.
+        :rtype: Self
+        """
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            return self
+
+        log_file: Path = self.pointer() / f"{self.run_id}.log"
+        log_file.write_text(
+            json.dumps(
+                self.model_dump(exclude=excluded),
+                default=str,
+                indent=2,
+            ),
+            encoding="utf-8",
+        )
+        return self
+
+
+class SQLiteLog(BaseLog):  # pragma: no cov
+
+    def save(self, excluded: list[str] | None) -> None:
+        raise NotImplementedError("SQLiteLog does not implement yet.")
+
+
+Log = Union[
+    FileLog,
+    SQLiteLog,
+]