ddeutil-workflow 0.0.15__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__version__: str = "0.0.15"
+__version__: str = "0.0.17"
@@ -18,10 +18,7 @@ from ddeutil.core import (
     isinstance_check,
     must_split,
 )
-from ddeutil.core.dtutils import (
-    next_date,
-    replace_date,
-)
+from ddeutil.core.dtutils import next_date, replace_date
 
 WEEKDAYS: dict[str, int] = {
     "Sun": 0,
@@ -37,7 +34,7 @@ WEEKDAYS: dict[str, int] = {
 class CronYearLimit(Exception): ...
 
 
-def str2cron(value: str) -> str:
+def str2cron(value: str) -> str:  # pragma: no cov
     """Convert Special String with the @ prefix to Crontab value.
 
     :param value: A string value that want to convert to cron value.
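This hunk only adds a `# pragma: no cov` marker to the signature. For orientation, these are the conventional `@`-alias expansions such a converter targets; the function body is not part of this diff, so treat the mapping as the standard crontab convention rather than verified output:

```python
# Conventional crontab @-alias expansions; illustrative only, since the
# body of str2cron is not shown in this diff.
CRON_ALIASES: dict[str, str] = {
    "@yearly": "0 0 1 1 *",
    "@monthly": "0 0 1 * *",
    "@weekly": "0 0 * * 0",
    "@daily": "0 0 * * *",
    "@hourly": "0 * * * *",
}
```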
@@ -69,6 +66,8 @@ def str2cron(value: str) -> str:
 
 @dataclass(frozen=True)
 class Unit:
+    """Unit dataclass for CronPart object."""
+
     name: str
     range: partial
     min: int
@@ -85,6 +84,8 @@ class Unit:
 
 @dataclass
 class Options:
+    """Options dataclass for config CronPart object."""
+
     output_weekday_names: bool = False
     output_month_names: bool = False
     output_hashes: bool = False
@@ -158,7 +159,12 @@ CRON_UNITS_YEAR: tuple[Unit, ...] = CRON_UNITS + (
 
 @total_ordering
 class CronPart:
-    """Part of Cron object that represent a collection of positive integers."""
+    """Part of Cron object that represent a collection of positive integers.
+
+    :param unit: A Unit dataclass object.
+    :param values: A crontab values that want to validate
+    :param options: A Options dataclass object.
+    """
 
     __slots__: tuple[str, ...] = (
         "unit",
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .conf import Config, FileLog, Loader
 from .exceptions import (
     JobException,
     ParamValueException,
@@ -16,7 +16,7 @@ from re import (
     Match,
     Pattern,
 )
-from typing import Any, Optional, Union
+from typing import Any, Optional, TypedDict, Union
 
 from typing_extensions import Self
 
@@ -24,8 +24,11 @@ TupleStr = tuple[str, ...]
 DictData = dict[str, Any]
 DictStr = dict[str, str]
 Matrix = dict[str, Union[list[str], list[int]]]
-MatrixInclude = list[dict[str, Union[str, int]]]
-MatrixExclude = list[dict[str, Union[str, int]]]
+
+
+class Context(TypedDict):
+    params: dict[str, Any]
+    jobs: dict[str, Any]
 
 
 @dataclass(frozen=True)
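The removed `MatrixInclude`/`MatrixExclude` aliases were plain `list[dict[...]]` types; the new `Context` TypedDict names the two keys an execution context carries. A standalone sketch of what the stricter shape buys (reconstructed here, not imported from the package; the values are illustrative):

```python
# Standalone reconstruction of the new Context shape; the class body is
# copied from the hunk above, the usage below is illustrative.
from typing import Any, TypedDict


class Context(TypedDict):
    params: dict[str, Any]
    jobs: dict[str, Any]


# A type checker can now flag a missing or misspelled key at assignment
# time, which the old bare dict aliases could not.
ctx: Context = {
    "params": {"run-date": "2024-10-01"},
    "jobs": {"first-job": {"stages": {}}},
}
```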
@@ -56,20 +59,24 @@ class Re:
     # Regular expression:
     # - Version 1:
     #   \${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
-    # - Version 2 (2024-09-30):
+    # - Version 2: (2024-09-30):
     #   \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
+    # - Version 3: (2024-10-05):
+    #   \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
     #
     # Examples:
     # - ${{ params.data_dt }}
     # - ${{ params.source.table }}
+    # - ${{ params.datetime | fmt('%Y-%m-%d') }}
+    # - ${{ params.source?.schema }}
     #
     __re_caller: str = r"""
         \$
         {{
             \s*
             (?P<caller>
-                (?P<caller_prefix>(?:[a-zA-Z_-]+\.)*)
-                (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+)
+                (?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)
+                (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??)
             )
             \s*
             (?P<post_filters>
@@ -109,5 +116,10 @@ class Re:
 
     @classmethod
     def finditer_caller(cls, value) -> Iterator[CallerRe]:
+        """Generate CallerRe object that create from matching object that
+        extract with re.finditer function.
+
+        :rtype: Iterator[CallerRe]
+        """
         for found in cls.RE_CALLER.finditer(value):
             yield CallerRe.from_regex(found)
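Version 3 of the caller pattern adds an optional `?` before the dot in `caller_prefix` and at the end of `caller_last`, which is what lets templates like `${{ params.source?.schema }}` match. A quick standalone check of the pattern quoted above, compiled with `re.VERBOSE` as the multi-line raw string implies; the sample string is illustrative:

```python
# Standalone check of the Version 3 caller pattern from the hunk above.
import re

RE_CALLER = re.compile(
    r"""
    \$
    {{
        \s*
        (?P<caller>
            (?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)
            (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??)
        )
        \s*
        (?P<post_filters>
            (?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*
        )
    }}
    """,
    re.VERBOSE,
)

for m in RE_CALLER.finditer("select * from ${{ params.source?.schema }}"):
    print(m.group("caller"))         # params.source?.schema
    print(m.group("caller_prefix"))  # params.source?.
```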
ddeutil/workflow/api.py CHANGED
@@ -7,7 +7,6 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
-import os
 import uuid
 from collections.abc import AsyncIterator
 from datetime import datetime, timedelta
@@ -15,7 +14,6 @@ from queue import Empty, Queue
 from threading import Thread
 from typing import TypedDict
 
-from ddeutil.core import str2bool
 from dotenv import load_dotenv
 from fastapi import FastAPI
 from fastapi.middleware.gzip import GZipMiddleware
@@ -23,7 +21,7 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
 from .__about__ import __version__
-from .log import get_logger
+from .conf import config, get_logger
 from .repeat import repeat_at, repeat_every
 from .scheduler import WorkflowTaskData
 
@@ -131,12 +129,12 @@ async def message_upper(payload: Payload):
     return await get_result(request_id)
 
 
-if str2bool(os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")):
+if config.enable_route_workflow:
     from .route import workflow
 
     app.include_router(workflow)
 
-if str2bool(os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")):
+if config.enable_route_schedule:
     from .route import schedule
     from .scheduler import workflow_task
 
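Both route toggles now come from the shared `config` object (defined in `conf.py` below) instead of ad-hoc `str2bool(os.getenv(...))` calls at each site, so the environment is parsed once at import and every consumer sees the same defaults. A minimal sketch of the pattern, with a simplified stand-in for `ddeutil.core.str2bool`:

```python
# Minimal repro of the centralised-flag pattern; str2bool here is a
# simplified stand-in for ddeutil.core.str2bool.
import os


def str2bool(value: str) -> bool:
    return value.strip().lower() in ("y", "yes", "t", "true", "1", "on")


class Config:
    # Parsed once, at class-definition time.
    enable_route_workflow: bool = str2bool(
        os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
    )


config = Config()

if config.enable_route_workflow:
    print("mounting workflow router")
```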
ddeutil/workflow/cli.py CHANGED
@@ -6,16 +6,14 @@
 from __future__ import annotations
 
 import json
-import os
 from datetime import datetime
 from enum import Enum
 from typing import Annotated, Optional
-from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2list
 from typer import Argument, Option, Typer
 
-from .log import get_logger
+from .conf import config, get_logger
 
 logger = get_logger("ddeutil.workflow")
 cli: Typer = Typer()
@@ -73,9 +71,7 @@ def schedule(
     excluded: list[str] = str2list(excluded) if excluded else []
     externals: str = externals or "{}"
     if stop:
-        stop: datetime = stop.astimezone(
-            tz=ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
-        )
+        stop: datetime = stop.astimezone(tz=config.tz)
 
     from .scheduler import workflow_runner
 
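Same consolidation in the CLI: the timezone is resolved once as `config.tz` rather than re-reading `WORKFLOW_CORE_TIMEZONE` inline. Equivalent stdlib-only behaviour, with `config.tz` stood in by its default value:

```python
# What the simplified line does, with config.tz stood in by its default.
from datetime import datetime
from zoneinfo import ZoneInfo

tz = ZoneInfo("UTC")  # default of WORKFLOW_CORE_TIMEZONE

stop = datetime(2024, 10, 1, 12, 30)
print(stop.astimezone(tz=tz))  # local time converted into the configured zone
```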
ddeutil/workflow/conf.py CHANGED
@@ -5,13 +5,31 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations
 
+import json
+import logging
 import os
+from abc import ABC, abstractmethod
+from collections.abc import Iterator
+from datetime import datetime, timedelta
+from functools import cached_property, lru_cache
+from pathlib import Path
+from typing import ClassVar, Optional, TypeVar, Union
 from zoneinfo import ZoneInfo
 
-from ddeutil.core import str2bool
+from ddeutil.core import import_string, str2bool
+from ddeutil.io import PathSearch, YamlFlResolve
 from dotenv import load_dotenv
+from pydantic import BaseModel, Field
+from pydantic.functional_validators import model_validator
+from typing_extensions import Self
+
+from .__types import DictData
+
+AnyModel = TypeVar("AnyModel", bound=BaseModel)
+AnyModelType = type[AnyModel]
 
 load_dotenv()
+
 env = os.getenv
 
 
@@ -21,25 +39,445 @@ class Config:
     """
 
     # NOTE: Core
+    root_path: Path = Path(os.getenv("WORKFLOW_ROOT_PATH", "."))
     tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
+    workflow_id_simple_mode: bool = str2bool(
+        os.getenv("WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE", "true")
+    )
+
+    # NOTE: Register
+    regis_hook_str: str = os.getenv(
+        "WORKFLOW_CORE_REGISTRY", "ddeutil.workflow"
+    )
+    regis_filter_str: str = os.getenv(
+        "WORKFLOW_CORE_REGISTRY_FILTER", "ddeutil.workflow.utils"
+    )
+
+    # NOTE: Logging
+    debug: bool = str2bool(os.getenv("WORKFLOW_LOG_DEBUG_MODE", "true"))
+    enable_write_log: bool = str2bool(
+        os.getenv("WORKFLOW_LOG_ENABLE_WRITE", "false")
+    )
 
     # NOTE: Stage
     stage_raise_error: bool = str2bool(
-        env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "true")
+        env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
     )
     stage_default_id: bool = str2bool(
         env("WORKFLOW_CORE_STAGE_DEFAULT_ID", "false")
     )
 
+    # NOTE: Job
+    job_raise_error: bool = str2bool(
+        env("WORKFLOW_CORE_JOB_RAISE_ERROR", "true")
+    )
+    job_default_id: bool = str2bool(
+        env("WORKFLOW_CORE_JOB_DEFAULT_ID", "false")
+    )
+
     # NOTE: Workflow
     max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
+    max_poking_pool_worker: int = int(
+        os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
+    )
+
+    # NOTE: Schedule App
+    max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
+    max_schedule_per_process: int = int(
+        env("WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS", "100")
+    )
+    stop_boundary_delta_str: str = env(
+        "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
+    )
+
+    # NOTE: API
+    enable_route_workflow: bool = str2bool(
+        os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
+    )
+    enable_route_schedule: bool = str2bool(
+        os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
+    )
 
     def __init__(self):
         if self.max_job_parallel < 0:
             raise ValueError(
-                f"MAX_JOB_PARALLEL should more than 0 but got "
+                f"``MAX_JOB_PARALLEL`` should more than 0 but got "
                 f"{self.max_job_parallel}."
             )
+        try:
+            self.stop_boundary_delta: timedelta = timedelta(
+                **json.loads(self.stop_boundary_delta_str)
+            )
+        except Exception as err:
+            raise ValueError(
+                "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
+                f"timedelta with {self.stop_boundary_delta_str}."
+            ) from err
+
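`WORKFLOW_APP_STOP_BOUNDARY_DELTA` is kept as a raw JSON string whose keys must be valid `timedelta` keyword arguments; `__init__` parses it eagerly so a bad value fails at startup rather than mid-schedule. The default parses like this:

```python
# How the default WORKFLOW_APP_STOP_BOUNDARY_DELTA value is parsed.
import json
from datetime import timedelta

raw = '{"minutes": 5, "seconds": 20}'
print(timedelta(**json.loads(raw)))  # 0:05:20
```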
+    def refresh_dotenv(self) -> Self:
+        """Reload environment variables from the current stage."""
+        self.tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
+        self.stage_raise_error: bool = str2bool(
+            env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
+        )
+        return self
+
+    @property
+    def conf_path(self) -> Path:
+        """Config path that use root_path class argument for this construction.
+
+        :rtype: Path
+        """
+        return self.root_path / os.getenv("WORKFLOW_CORE_PATH_CONF", "conf")
+
+    @property
+    def regis_hook(self) -> list[str]:
+        return [r.strip() for r in self.regis_hook_str.split(",")]
+
+    @property
+    def regis_filter(self) -> list[str]:
+        return [r.strip() for r in self.regis_filter_str.split(",")]
+
+
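The registry settings likewise stay as raw comma-separated strings and are only split on access through the two properties; equivalent standalone behaviour (the second entry is illustrative):

```python
# Lazy CSV split performed by the regis_hook / regis_filter properties.
regis_hook_str = "ddeutil.workflow, my.company.plugins"  # second entry hypothetical
print([r.strip() for r in regis_hook_str.split(",")])
# ['ddeutil.workflow', 'my.company.plugins']
```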
+class SimLoad:
+    """Simple Load Object that will search config data by given some identity
+    value like name of workflow or on.
+
+    :param name: A name of config data that will read by Yaml Loader object.
+    :param conf: A Params model object.
+    :param externals: An external parameters
+
+    Noted:
+
+        The config data should have ``type`` key for modeling validation that
+        make this loader know what is config should to do pass to.
+
+        ... <identity-key>:
+        ...     type: <importable-object>
+        ...     <key-data>: <value-data>
+        ...     ...
+
+    """
+
+    def __init__(
+        self,
+        name: str,
+        conf: Config,
+        externals: DictData | None = None,
+    ) -> None:
+        self.data: DictData = {}
+        for file in PathSearch(conf.conf_path).files:
+            if data := self.filter_suffix(file, name):
+                self.data = data
+
+        # VALIDATE: check the data that reading should not empty.
+        if not self.data:
+            raise ValueError(f"Config {name!r} does not found on conf path")
+
+        self.conf: Config = conf
+        self.externals: DictData = externals or {}
+        self.data.update(self.externals)
+
+    @classmethod
+    def finds(
+        cls,
+        obj: object,
+        conf: Config,
+        *,
+        include: list[str] | None = None,
+        exclude: list[str] | None = None,
+    ) -> Iterator[tuple[str, DictData]]:
+        """Find all data that match with object type in config path. This class
+        method can use include and exclude list of identity name for filter and
+        adds-on.
+
+        :param obj: A object that want to validate matching before return.
+        :param conf: A config object.
+        :param include:
+        :param exclude:
+        :rtype: Iterator[tuple[str, DictData]]
+        """
+        exclude: list[str] = exclude or []
+        for file in PathSearch(conf.conf_path).files:
+            for key, data in cls.filter_suffix(file).items():
+
+                if key in exclude:
+                    continue
+
+                if issubclass(get_type(data["type"], conf), obj):
+                    yield key, (
+                        {k: data[k] for k in data if k in include}
+                        if include
+                        else data
+                    )
+
+    @classmethod
+    def filter_suffix(cls, file: Path, name: str | None = None) -> DictData:
+        if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
+            values: DictData = YamlFlResolve(file).read()
+            return values.get(name, {}) if name else values
+        return {}
+
+    @cached_property
+    def type(self) -> AnyModelType:
+        """Return object of string type which implement on any registry. The
+        object type.
+
+        :rtype: AnyModelType
+        """
+        if _typ := self.data.get("type"):
+            return get_type(_typ, self.conf)
+        raise ValueError(
+            f"the 'type' value: {_typ} does not exists in config data."
+        )
+
+
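`SimLoad` scans every YAML file under `conf_path`, keeps the mapping stored under the requested identity key, and later resolves its mandatory `type` string through `get_type()`. A usage sketch, assuming a `conf/` directory under `WORKFLOW_ROOT_PATH` containing the hypothetical `wf-demo` entry shown in the comment:

```python
# Usage sketch for SimLoad; "wf-demo" and its YAML body are hypothetical.
#
#   conf/demo.yml
#   -------------
#   wf-demo:
#     type: Workflow
#     params: {run-date: {type: datetime}}
#
loader = SimLoad("wf-demo", conf=Config())
print(loader.data["type"])  # "Workflow"
print(loader.type)          # class resolved via get_type(), defined below
```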
235
+ class Loader(SimLoad):
236
+ """Loader Object that get the config `yaml` file from current path.
237
+
238
+ :param name: A name of config data that will read by Yaml Loader object.
239
+ :param externals: An external parameters
240
+ """
241
+
242
+ @classmethod
243
+ def finds(
244
+ cls,
245
+ obj: object,
246
+ *,
247
+ include: list[str] | None = None,
248
+ exclude: list[str] | None = None,
249
+ **kwargs,
250
+ ) -> DictData:
251
+ """Override the find class method from the Simple Loader object.
252
+
253
+ :param obj: A object that want to validate matching before return.
254
+ :param include:
255
+ :param exclude:
256
+ """
257
+ return super().finds(
258
+ obj=obj, conf=Config(), include=include, exclude=exclude
259
+ )
260
+
261
+ def __init__(self, name: str, externals: DictData) -> None:
262
+ super().__init__(name, conf=Config(), externals=externals)
263
+
264
+
+def get_type(t: str, params: Config) -> AnyModelType:
+    """Return import type from string importable value in the type key.
+
+    :param t: A importable type string.
+    :param params: A config parameters that use registry to search this
+        type.
+    :rtype: AnyModelType
+    """
+    try:
+        # NOTE: Auto adding module prefix if it does not set
+        return import_string(f"ddeutil.workflow.{t}")
+    except ModuleNotFoundError:
+        for registry in params.regis_hook:
+            try:
+                return import_string(f"{registry}.{t}")
+            except ModuleNotFoundError:
+                continue
+        return import_string(f"{t}")
 
 
 config = Config()
+
+
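`get_type()` tries three lookups in order: the `ddeutil.workflow` package itself, each entry of `WORKFLOW_CORE_REGISTRY`, and finally the bare dotted path. So both short in-package names and fully qualified externals resolve; the second call below assumes `ddeutil.core.import_string` follows the usual module-then-attribute convention:

```python
# Resolution order of get_type(), per the function above.
cfg = Config()
print(get_type("conf.FileLog", cfg))       # -> ddeutil.workflow.conf.FileLog
print(get_type("datetime.datetime", cfg))  # falls through to the stdlib class
```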
+@lru_cache
+def get_logger(name: str):
+    """Return logger object with an input module name.
+
+    :param name: A module name that want to log.
+    """
+    logger = logging.getLogger(name)
+    formatter = logging.Formatter(
+        fmt=(
+            "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
+            "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
+            "(%(filename)s:%(lineno)s)"
+        ),
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+    stream = logging.StreamHandler()
+    stream.setFormatter(formatter)
+    logger.addHandler(stream)
+
+    logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
+    return logger
+
+
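Because the function carries `@lru_cache`, repeated calls with the same module name return the same logger instead of attaching one more `StreamHandler` per call (the failure mode that would otherwise duplicate every log line):

```python
# lru_cache makes get_logger idempotent per name: one handler, one line.
log_a = get_logger("ddeutil.workflow")
log_b = get_logger("ddeutil.workflow")
assert log_a is log_b
log_a.info("emitted once, not once per get_logger call")
```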
+class BaseLog(BaseModel, ABC):
+    """Base Log Pydantic Model with abstraction class property that implement
+    only model fields. This model should to use with inherit to logging
+    sub-class like file, sqlite, etc.
+    """
+
+    name: str = Field(description="A workflow name.")
+    on: str = Field(description="A cronjob string of this piepline schedule.")
+    release: datetime = Field(description="A release datetime.")
+    context: DictData = Field(
+        default_factory=dict,
+        description=(
+            "A context data that receive from a workflow execution result.",
+        ),
+    )
+    parent_run_id: Optional[str] = Field(default=None)
+    run_id: str
+    update: datetime = Field(default_factory=datetime.now)
+
+    @model_validator(mode="after")
+    def __model_action(self) -> Self:
+        """Do before the Log action with WORKFLOW_LOG_ENABLE_WRITE env variable.
+
+        :rtype: Self
+        """
+        if config.enable_write_log:
+            self.do_before()
+        return self
+
+    def do_before(self) -> None:  # pragma: no cov
+        """To something before end up of initial log model."""
+
+    @abstractmethod
+    def save(self, excluded: list[str] | None) -> None:  # pragma: no cov
+        """Save this model logging to target logging store."""
+        raise NotImplementedError("Log should implement ``save`` method.")
+
+
+class FileLog(BaseLog):
+    """File Log Pydantic Model that use to saving log data from result of
+    workflow execution. It inherit from BaseLog model that implement the
+    ``self.save`` method for file.
+    """
+
+    filename_fmt: ClassVar[str] = (
+        "./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
+    )
+
+    def do_before(self) -> None:
+        """Create directory of release before saving log file."""
+        self.pointer().mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def find_logs(cls, name: str) -> Iterator[Self]:
+        """Generate the logging data that found from logs path with specific a
+        workflow name.
+
+        :param name: A workflow name that want to search release logging data.
+        """
+        pointer: Path = config.root_path / f"./logs/workflow={name}"
+        if not pointer.exists():
+            raise FileNotFoundError(
+                f"Pointer: ./logs/workflow={name} does not found."
+            )
+
+        for file in pointer.glob("./release=*/*.log"):
+            with file.open(mode="r", encoding="utf-8") as f:
+                yield cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def find_log_latest(
+        cls,
+        name: str,
+        release: datetime | None = None,
+    ) -> Self:
+        """Return the logging data that found from logs path with specific
+        workflow name and release values. If a release does not pass to an input
+        argument, it will return the latest release from the current log path.
+
+        :raise FileNotFoundError:
+        :raise NotImplementedError:
+
+        :rtype: Self
+        """
+        if release is None:
+            raise NotImplementedError("Find latest log does not implement yet.")
+
+        pointer: Path = (
+            config.root_path
+            / f"./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
+        )
+        if not pointer.exists():
+            raise FileNotFoundError(
+                f"Pointer: ./logs/workflow={name}/"
+                f"release={release:%Y%m%d%H%M%S} does not found."
+            )
+
+        with max(pointer.glob("./*.log"), key=os.path.getctime).open(
+            mode="r", encoding="utf-8"
+        ) as f:
+            return cls.model_validate(obj=json.load(f))
+
+    @classmethod
+    def is_pointed(
+        cls,
+        name: str,
+        release: datetime,
+        *,
+        queue: list[datetime] | None = None,
+    ) -> bool:
+        """Check this log already point in the destination.
+
+        :param name: A workflow name.
+        :param release: A release datetime.
+        :param queue: A list of queue of datetime that already run in the
+            future.
+        """
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            return False
+
+        # NOTE: create pointer path that use the same logic of pointer method.
+        pointer: Path = config.root_path / cls.filename_fmt.format(
+            name=name, release=release
+        )
+
+        if not queue:
+            return pointer.exists()
+        return pointer.exists() or (release in queue)
+
+    def pointer(self) -> Path:
+        """Return release directory path that was generated from model data.
+
+        :rtype: Path
+        """
+        return config.root_path / self.filename_fmt.format(
+            name=self.name, release=self.release
+        )
+
+    def save(self, excluded: list[str] | None) -> Self:
+        """Save logging data that receive a context data from a workflow
+        execution result.
+
+        :param excluded: An excluded list of key name that want to pass in the
+            model_dump method.
+        :rtype: Self
+        """
+        # NOTE: Check environ variable was set for real writing.
+        if not config.enable_write_log:
+            return self
+
+        log_file: Path = self.pointer() / f"{self.run_id}.log"
+        log_file.write_text(
+            json.dumps(
+                self.model_dump(exclude=excluded),
+                default=str,
+                indent=2,
+            ),
+            encoding="utf-8",
+        )
+        return self
+
+
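`filename_fmt` implies an on-disk layout of one directory per workflow release and one JSON document per run id. A usage sketch assuming the module-level imports above, `WORKFLOW_LOG_ENABLE_WRITE=true`, and hypothetical name/run-id values:

```python
# Layout: <root>/logs/workflow=<name>/release=<YYYYmmddHHMMSS>/<run_id>.log
# Values below (wf-demo, the run id) are hypothetical.
log = FileLog(
    name="wf-demo",
    on="*/5 * * * *",
    release=datetime(2024, 10, 1, 12, 0),
    context={"jobs": {}},
    run_id="demo-run-001",
)
log.save(excluded=None)  # writes only when WORKFLOW_LOG_ENABLE_WRITE is true
restored = FileLog.find_log_latest(
    "wf-demo", release=datetime(2024, 10, 1, 12, 0)
)
```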
+class SQLiteLog(BaseLog):  # pragma: no cov
+
+    def save(self, excluded: list[str] | None) -> None:
+        raise NotImplementedError("SQLiteLog does not implement yet.")
+
+
+Log = Union[
+    FileLog,
+    SQLiteLog,
+]
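The closing `Log` union lets call sites accept either backend while relying on the shared `BaseLog` interface; for example, a hypothetical helper:

```python
# Hypothetical call-site helper showing how the Log union is meant to be
# consumed: any member implements BaseLog.save().
def write_result(log: Log, excluded: list[str] | None = None) -> None:
    log.save(excluded=excluded)
```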