ddeutil-workflow 0.0.68__py3-none-any.whl → 0.0.69__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,6 +13,7 @@ from dataclasses import field
 from datetime import datetime
 from enum import Enum
 from typing import Optional, Union
+from zoneinfo import ZoneInfo
 
 from pydantic import ConfigDict
 from pydantic.dataclasses import dataclass
@@ -30,12 +31,20 @@ from . import (
     WorkflowError,
 )
 from .__types import DictData
+from .audits import TraceModel, get_trace
 from .conf import dynamic
 from .errors import ResultError
-from .logs import TraceModel, get_dt_tznow, get_trace
 from .utils import default_gen_id, gen_id, get_dt_now
 
 
+def get_dt_tznow(tz: Optional[ZoneInfo] = None) -> datetime: # pragma: no cov
+    """Return the current datetime object that passing the config timezone.
+
+    :rtype: datetime
+    """
+    return get_dt_now(tz=dynamic("tz", f=tz))
+
+
 class Status(str, Enum):
     """Status Int Enum object that use for tracking execution status to the
     Result dataclass object.
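Note: `get_dt_tznow` moves into this module (out of the logs module changed below) and gains an optional `ZoneInfo` argument resolved through `dynamic("tz", f=tz)`. A minimal usage sketch, assuming the explicit `f=` value takes precedence over the configured timezone and that `get_dt_tznow` is already in scope; the call sites are illustrative, not from the package:

    from zoneinfo import ZoneInfo

    now_cfg = get_dt_tznow()                    # no argument: falls back to the configured timezone
    now_utc = get_dt_tznow(tz=ZoneInfo("UTC"))  # assumed override path via dynamic("tz", f=tz)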
@@ -3,12 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-# [x] Use fix config for `get_logger`, and Model initialize step.
-"""A Logs module contain Trace and Audit Pydantic models for process log from
-the core workflow engine. I separate part of log to 2 types:
-- Trace: A stdout and stderr log
-- Audit: An audit release log for tracking incremental running workflow.
-"""
+# [x] Use fix config for `set_logging`, and Model initialize step.
 from __future__ import annotations
 
 import json
@@ -17,7 +12,6 @@ import os
 import re
 from abc import ABC, abstractmethod
 from collections.abc import Iterator
-from datetime import datetime
 from functools import lru_cache
 from inspect import Traceback, currentframe, getframeinfo
 from pathlib import Path
@@ -26,7 +20,6 @@ from types import FrameType
 from typing import ClassVar, Final, Literal, Optional, TypeVar, Union
 
 from pydantic import BaseModel, ConfigDict, Field
-from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
 from .__types import DictData
@@ -34,6 +27,7 @@ from .conf import config, dynamic
 from .utils import cut_id, get_dt_now, prepare_newline
 
 METADATA: str = "metadata.json"
+logger = logging.getLogger("ddeutil.workflow")
 
 
 @lru_cache
@@ -64,17 +58,6 @@ def set_logging(name: str) -> logging.Logger:
     return _logger
 
 
-logger = logging.getLogger("ddeutil.workflow")
-
-
-def get_dt_tznow() -> datetime: # pragma: no cov
-    """Return the current datetime object that passing the config timezone.
-
-    :rtype: datetime
-    """
-    return get_dt_now(tz=config.tz)
-
-
 PREFIX_LOGS: Final[dict[str, dict]] = {
     "CALLER": {
         "emoji": "📍",
@@ -98,8 +81,22 @@ class PrefixMsg(BaseModel):
     from logging message.
     """
 
-    name: Optional[str] = Field(default=None)
-    message: Optional[str] = Field(default=None)
+    name: Optional[str] = Field(default=None, description="A prefix name.")
+    message: Optional[str] = Field(default=None, description="A message.")
+
+    @classmethod
+    def from_str(cls, msg: str) -> Self:
+        """Extract message prefix from an input message.
+
+        Args:
+            msg (str): A message that want to extract.
+
+        Returns:
+            PrefixMsg: the validated model from a string message.
+        """
+        return PrefixMsg.model_validate(
+            obj=PREFIX_LOGS_REGEX.search(msg).groupdict()
+        )
 
     def prepare(self, extras: Optional[DictData] = None) -> str:
         """Prepare message with force add prefix before writing trace log.
@@ -118,18 +115,6 @@ class PrefixMsg(BaseModel):
         return f"{emoji}[{name}]: {self.message}"
 
 
-def extract_msg_prefix(msg: str) -> PrefixMsg:
-    """Extract message prefix from an input message.
-
-    :param msg: A message that want to extract.
-
-    :rtype: PrefixMsg
-    """
-    return PrefixMsg.model_validate(
-        obj=PREFIX_LOGS_REGEX.search(msg).groupdict()
-    )
-
-
 class TraceMeta(BaseModel): # pragma: no cov
     """Trace Metadata model for making the current metadata of this CPU, Memory
     process, and thread data.
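Note: the module-level `extract_msg_prefix` helper removed just above is folded into the model as `PrefixMsg.from_str` (added earlier in this diff). A migration sketch; the sample message and its `[CALLER]: ...` prefix are assumptions, since `PREFIX_LOGS_REGEX` is not shown here:

    message = "[CALLER]: start stage execution"  # hypothetical prefixed message

    # 0.0.68: prefix = extract_msg_prefix(message)
    prefix = PrefixMsg.from_str(message)         # 0.0.69
    print(prefix.name, prefix.message)
    print(prefix.prepare())                      # re-renders the message with its mapped emoji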
@@ -248,10 +233,6 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
 
     model_config = ConfigDict(frozen=True)
 
-    run_id: str = Field(default="A running ID")
-    parent_run_id: Optional[str] = Field(
-        default=None, description="A parent running ID"
-    )
     extras: DictData = Field(
         default_factory=dict,
         description=(
@@ -259,6 +240,11 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
             "values."
         ),
     )
+    run_id: str = Field(description="A running ID")
+    parent_run_id: Optional[str] = Field(
+        default=None,
+        description="A parent running ID",
+    )
 
     @classmethod
     @abstractmethod
@@ -267,6 +253,17 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
         path: Optional[Path] = None,
         extras: Optional[DictData] = None,
     ) -> Iterator[TraceData]: # pragma: no cov
+        """Return iterator of TraceData models from the target pointer.
+
+        Args:
+            path (:obj:`Path`, optional): A pointer path that want to override.
+            extras (:obj:`DictData`, optional): An extras parameter that want to
+                override default engine config.
+
+        Returns:
+            Iterator[TracData]: An iterator object that generate a TracData
+                model.
+        """
         raise NotImplementedError(
             "Trace dataclass should implement `find_traces` class-method."
         )
@@ -287,7 +284,12 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
         )
 
     @abstractmethod
-    def writer(self, message: str, level: str, is_err: bool = False) -> None:
+    def writer(
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
+    ) -> None:
         """Write a trace message after making to target pointer object. The
         target can be anything be inherited this class and overwrite this method
         such as file, console, or database.
@@ -303,7 +305,10 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
 
     @abstractmethod
     async def awriter(
-        self, message: str, level: str, is_err: bool = False
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
     ) -> None:
         """Async Write a trace message after making to target pointer object.
 
@@ -328,6 +333,89 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
         "Adjust make message method for this trace object before using."
     )
 
+
+class ConsoleTrace(BaseTrace): # pragma: no cov
+    """Console Trace log model."""
+
+    @classmethod
+    def find_traces(
+        cls,
+        path: Optional[Path] = None,
+        extras: Optional[DictData] = None,
+    ) -> Iterator[TraceData]: # pragma: no cov
+        raise NotImplementedError(
+            "Console Trace does not support to find history traces data."
+        )
+
+    @classmethod
+    def find_trace_with_id(
+        cls,
+        run_id: str,
+        force_raise: bool = True,
+        *,
+        path: Optional[Path] = None,
+        extras: Optional[DictData] = None,
+    ) -> TraceData:
+        raise NotImplementedError(
+            "Console Trace does not support to find history traces data with "
+            "the specific running ID."
+        )
+
+    def writer(
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
+    ) -> None:
+        """Write a trace message after making to target pointer object. The
+        target can be anything be inherited this class and overwrite this method
+        such as file, console, or database.
+
+        :param message: (str) A message after making.
+        :param level: (str) A log level.
+        :param is_err: (bool) A flag for writing with an error trace or not.
+            (Default be False)
+        """
+
+    async def awriter(
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
+    ) -> None:
+        """Async Write a trace message after making to target pointer object.
+
+        :param message: (str) A message after making.
+        :param level: (str) A log level.
+        :param is_err: (bool) A flag for writing with an error trace or not.
+            (Default be False)
+        """
+
+    @property
+    def cut_id(self) -> str:
+        """Combine cutting ID of parent running ID if it set.
+
+        :rtype: str
+        """
+        cut_run_id: str = cut_id(self.run_id)
+        if not self.parent_run_id:
+            return f"{cut_run_id}"
+
+        cut_parent_run_id: str = cut_id(self.parent_run_id)
+        return f"{cut_parent_run_id} -> {cut_run_id}"
+
+    def make_message(self, message: str) -> str:
+        """Prepare and Make a message before write and log steps.
+
+        :param message: (str) A message that want to prepare and make before.
+
+        :rtype: str
+        """
+        return prepare_newline(
+            f"({self.cut_id}) "
+            f"{PrefixMsg.from_str(message).prepare(self.extras)}"
+        )
+
     def __logging(
         self, message: str, mode: str, *, is_err: bool = False
     ) -> None:
@@ -343,7 +431,7 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
         ):
             self.writer(msg, level=mode, is_err=is_err)
 
-        getattr(logger, mode)(msg, stacklevel=3)
+        getattr(logger, mode)(msg, stacklevel=3, extra={"cut_id": self.cut_id})
 
     def debug(self, message: str):
         """Write trace log with append mode and logging this message with the
@@ -400,7 +488,7 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
         ):
             await self.awriter(msg, level=mode, is_err=is_err)
 
-        getattr(logger, mode)(msg, stacklevel=3)
+        getattr(logger, mode)(msg, stacklevel=3, extra={"cut_id": self.cut_id})
 
     async def adebug(self, message: str) -> None: # pragma: no cov
         """Async write trace log with append mode and logging this message with
@@ -443,7 +531,7 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
         await self.__alogging(message, mode="exception", is_err=True)
 
 
-class FileTrace(BaseTrace): # pragma: no cov
+class FileTrace(ConsoleTrace): # pragma: no cov
     """File Trace dataclass that write file to the local storage."""
 
     @classmethod
@@ -492,6 +580,11 @@ class FileTrace(BaseTrace): # pragma: no cov
 
     @property
     def pointer(self) -> Path:
+        """Pointer of the target path that use to writing trace log or searching
+        trace log.
+
+        :rtype: Path
+        """
         log_file: Path = (
             dynamic("trace_path", extras=self.extras)
             / f"run_id={self.parent_run_id or self.run_id}"
@@ -500,31 +593,12 @@ class FileTrace(BaseTrace): # pragma: no cov
             log_file.mkdir(parents=True)
         return log_file
 
-    @property
-    def cut_id(self) -> str:
-        """Combine cutting ID of parent running ID if it set.
-
-        :rtype: str
-        """
-        cut_run_id: str = cut_id(self.run_id)
-        if not self.parent_run_id:
-            return f"{cut_run_id}"
-
-        cut_parent_run_id: str = cut_id(self.parent_run_id)
-        return f"{cut_parent_run_id} -> {cut_run_id}"
-
-    def make_message(self, message: str) -> str:
-        """Prepare and Make a message before write and log steps.
-
-        :param message: (str) A message that want to prepare and make before.
-
-        :rtype: str
-        """
-        return prepare_newline(
-            f"({self.cut_id}) {extract_msg_prefix(message).prepare(self.extras)}"
-        )
-
-    def writer(self, message: str, level: str, is_err: bool = False) -> None:
+    def writer(
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
+    ) -> None:
         """Write a trace message after making to target file and write metadata
         in the same path of standard files.
 
@@ -556,7 +630,10 @@ class FileTrace(BaseTrace): # pragma: no cov
             f.write(trace_meta.model_dump_json() + "\n")
 
     async def awriter(
-        self, message: str, level: str, is_err: bool = False
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
     ) -> None: # pragma: no cov
         """Write with async mode."""
         if not dynamic("enable_write_log", extras=self.extras):
@@ -584,7 +661,7 @@ class FileTrace(BaseTrace): # pragma: no cov
             await f.write(trace_meta.model_dump_json() + "\n")
 
 
-class SQLiteTrace(BaseTrace): # pragma: no cov
+class SQLiteTrace(ConsoleTrace): # pragma: no cov
     """SQLite Trace dataclass that write trace log to the SQLite database file."""
 
     table_name: ClassVar[str] = "audits"
@@ -618,16 +695,23 @@ class SQLiteTrace(BaseTrace): # pragma: no cov
     def make_message(self, message: str) -> str: ...
 
     def writer(
-        self, message: str, level: str, is_err: bool = False
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
     ) -> None: ...
 
     def awriter(
-        self, message: str, level: str, is_err: bool = False
+        self,
+        message: str,
+        level: str,
+        is_err: bool = False,
     ) -> None: ...
 
 
 Trace = TypeVar("Trace", bound=BaseTrace)
 TraceModel = Union[
+    ConsoleTrace,
     FileTrace,
     SQLiteTrace,
 ]
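Note: `ConsoleTrace` joins the `TraceModel` union, and `FileTrace`/`SQLiteTrace` now inherit `cut_id` and `make_message` from it; `get_trace` (context in the next hunk) still returns `FileTrace`. A usage sketch with a made-up running ID, keeping the keyword arguments shown in the removed audit code below:

    trace = get_trace("20240101RUN0001", parent_run_id=None, extras={})
    # The "[CALLER]" prefix follows PREFIX_LOGS above; writing to file still
    # depends on enable_write_log, while the std logging call always happens.
    trace.debug("[CALLER]: example message")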
@@ -656,310 +740,3 @@ def get_trace(
     return FileTrace(
         run_id=run_id, parent_run_id=parent_run_id, extras=(extras or {})
     )
-
-
-class BaseAudit(BaseModel, ABC):
-    """Base Audit Pydantic Model with abstraction class property that implement
-    only model fields. This model should to use with inherit to logging
-    subclass like file, sqlite, etc.
-    """
-
-    extras: DictData = Field(
-        default_factory=dict,
-        description="An extras parameter that want to override core config",
-    )
-    name: str = Field(description="A workflow name.")
-    release: datetime = Field(description="A release datetime.")
-    type: str = Field(description="A running type before logging.")
-    context: DictData = Field(
-        default_factory=dict,
-        description="A context that receive from a workflow execution result.",
-    )
-    parent_run_id: Optional[str] = Field(
-        default=None, description="A parent running ID."
-    )
-    run_id: str = Field(description="A running ID")
-    update: datetime = Field(default_factory=get_dt_tznow)
-    execution_time: float = Field(default=0, description="An execution time.")
-
-    @model_validator(mode="after")
-    def __model_action(self) -> Self:
-        """Do before the Audit action with WORKFLOW_AUDIT_ENABLE_WRITE env variable.
-
-        :rtype: Self
-        """
-        if dynamic("enable_write_audit", extras=self.extras):
-            self.do_before()
-
-        # NOTE: Start setting log config in this line with cache.
-        set_logging("ddeutil.workflow")
-        return self
-
-    @classmethod
-    @abstractmethod
-    def is_pointed(
-        cls,
-        name: str,
-        release: datetime,
-        *,
-        extras: Optional[DictData] = None,
-    ) -> bool:
-        raise NotImplementedError(
-            "Audit should implement `is_pointed` class-method"
-        )
-
-    @classmethod
-    @abstractmethod
-    def find_audits(
-        cls, name: str, *, extras: Optional[DictData] = None
-    ) -> Iterator[Self]:
-        raise NotImplementedError(
-            "Audit should implement `find_audits` class-method"
-        )
-
-    @classmethod
-    @abstractmethod
-    def find_audit_with_release(
-        cls,
-        name: str,
-        release: Optional[datetime] = None,
-        *,
-        extras: Optional[DictData] = None,
-    ) -> Self:
-        raise NotImplementedError(
-            "Audit should implement `find_audit_with_release` class-method"
-        )
-
-    def do_before(self) -> None: # pragma: no cov
-        """To something before end up of initial log model."""
-
-    @abstractmethod
-    def save(self, excluded: Optional[list[str]]) -> None: # pragma: no cov
-        """Save this model logging to target logging store."""
-        raise NotImplementedError("Audit should implement `save` method.")
-
-
-class FileAudit(BaseAudit):
-    """File Audit Pydantic Model that use to saving log data from result of
-    workflow execution. It inherits from BaseAudit model that implement the
-    ``self.save`` method for file.
-    """
-
-    filename_fmt: ClassVar[str] = (
-        "workflow={name}/release={release:%Y%m%d%H%M%S}"
-    )
-
-    def do_before(self) -> None:
-        """Create directory of release before saving log file."""
-        self.pointer().mkdir(parents=True, exist_ok=True)
-
-    @classmethod
-    def find_audits(
-        cls, name: str, *, extras: Optional[DictData] = None
-    ) -> Iterator[Self]:
-        """Generate the audit data that found from logs path with specific a
-        workflow name.
-
-        :param name: A workflow name that want to search release logging data.
-        :param extras: An extra parameter that want to override core config.
-
-        :rtype: Iterator[Self]
-        """
-        pointer: Path = (
-            dynamic("audit_path", extras=extras) / f"workflow={name}"
-        )
-        if not pointer.exists():
-            raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
-
-        for file in pointer.glob("./release=*/*.log"):
-            with file.open(mode="r", encoding="utf-8") as f:
-                yield cls.model_validate(obj=json.load(f))
-
-    @classmethod
-    def find_audit_with_release(
-        cls,
-        name: str,
-        release: Optional[datetime] = None,
-        *,
-        extras: Optional[DictData] = None,
-    ) -> Self:
-        """Return the audit data that found from logs path with specific
-        workflow name and release values. If a release does not pass to an input
-        argument, it will return the latest release from the current log path.
-
-        :param name: (str) A workflow name that want to search log.
-        :param release: (datetime) A release datetime that want to search log.
-        :param extras: An extra parameter that want to override core config.
-
-        :raise FileNotFoundError:
-        :raise NotImplementedError: If an input release does not pass to this
-            method. Because this method does not implement latest log.
-
-        :rtype: Self
-        """
-        if release is None:
-            raise NotImplementedError("Find latest log does not implement yet.")
-
-        pointer: Path = (
-            dynamic("audit_path", extras=extras)
-            / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
-        )
-        if not pointer.exists():
-            raise FileNotFoundError(
-                f"Pointer: ./logs/workflow={name}/"
-                f"release={release:%Y%m%d%H%M%S} does not found."
-            )
-
-        latest_file: Path = max(pointer.glob("./*.log"), key=os.path.getctime)
-        with latest_file.open(mode="r", encoding="utf-8") as f:
-            return cls.model_validate(obj=json.load(f))
-
-    @classmethod
-    def is_pointed(
-        cls,
-        name: str,
-        release: datetime,
-        *,
-        extras: Optional[DictData] = None,
-    ) -> bool:
-        """Check the release log already pointed or created at the destination
-        log path.
-
-        :param name: (str) A workflow name.
-        :param release: (datetime) A release datetime.
-        :param extras: An extra parameter that want to override core config.
-
-        :rtype: bool
-        :return: Return False if the release log was not pointed or created.
-        """
-        # NOTE: Return False if enable writing log flag does not set.
-        if not dynamic("enable_write_audit", extras=extras):
-            return False
-
-        # NOTE: create pointer path that use the same logic of pointer method.
-        pointer: Path = dynamic(
-            "audit_path", extras=extras
-        ) / cls.filename_fmt.format(name=name, release=release)
-
-        return pointer.exists()
-
-    def pointer(self) -> Path:
-        """Return release directory path that was generated from model data.
-
-        :rtype: Path
-        """
-        return dynamic(
-            "audit_path", extras=self.extras
-        ) / self.filename_fmt.format(name=self.name, release=self.release)
-
-    def save(self, excluded: Optional[list[str]] = None) -> Self:
-        """Save logging data that receive a context data from a workflow
-        execution result.
-
-        :param excluded: An excluded list of key name that want to pass in the
-            model_dump method.
-
-        :rtype: Self
-        """
-        trace: TraceModel = get_trace(
-            self.run_id,
-            parent_run_id=self.parent_run_id,
-            extras=self.extras,
-        )
-
-        # NOTE: Check environ variable was set for real writing.
-        if not dynamic("enable_write_audit", extras=self.extras):
-            trace.debug("[AUDIT]: Skip writing log cause config was set")
-            return self
-
-        log_file: Path = (
-            self.pointer() / f"{self.parent_run_id or self.run_id}.log"
-        )
-        log_file.write_text(
-            json.dumps(
-                self.model_dump(exclude=excluded),
-                default=str,
-                indent=2,
-            ),
-            encoding="utf-8",
-        )
-        return self
-
-
-class SQLiteAudit(BaseAudit): # pragma: no cov
-    """SQLite Audit Pydantic Model."""
-
-    table_name: ClassVar[str] = "audits"
-    schemas: ClassVar[
-        str
-    ] = """
-        workflow str,
-        release int,
-        type str,
-        context json,
-        parent_run_id int,
-        run_id int,
-        update datetime
-        primary key ( run_id )
-        """
-
-    @classmethod
-    def is_pointed(
-        cls,
-        name: str,
-        release: datetime,
-        *,
-        extras: Optional[DictData] = None,
-    ) -> bool: ...
-
-    @classmethod
-    def find_audits(
-        cls, name: str, *, extras: Optional[DictData] = None
-    ) -> Iterator[Self]: ...
-
-    @classmethod
-    def find_audit_with_release(
-        cls,
-        name: str,
-        release: Optional[datetime] = None,
-        *,
-        extras: Optional[DictData] = None,
-    ) -> Self: ...
-
-    def save(self, excluded: Optional[list[str]]) -> SQLiteAudit:
-        """Save logging data that receive a context data from a workflow
-        execution result.
-        """
-        trace: TraceModel = get_trace(
-            self.run_id,
-            parent_run_id=self.parent_run_id,
-            extras=self.extras,
-        )
-
-        # NOTE: Check environ variable was set for real writing.
-        if not dynamic("enable_write_audit", extras=self.extras):
-            trace.debug("[AUDIT]: Skip writing log cause config was set")
-            return self
-
-        raise NotImplementedError("SQLiteAudit does not implement yet.")
-
-
-Audit = TypeVar("Audit", bound=BaseAudit)
-AuditModel = Union[
-    FileAudit,
-    SQLiteAudit,
-]
-
-
-def get_audit(
-    extras: Optional[DictData] = None,
-) -> type[AuditModel]: # pragma: no cov
-    """Get an audit class that dynamic base on the config audit path value.
-
-    :param extras: An extra parameter that want to override the core config.
-
-    :rtype: type[Audit]
-    """
-    if dynamic("audit_path", extras=extras).is_file():
-        return SQLiteAudit
-    return FileAudit