ddeutil-workflow 0.0.32__py3-none-any.whl → 0.0.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/conf.py CHANGED
@@ -8,19 +8,14 @@ from __future__ import annotations
  import json
  import logging
  import os
- from abc import ABC, abstractmethod
  from collections.abc import Iterator
- from datetime import datetime, timedelta
+ from datetime import timedelta
  from functools import cached_property, lru_cache
  from pathlib import Path
- from typing import ClassVar, Optional, TypeVar, Union
  from zoneinfo import ZoneInfo

  from ddeutil.core import str2bool
  from ddeutil.io import YamlFlResolve
- from pydantic import BaseModel, Field
- from pydantic.functional_validators import model_validator
- from typing_extensions import Self

  from .__types import DictData, TupleStr

@@ -36,52 +31,17 @@ def glob_files(path: Path) -> Iterator[Path]: # pragma: no cov


  __all__: TupleStr = (
+     "LOGGING_CONFIG",
      "env",
      "get_logger",
-     "get_log",
-     "C",
      "Config",
      "SimLoad",
      "Loader",
      "config",
-     "logger",
-     "FileLog",
-     "SQLiteLog",
-     "Log",
+     "glob_files",
  )


- @lru_cache
- def get_logger(name: str):
-     """Return logger object with an input module name.
-
-     :param name: A module name that want to log.
-     """
-     lg = logging.getLogger(name)
-
-     # NOTE: Developers using this package can then disable all logging just for
-     # this package by;
-     #
-     # `logging.getLogger('ddeutil.workflow').propagate = False`
-     #
-     lg.addHandler(logging.NullHandler())
-
-     formatter = logging.Formatter(
-         fmt=(
-             "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
-             "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
-             "(%(filename)s:%(lineno)s)"
-         ),
-         datefmt="%Y-%m-%d %H:%M:%S",
-     )
-     stream = logging.StreamHandler()
-     stream.setFormatter(formatter)
-     lg.addHandler(stream)
-
-     lg.setLevel(logging.DEBUG if config.debug else logging.INFO)
-     return lg
-
-
  class BaseConfig: # pragma: no cov
      """BaseConfig object inheritable."""

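For callers upgrading across this diff: `get_log`, `FileLog`, `SQLiteLog`, `Log`, `C`, and `logger` leave the module's public surface, while `LOGGING_CONFIG` and `glob_files` join it. A quick import smoke test against 0.0.34 (a sketch that assumes only what the `__all__` change above shows):

    # Still exported from ddeutil.workflow.conf in 0.0.34:
    from ddeutil.workflow.conf import (
        LOGGING_CONFIG,
        Config,
        Loader,
        SimLoad,
        config,
        get_logger,
        glob_files,
    )

    # The log models are no longer defined in this module; the diff does not
    # show where (or whether) they live after the refactor:
    try:
        from ddeutil.workflow.conf import FileLog  # noqa: F401
    except ImportError:
        print("FileLog is gone from conf.py")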
@@ -138,9 +98,9 @@ class Config(BaseConfig): # pragma: no cov

      # NOTE: Register
      @property
-     def regis_hook(self) -> list[str]:
-         regis_hook_str: str = env("CORE_REGISTRY", ".")
-         return [r.strip() for r in regis_hook_str.split(",")]
+     def regis_call(self) -> list[str]:
+         regis_call_str: str = env("CORE_REGISTRY", ".")
+         return [r.strip() for r in regis_call_str.split(",")]

      @property
      def regis_filter(self) -> list[str]:
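This rename appears to track the wider hook-to-call renaming in this release; the behavior is unchanged: a comma-separated `CORE_REGISTRY` value becomes a list of module paths. A minimal sketch (the `env` helper sits outside this hunk, so the `WORKFLOW_` prefix on the raw variable name is an assumption):

    import os

    # Assumption: env("CORE_REGISTRY", ...) resolves to the
    # WORKFLOW_CORE_REGISTRY environment variable.
    os.environ["WORKFLOW_CORE_REGISTRY"] = "my_project.tasks, my_project.extras"

    from ddeutil.workflow.conf import config

    print(config.regis_call)  # ['my_project.tasks', 'my_project.extras']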
@@ -149,7 +109,7 @@ class Config(BaseConfig): # pragma: no cov
          )
          return [r.strip() for r in regis_filter_str.split(",")]

-     # NOTE: Logging
+     # NOTE: Log
      @property
      def log_path(self) -> Path:
          return Path(env("LOG_PATH", "./logs"))
@@ -158,10 +118,44 @@ class Config(BaseConfig): # pragma: no cov
      def debug(self) -> bool:
          return str2bool(env("LOG_DEBUG_MODE", "true"))

+     @property
+     def log_format(self) -> str:
+         return env(
+             "LOG_FORMAT",
+             (
+                 "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
+                 "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
+                 "(%(filename)s:%(lineno)s)"
+             ),
+         )
+
+     @property
+     def log_format_file(self) -> str:
+         return env(
+             "LOG_FORMAT_FILE",
+             (
+                 "{datetime} ({process:5d}, {thread:5d}) {message:120s} "
+                 "({filename}:{lineno})"
+             ),
+         )
+
      @property
      def enable_write_log(self) -> bool:
          return str2bool(env("LOG_ENABLE_WRITE", "false"))

+     # NOTE: Audit Log
+     @property
+     def audit_path(self) -> Path:
+         return Path(env("AUDIT_PATH", "./audits"))
+
+     @property
+     def enable_write_audit(self) -> bool:
+         return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
+
+     @property
+     def log_datetime_format(self) -> str:
+         return env("LOG_DATETIME_FORMAT", "%Y-%m-%d %H:%M:%S")
+
      # NOTE: Stage
      @property
      def stage_raise_error(self) -> bool:
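These properties move the formatter strings that were hard-coded in the old `get_logger` (removed above; a new version appears at the bottom of this file) into env-driven config, and split audit output from run logs. A sketch of overriding them, under the same `WORKFLOW_` prefix assumption as the note above:

    import os

    os.environ["WORKFLOW_LOG_DATETIME_FORMAT"] = "%Y-%m-%dT%H:%M:%S"
    os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"

    from ddeutil.workflow.conf import config

    # Each property reads the environment on access, so overrides apply
    # without re-importing:
    print(config.log_datetime_format)  # %Y-%m-%dT%H:%M:%S
    print(config.enable_write_audit)   # True
    print(config.audit_path)           # audits  (Path, default "./audits")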
@@ -209,7 +203,7 @@ class Config(BaseConfig): # pragma: no cov
      def max_queue_complete_hist(self) -> int:
          return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))

-     # NOTE: Schedule App
+     # NOTE: App
      @property
      def max_schedule_process(self) -> int:
          return int(env("APP_MAX_PROCESS", "2"))
@@ -245,15 +239,12 @@ class Config(BaseConfig): # pragma: no cov
          return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))


- C = TypeVar("C", bound=BaseConfig)
-
-
  class SimLoad:
      """Simple Load Object that will search config data by given some identity
      value like name of workflow or on.

      :param name: A name of config data that will read by Yaml Loader object.
-     :param conf: A Params model object.
+     :param conf_path: A config path object.
      :param externals: An external parameters

      Noted:
@@ -271,28 +262,32 @@ class SimLoad:
      def __init__(
          self,
          name: str,
-         conf: C,
+         conf_path: Path,
          externals: DictData | None = None,
      ) -> None:
+         self.conf_path: Path = conf_path
+         self.externals: DictData = externals or {}
+
          self.data: DictData = {}
-         for file in glob_files(conf.conf_path):
+         for file in glob_files(conf_path):
+
+             if self.is_ignore(file, conf_path):
+                 continue

-             if data := self.filter_suffix(file, name):
+             if data := self.filter_suffix(file, name=name):
                  self.data = data

          # VALIDATE: check the data that reading should not empty.
          if not self.data:
              raise ValueError(f"Config {name!r} does not found on conf path")

-         self.conf: C = conf
-         self.externals: DictData = externals or {}
          self.data.update(self.externals)

      @classmethod
      def finds(
          cls,
          obj: object,
-         conf: C,
+         conf_path: Path,
          *,
          included: list[str] | None = None,
          excluded: list[str] | None = None,
@@ -302,17 +297,22 @@ class SimLoad:
          adds-on.

          :param obj: An object that want to validate matching before return.
-         :param conf: A config object.
-         :param included:
-         :param excluded:
+         :param conf_path: A config path object.
+         :param included: An included list of data keys that want to keep in
+             the returned data.
+         :param excluded: An excluded list of config names that want to skip
+             from the search result.

          :rtype: Iterator[tuple[str, DictData]]
          """
          exclude: list[str] = excluded or []
-         for file in glob_files(conf.conf_path):
+         for file in glob_files(conf_path):

              for key, data in cls.filter_suffix(file).items():

+                 if cls.is_ignore(file, conf_path):
+                     continue
+
                  if key in exclude:
                      continue

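Net effect of the last two hunks: `SimLoad` no longer receives the whole config object, only a `Path`, which is what let the `C` TypeVar above be deleted. Note also that `finds` runs its `is_ignore` check inside the per-key loop, after `filter_suffix` has already parsed the file, whereas `__init__` checks before parsing. A minimal construction sketch against the new signature (file name and contents are illustrative):

    from pathlib import Path

    from ddeutil.workflow.conf import SimLoad

    conf_path = Path("conf")
    conf_path.mkdir(exist_ok=True)
    (conf_path / "demo.yml").write_text(
        "wf-demo:\n  type: Workflow\n", encoding="utf-8"
    )

    # 0.0.32 called SimLoad(name, conf=config); 0.0.34 takes the path itself:
    loader = SimLoad("wf-demo", conf_path=conf_path)
    print(loader.data)  # {'type': 'Workflow'}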
@@ -323,11 +323,26 @@ class SimLoad:
                  else data
              )

+     @classmethod
+     def is_ignore(cls, file: Path, conf_path: Path) -> bool:
+         ignore_file: Path = conf_path / ".confignore"
+         ignore: list[str] = []
+         if ignore_file.exists():
+             ignore = ignore_file.read_text(encoding="utf-8").splitlines()
+
+         if any(
+             (file.match(f"**/{pattern}/*") or file.match(f"**/{pattern}*"))
+             for pattern in ignore
+         ):
+             return True
+         return False
+
      @classmethod
      def filter_suffix(cls, file: Path, name: str | None = None) -> DictData:
          if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
              values: DictData = YamlFlResolve(file).read()
              return values.get(name, {}) if name else values
+
          return {}

      @cached_property
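`is_ignore` adds a `.gitignore`-style escape hatch: a `.confignore` file at the config root, re-read on every call, whose lines are matched with the two glob forms shown. A behavior sketch under those rules (paths are illustrative; note that a pattern like `draft` also catches sibling files that merely start with `draft`, because of the `**/{pattern}*` form):

    from pathlib import Path

    from ddeutil.workflow.conf import SimLoad

    conf_path = Path("conf")
    (conf_path / "draft").mkdir(parents=True, exist_ok=True)
    (conf_path / ".confignore").write_text("draft\n", encoding="utf-8")

    # Files under an ignored directory match "**/draft/*":
    print(SimLoad.is_ignore(conf_path / "draft" / "wf-one.yml", conf_path))  # True
    # Prefix matches come from "**/draft*":
    print(SimLoad.is_ignore(conf_path / "draft-notes.yml", conf_path))       # True
    print(SimLoad.is_ignore(conf_path / "prod-wf.yml", conf_path))           # False

Since the ignore file is read once per candidate file (and in `finds`, once per key), very large config trees may want this cached upstream.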
@@ -344,10 +359,6 @@ class SimLoad:
          )


- config = Config()
- logger = get_logger("ddeutil.workflow")
-
-
  class Loader(SimLoad):
      """Loader Object that get the config `yaml` file from current path.

@@ -373,219 +384,100 @@ class Loader(SimLoad):
          :rtype: Iterator[tuple[str, DictData]]
          """
          return super().finds(
-             obj=obj, conf=config, included=included, excluded=excluded
+             obj=obj,
+             conf_path=config.conf_path,
+             included=included,
+             excluded=excluded,
          )

      def __init__(self, name: str, externals: DictData) -> None:
-         super().__init__(name, conf=config, externals=externals)
-
-
- class BaseLog(BaseModel, ABC):
-     """Base Log Pydantic Model with abstraction class property that implement
-     only model fields. This model should to use with inherit to logging
-     subclass like file, sqlite, etc.
-     """
-
-     name: str = Field(description="A workflow name.")
-     release: datetime = Field(description="A release datetime.")
-     type: str = Field(description="A running type before logging.")
-     context: DictData = Field(
-         default_factory=dict,
-         description="A context that receive from a workflow execution result.",
-     )
-     parent_run_id: Optional[str] = Field(default=None)
-     run_id: str
-     update: datetime = Field(default_factory=datetime.now)
+         super().__init__(name, conf_path=config.conf_path, externals=externals)

-     @model_validator(mode="after")
-     def __model_action(self) -> Self:
-         """Do before the Log action with WORKFLOW_LOG_ENABLE_WRITE env variable.

-         :rtype: Self
-         """
-         if config.enable_write_log:
-             self.do_before()
-         return self
+ config: Config = Config()

-     def do_before(self) -> None: # pragma: no cov
-         """To something before end up of initial log model."""
-
-     @abstractmethod
-     def save(self, excluded: list[str] | None) -> None: # pragma: no cov
-         """Save this model logging to target logging store."""
-         raise NotImplementedError("Log should implement ``save`` method.")

+ @lru_cache
+ def get_logger(name: str):
+     """Return logger object with an input module name.

- class FileLog(BaseLog):
-     """File Log Pydantic Model that use to saving log data from result of
-     workflow execution. It inherits from BaseLog model that implement the
-     ``self.save`` method for file.
+     :param name: A module name that want to log.
      """
+     logger = logging.getLogger(name)

-     filename_fmt: ClassVar[str] = (
-         "workflow={name}/release={release:%Y%m%d%H%M%S}"
-     )
-
-     def do_before(self) -> None:
-         """Create directory of release before saving log file."""
-         self.pointer().mkdir(parents=True, exist_ok=True)
-
-     @classmethod
-     def find_logs(cls, name: str) -> Iterator[Self]:
-         """Generate the logging data that found from logs path with specific a
-         workflow name.
-
-         :param name: A workflow name that want to search release logging data.
-
-         :rtype: Iterator[Self]
-         """
-         pointer: Path = config.log_path / f"workflow={name}"
-         if not pointer.exists():
-             raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
-
-         for file in pointer.glob("./release=*/*.log"):
-             with file.open(mode="r", encoding="utf-8") as f:
-                 yield cls.model_validate(obj=json.load(f))
-
-     @classmethod
-     def find_log_with_release(
-         cls,
-         name: str,
-         release: datetime | None = None,
-     ) -> Self:
-         """Return the logging data that found from logs path with specific
-         workflow name and release values. If a release does not pass to an input
-         argument, it will return the latest release from the current log path.
-
-         :param name: A workflow name that want to search log.
-         :param release: A release datetime that want to search log.
-
-         :raise FileNotFoundError:
-         :raise NotImplementedError:
-
-         :rtype: Self
-         """
-         if release is None:
-             raise NotImplementedError("Find latest log does not implement yet.")
-
-         pointer: Path = (
-             config.log_path / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
-         )
-         if not pointer.exists():
-             raise FileNotFoundError(
-                 f"Pointer: ./logs/workflow={name}/"
-                 f"release={release:%Y%m%d%H%M%S} does not found."
-             )
-
-         with max(pointer.glob("./*.log"), key=os.path.getctime).open(
-             mode="r", encoding="utf-8"
-         ) as f:
-             return cls.model_validate(obj=json.load(f))
-
-     @classmethod
-     def is_pointed(cls, name: str, release: datetime) -> bool:
-         """Check the release log already pointed or created at the destination
-         log path.
-
-         :param name: A workflow name.
-         :param release: A release datetime.
-
-         :rtype: bool
-         :return: Return False if the release log was not pointed or created.
-         """
-         # NOTE: Return False if enable writing log flag does not set.
-         if not config.enable_write_log:
-             return False
-
-         # NOTE: create pointer path that use the same logic of pointer method.
-         pointer: Path = config.log_path / cls.filename_fmt.format(
-             name=name, release=release
-         )
-
-         return pointer.exists()
-
-     def pointer(self) -> Path:
-         """Return release directory path that was generated from model data.
-
-         :rtype: Path
-         """
-         return config.log_path / self.filename_fmt.format(
-             name=self.name, release=self.release
-         )
-
-     def save(self, excluded: list[str] | None) -> Self:
-         """Save logging data that receive a context data from a workflow
-         execution result.
-
-         :param excluded: An excluded list of key name that want to pass in the
-             model_dump method.
-
-         :rtype: Self
-         """
-         from .utils import cut_id
-
-         # NOTE: Check environ variable was set for real writing.
-         if not config.enable_write_log:
-             logger.debug(
-                 f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
-                 f"config was set"
-             )
-             return self
-
-         log_file: Path = self.pointer() / f"{self.run_id}.log"
-         log_file.write_text(
-             json.dumps(
-                 self.model_dump(exclude=excluded),
-                 default=str,
-                 indent=2,
-             ),
-             encoding="utf-8",
-         )
-         return self
-
-
- class SQLiteLog(BaseLog): # pragma: no cov
-
-     table: str = "workflow_log"
-     ddl: str = """
-         workflow str,
-         release int,
-         type str,
-         context json,
-         parent_run_id int,
-         run_id int,
-         update datetime
-         primary key ( run_id )
-         """
-
-     def save(self, excluded: list[str] | None) -> SQLiteLog:
-         """Save logging data that receive a context data from a workflow
-         execution result.
-         """
-         from .utils import cut_id
-
-         # NOTE: Check environ variable was set for real writing.
-         if not config.enable_write_log:
-             logger.debug(
-                 f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
-                 f"config was set"
-             )
-             return self
-
-         raise NotImplementedError("SQLiteLog does not implement yet.")
-
-
- Log = Union[
-     FileLog,
-     SQLiteLog,
- ]
-
-
- def get_log() -> type[Log]: # pragma: no cov
-     """Get logging class that dynamic base on the config log path value.
+     # NOTE: Developers using this package can then disable all logging just for
+     # this package by;
+     #
+     # `logging.getLogger('ddeutil.workflow').propagate = False`
+     #
+     logger.addHandler(logging.NullHandler())

-     :rtype: type[Log]
-     """
-     if config.log_path.is_file():
-         return SQLiteLog
-     return FileLog
+     formatter = logging.Formatter(
+         fmt=config.log_format,
+         datefmt=config.log_datetime_format,
+     )
+     stream = logging.StreamHandler()
+     stream.setFormatter(formatter)
+     logger.addHandler(stream)

+     logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
+     return logger
+
+
+ LOGGING_CONFIG = { # pragma: no cov
+     "version": 1,
+     "disable_existing_loggers": False,
+     "formatters": {
+         "standard": {
+             "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+         },
+         "custom_formatter": {
+             "format": config.log_format,
+             "datefmt": config.log_datetime_format,
+         },
+     },
+     "root": {
+         "level": "DEBUG" if config.debug else "INFO",
+     },
+     "handlers": {
+         "default": {
+             "formatter": "standard",
+             "class": "logging.StreamHandler",
+             "stream": "ext://sys.stderr",
+         },
+         "stream_handler": {
+             "formatter": "custom_formatter",
+             "class": "logging.StreamHandler",
+             "stream": "ext://sys.stdout",
+         },
+         "file_handler": {
+             "formatter": "custom_formatter",
+             "class": "logging.handlers.RotatingFileHandler",
+             "filename": "logs/app.log",
+             "maxBytes": 1024 * 1024 * 1,
+             "backupCount": 3,
+         },
+     },
+     "loggers": {
+         "uvicorn": {
+             "handlers": ["default", "file_handler"],
+             "level": "DEBUG" if config.debug else "INFO",
+             "propagate": False,
+         },
+         "uvicorn.access": {
+             "handlers": ["stream_handler", "file_handler"],
+             "level": "DEBUG" if config.debug else "INFO",
+             "propagate": False,
+         },
+         "uvicorn.error": {
+             "handlers": ["stream_handler", "file_handler"],
+             "level": "DEBUG" if config.debug else "INFO",
+             "propagate": False,
+         },
+         # "uvicorn.asgi": {
+         #     "handlers": ["stream_handler", "file_handler"],
+         #     "level": "TRACE",
+         #     "propagate": False,
+         # },
+     },
+ }
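`LOGGING_CONFIG` is a standard `logging.config.dictConfig` dictionary aimed at uvicorn deployments of the API. A wiring sketch using only the standard library; the one gotcha is that `RotatingFileHandler` points at the hard-coded `logs/app.log` and will not create the directory itself:

    import logging
    import logging.config
    from pathlib import Path

    from ddeutil.workflow.conf import LOGGING_CONFIG

    # The file handler opens logs/app.log when the config is applied, so the
    # directory must exist first:
    Path("logs").mkdir(exist_ok=True)

    logging.config.dictConfig(LOGGING_CONFIG)
    logging.getLogger("uvicorn.error").info("logging configured")

The root logger is declared with a level but no handlers, so records from other loggers fall through to Python's last-resort stderr handler unless you attach handlers yourself (for example via `get_logger` above).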