ddeutil-workflow 0.0.31__py3-none-any.whl → 0.0.33__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
ddeutil/workflow/conf.py CHANGED
@@ -8,19 +8,14 @@ from __future__ import annotations
 import json
 import logging
 import os
-from abc import ABC, abstractmethod
 from collections.abc import Iterator
-from datetime import datetime, timedelta
+from datetime import timedelta
 from functools import cached_property, lru_cache
 from pathlib import Path
-from typing import ClassVar, Optional, TypeVar, Union
 from zoneinfo import ZoneInfo

 from ddeutil.core import str2bool
 from ddeutil.io import YamlFlResolve
-from pydantic import BaseModel, Field
-from pydantic.functional_validators import model_validator
-from typing_extensions import Self

 from .__types import DictData, TupleStr

@@ -36,52 +31,16 @@ def glob_files(path: Path) -> Iterator[Path]: # pragma: no cov


 __all__: TupleStr = (
+    "LOGGING_CONFIG",
     "env",
     "get_logger",
-    "get_log",
-    "C",
     "Config",
     "SimLoad",
     "Loader",
     "config",
-    "logger",
-    "FileLog",
-    "SQLiteLog",
-    "Log",
 )


-@lru_cache
-def get_logger(name: str):
-    """Return logger object with an input module name.
-
-    :param name: A module name that want to log.
-    """
-    lg = logging.getLogger(name)
-
-    # NOTE: Developers using this package can then disable all logging just for
-    #   this package by;
-    #
-    #   `logging.getLogger('ddeutil.workflow').propagate = False`
-    #
-    lg.addHandler(logging.NullHandler())
-
-    formatter = logging.Formatter(
-        fmt=(
-            "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
-            "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
-            "(%(filename)s:%(lineno)s)"
-        ),
-        datefmt="%Y-%m-%d %H:%M:%S",
-    )
-    stream = logging.StreamHandler()
-    stream.setFormatter(formatter)
-    lg.addHandler(stream)
-
-    lg.setLevel(logging.DEBUG if config.debug else logging.INFO)
-    return lg
-
-
 class BaseConfig:  # pragma: no cov
     """BaseConfig object inheritable."""

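For context, `get_logger` moves below the `config` definition later in this file so that its formatter can read `config.log_format` and `config.log_datetime_format`. A minimal usage sketch of the 0.0.33 function; the `propagate` opt-out line is quoted from the in-code NOTE:

    import logging

    from ddeutil.workflow.conf import get_logger

    logger = get_logger("ddeutil.workflow")
    logger.info("hello from the workflow logger")

    # Per the in-code NOTE, applications can disable all logging just for
    # this package with:
    logging.getLogger("ddeutil.workflow").propagate = False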
@@ -149,7 +108,7 @@ class Config(BaseConfig): # pragma: no cov
         )
         return [r.strip() for r in regis_filter_str.split(",")]

-    # NOTE: Logging
+    # NOTE: Log
     @property
     def log_path(self) -> Path:
         return Path(env("LOG_PATH", "./logs"))
@@ -158,9 +117,33 @@ class Config(BaseConfig): # pragma: no cov
     def debug(self) -> bool:
         return str2bool(env("LOG_DEBUG_MODE", "true"))

+    @property
+    def log_format(self) -> str:
+        return env(
+            "LOG_FORMAT",
+            (
+                "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
+                "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
+                "(%(filename)s:%(lineno)s)"
+            ),
+        )
+
+    @property
+    def enable_rotate_file(self) -> bool:
+        return str2bool(env("LOG_ENABLE_ROTATED_FILE", "false"))
+
+    # NOTE: Audit Log
+    @property
+    def audit_path(self) -> Path:
+        return Path(env("AUDIT_PATH", "./logs"))
+
     @property
     def enable_write_log(self) -> bool:
-        return str2bool(env("LOG_ENABLE_WRITE", "false"))
+        return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
+
+    @property
+    def log_datetime_format(self) -> str:
+        return env("LOG_DATETIME_FORMAT", "%Y-%m-%d %H:%M:%S")

     # NOTE: Stage
     @property
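The new `log_format`, `log_datetime_format`, `enable_rotate_file`, `audit_path`, and `enable_write_log` properties are each backed by an environment variable. A minimal sketch of overriding them before use; the `WORKFLOW_` prefix on each variable name is an assumption inferred from the old `BaseLog` docstring (which refers to `WORKFLOW_LOG_ENABLE_WRITE`), since `env()` itself is outside this diff:

    import os

    # Assumed prefix: env("LOG_FORMAT", ...) is taken here to resolve
    # WORKFLOW_LOG_FORMAT; check env() in conf.py for the actual rule.
    os.environ["WORKFLOW_LOG_FORMAT"] = "%(asctime)s [%(levelname)s] %(message)s"
    os.environ["WORKFLOW_LOG_DATETIME_FORMAT"] = "%H:%M:%S"
    os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"

    from ddeutil.workflow.conf import config

    print(config.log_format)           # the overridden format string
    print(config.log_datetime_format)  # %H:%M:%S
    print(config.enable_write_log)     # True

Because these are plain properties, each access re-reads the environment, so overrides take effect even after the module-level `config` singleton has been created.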
@@ -209,7 +192,7 @@ class Config(BaseConfig): # pragma: no cov
     def max_queue_complete_hist(self) -> int:
         return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))

-    # NOTE: Schedule App
+    # NOTE: App
     @property
     def max_schedule_process(self) -> int:
         return int(env("APP_MAX_PROCESS", "2"))
@@ -245,15 +228,12 @@ class Config(BaseConfig): # pragma: no cov
         return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))


-C = TypeVar("C", bound=BaseConfig)
-
-
 class SimLoad:
     """Simple Load Object that will search config data by given some identity
     value like name of workflow or on.

     :param name: A name of config data that will read by Yaml Loader object.
-    :param conf: A Params model object.
+    :param conf_path: A config path object.
     :param externals: An external parameters

     Noted:
@@ -271,11 +251,11 @@ class SimLoad:
     def __init__(
         self,
         name: str,
-        conf: C,
+        conf_path: Path,
         externals: DictData | None = None,
     ) -> None:
         self.data: DictData = {}
-        for file in glob_files(conf.conf_path):
+        for file in glob_files(conf_path):

             if data := self.filter_suffix(file, name):
                 self.data = data
@@ -284,7 +264,7 @@ class SimLoad:
         if not self.data:
             raise ValueError(f"Config {name!r} does not found on conf path")

-        self.conf: C = conf
+        self.conf_path: Path = conf_path
         self.externals: DictData = externals or {}
         self.data.update(self.externals)

@@ -292,7 +272,7 @@ class SimLoad:
     def finds(
         cls,
         obj: object,
-        conf: C,
+        conf_path: Path,
         *,
         included: list[str] | None = None,
         excluded: list[str] | None = None,
@@ -302,14 +282,14 @@ class SimLoad:
         adds-on.

         :param obj: An object that want to validate matching before return.
-        :param conf: A config object.
+        :param conf_path: A config object.
         :param included:
         :param excluded:

         :rtype: Iterator[tuple[str, DictData]]
         """
         exclude: list[str] = excluded or []
-        for file in glob_files(conf.conf_path):
+        for file in glob_files(conf_path):

             for key, data in cls.filter_suffix(file).items():

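`SimLoad.__init__` and `SimLoad.finds` now take a bare `Path` instead of a `Config`-typed object, which is why the `C = TypeVar(...)` bound disappears above. A minimal sketch of the new call shape; the `my-workflow` name and `./conf` directory are hypothetical:

    from pathlib import Path

    from ddeutil.workflow.conf import SimLoad

    # Any directory of YAML files reachable by glob_files() will do.
    loader = SimLoad("my-workflow", conf_path=Path("./conf"), externals={})
    print(loader.conf_path)  # PosixPath('conf')
    print(loader.data)       # merged data for the "my-workflow" key

    # finds() takes the same conf_path argument, e.g.:
    # SimLoad.finds(SomeModel, conf_path=Path("./conf"))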
@@ -344,10 +324,6 @@ class SimLoad:
         )


-config = Config()
-logger = get_logger("ddeutil.workflow")
-
-
 class Loader(SimLoad):
     """Loader Object that get the config `yaml` file from current path.

@@ -373,219 +349,100 @@ class Loader(SimLoad):
         :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
-            obj=obj, conf=config, included=included, excluded=excluded
+            obj=obj,
+            conf_path=config.conf_path,
+            included=included,
+            excluded=excluded,
         )

     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name, conf=config, externals=externals)
-
+        super().__init__(name, conf_path=config.conf_path, externals=externals)

-class BaseLog(BaseModel, ABC):
-    """Base Log Pydantic Model with abstraction class property that implement
-    only model fields. This model should to use with inherit to logging
-    subclass like file, sqlite, etc.
-    """
-
-    name: str = Field(description="A workflow name.")
-    release: datetime = Field(description="A release datetime.")
-    type: str = Field(description="A running type before logging.")
-    context: DictData = Field(
-        default_factory=dict,
-        description="A context that receive from a workflow execution result.",
-    )
-    parent_run_id: Optional[str] = Field(default=None)
-    run_id: str
-    update: datetime = Field(default_factory=datetime.now)

-    @model_validator(mode="after")
-    def __model_action(self) -> Self:
-        """Do before the Log action with WORKFLOW_LOG_ENABLE_WRITE env variable.
+config: Config = Config()

-        :rtype: Self
-        """
-        if config.enable_write_log:
-            self.do_before()
-        return self
-
-    def do_before(self) -> None:  # pragma: no cov
-        """To something before end up of initial log model."""
-
-    @abstractmethod
-    def save(self, excluded: list[str] | None) -> None:  # pragma: no cov
-        """Save this model logging to target logging store."""
-        raise NotImplementedError("Log should implement ``save`` method.")

+@lru_cache
+def get_logger(name: str):
+    """Return logger object with an input module name.

-class FileLog(BaseLog):
-    """File Log Pydantic Model that use to saving log data from result of
-    workflow execution. It inherits from BaseLog model that implement the
-    ``self.save`` method for file.
+    :param name: A module name that want to log.
     """
+    logger = logging.getLogger(name)

-    filename_fmt: ClassVar[str] = (
-        "workflow={name}/release={release:%Y%m%d%H%M%S}"
-    )
-
-    def do_before(self) -> None:
-        """Create directory of release before saving log file."""
-        self.pointer().mkdir(parents=True, exist_ok=True)
-
-    @classmethod
-    def find_logs(cls, name: str) -> Iterator[Self]:
-        """Generate the logging data that found from logs path with specific a
-        workflow name.
-
-        :param name: A workflow name that want to search release logging data.
-
-        :rtype: Iterator[Self]
-        """
-        pointer: Path = config.log_path / f"workflow={name}"
-        if not pointer.exists():
-            raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
-
-        for file in pointer.glob("./release=*/*.log"):
-            with file.open(mode="r", encoding="utf-8") as f:
-                yield cls.model_validate(obj=json.load(f))
-
-    @classmethod
-    def find_log_with_release(
-        cls,
-        name: str,
-        release: datetime | None = None,
-    ) -> Self:
-        """Return the logging data that found from logs path with specific
-        workflow name and release values. If a release does not pass to an input
-        argument, it will return the latest release from the current log path.
-
-        :param name: A workflow name that want to search log.
-        :param release: A release datetime that want to search log.
-
-        :raise FileNotFoundError:
-        :raise NotImplementedError:
-
-        :rtype: Self
-        """
-        if release is None:
-            raise NotImplementedError("Find latest log does not implement yet.")
-
-        pointer: Path = (
-            config.log_path / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
-        )
-        if not pointer.exists():
-            raise FileNotFoundError(
-                f"Pointer: ./logs/workflow={name}/"
-                f"release={release:%Y%m%d%H%M%S} does not found."
-            )
-
-        with max(pointer.glob("./*.log"), key=os.path.getctime).open(
-            mode="r", encoding="utf-8"
-        ) as f:
-            return cls.model_validate(obj=json.load(f))
-
-    @classmethod
-    def is_pointed(cls, name: str, release: datetime) -> bool:
-        """Check the release log already pointed or created at the destination
-        log path.
-
-        :param name: A workflow name.
-        :param release: A release datetime.
-
-        :rtype: bool
-        :return: Return False if the release log was not pointed or created.
-        """
-        # NOTE: Return False if enable writing log flag does not set.
-        if not config.enable_write_log:
-            return False
-
-        # NOTE: create pointer path that use the same logic of pointer method.
-        pointer: Path = config.log_path / cls.filename_fmt.format(
-            name=name, release=release
-        )
-
-        return pointer.exists()
-
-    def pointer(self) -> Path:
-        """Return release directory path that was generated from model data.
-
-        :rtype: Path
-        """
-        return config.log_path / self.filename_fmt.format(
-            name=self.name, release=self.release
-        )
-
-    def save(self, excluded: list[str] | None) -> Self:
-        """Save logging data that receive a context data from a workflow
-        execution result.
-
-        :param excluded: An excluded list of key name that want to pass in the
-            model_dump method.
-
-        :rtype: Self
-        """
-        from .utils import cut_id
-
-        # NOTE: Check environ variable was set for real writing.
-        if not config.enable_write_log:
-            logger.debug(
-                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
-                f"config was set"
-            )
-            return self
-
-        log_file: Path = self.pointer() / f"{self.run_id}.log"
-        log_file.write_text(
-            json.dumps(
-                self.model_dump(exclude=excluded),
-                default=str,
-                indent=2,
-            ),
-            encoding="utf-8",
-        )
-        return self
-
-
-class SQLiteLog(BaseLog):  # pragma: no cov
-
-    table: str = "workflow_log"
-    ddl: str = """
-        workflow str,
-        release int,
-        type str,
-        context json,
-        parent_run_id int,
-        run_id int,
-        update datetime
-        primary key ( run_id )
-        """
-
-    def save(self, excluded: list[str] | None) -> SQLiteLog:
-        """Save logging data that receive a context data from a workflow
-        execution result.
-        """
-        from .utils import cut_id
-
-        # NOTE: Check environ variable was set for real writing.
-        if not config.enable_write_log:
-            logger.debug(
-                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
-                f"config was set"
-            )
-            return self
-
-        raise NotImplementedError("SQLiteLog does not implement yet.")
-
-
-Log = Union[
-    FileLog,
-    SQLiteLog,
-]
-
-
-def get_log() -> type[Log]:  # pragma: no cov
-    """Get logging class that dynamic base on the config log path value.
+    # NOTE: Developers using this package can then disable all logging just for
+    #   this package by;
+    #
+    #   `logging.getLogger('ddeutil.workflow').propagate = False`
+    #
+    logger.addHandler(logging.NullHandler())

-    :rtype: type[Log]
-    """
-    if config.log_path.is_file():
-        return SQLiteLog
-    return FileLog
+    formatter = logging.Formatter(
+        fmt=config.log_format,
+        datefmt=config.log_datetime_format,
+    )
+    stream = logging.StreamHandler()
+    stream.setFormatter(formatter)
+    logger.addHandler(stream)
+
+    logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
+    return logger
+
+
+LOGGING_CONFIG = {  # pragma: no cov
+    "version": 1,
+    "disable_existing_loggers": False,
+    "formatters": {
+        "standard": {
+            "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+        },
+        "custom_formatter": {
+            "format": config.log_format,
+            "datefmt": config.log_datetime_format,
+        },
+    },
+    "root": {
+        "level": "DEBUG" if config.debug else "INFO",
+    },
+    "handlers": {
+        "default": {
+            "formatter": "standard",
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stderr",
+        },
+        "stream_handler": {
+            "formatter": "custom_formatter",
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stdout",
+        },
+        "file_handler": {
+            "formatter": "custom_formatter",
+            "class": "logging.handlers.RotatingFileHandler",
+            "filename": "logs/app.log",
+            "maxBytes": 1024 * 1024 * 1,
+            "backupCount": 3,
+        },
+    },
+    "loggers": {
+        "uvicorn": {
+            "handlers": ["default", "file_handler"],
+            "level": "DEBUG" if config.debug else "INFO",
+            "propagate": False,
+        },
+        "uvicorn.access": {
+            "handlers": ["stream_handler", "file_handler"],
+            "level": "DEBUG" if config.debug else "INFO",
+            "propagate": False,
+        },
+        "uvicorn.error": {
+            "handlers": ["stream_handler", "file_handler"],
+            "level": "DEBUG" if config.debug else "INFO",
+            "propagate": False,
+        },
+        # "uvicorn.asgi": {
+        #     "handlers": ["stream_handler", "file_handler"],
+        #     "level": "TRACE",
+        #     "propagate": False,
+        # },
+    },
+}
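`LOGGING_CONFIG` follows the standard `logging.config.dictConfig` schema and targets uvicorn's logger names. A minimal sketch of applying it; `logs/` is created first because the `file_handler` opens `logs/app.log` eagerly and `RotatingFileHandler` does not create parent directories, and the app module path in the commented uvicorn call is a guess:

    import logging.config
    from pathlib import Path

    from ddeutil.workflow.conf import LOGGING_CONFIG

    # file_handler opens logs/app.log at configuration time.
    Path("logs").mkdir(exist_ok=True)

    logging.config.dictConfig(LOGGING_CONFIG)
    logging.getLogger("uvicorn.error").info("configured from LOGGING_CONFIG")

    # Or hand the dict to uvicorn directly (the app path is an assumption):
    # import uvicorn
    # uvicorn.run("ddeutil.workflow.api:app", log_config=LOGGING_CONFIG)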