mm_std-0.0.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. mm_std-0.0.1/PKG-INFO +27 -0
  2. mm_std-0.0.1/pyproject.toml +78 -0
  3. mm_std-0.0.1/setup.cfg +4 -0
  4. mm_std-0.0.1/src/mm_std/__init__.py +46 -0
  5. mm_std-0.0.1/src/mm_std/command.py +35 -0
  6. mm_std-0.0.1/src/mm_std/concurrency.py +157 -0
  7. mm_std-0.0.1/src/mm_std/config.py +78 -0
  8. mm_std-0.0.1/src/mm_std/crypto.py +13 -0
  9. mm_std-0.0.1/src/mm_std/date.py +48 -0
  10. mm_std-0.0.1/src/mm_std/dict.py +4 -0
  11. mm_std-0.0.1/src/mm_std/env.py +9 -0
  12. mm_std-0.0.1/src/mm_std/fs.py +13 -0
  13. mm_std-0.0.1/src/mm_std/json_.py +36 -0
  14. mm_std-0.0.1/src/mm_std/log.py +22 -0
  15. mm_std-0.0.1/src/mm_std/net.py +154 -0
  16. mm_std-0.0.1/src/mm_std/print_.py +54 -0
  17. mm_std-0.0.1/src/mm_std/py.typed +0 -0
  18. mm_std-0.0.1/src/mm_std/random_.py +38 -0
  19. mm_std-0.0.1/src/mm_std/result.py +266 -0
  20. mm_std-0.0.1/src/mm_std/str.py +106 -0
  21. mm_std-0.0.1/src/mm_std/telegram.py +35 -0
  22. mm_std-0.0.1/src/mm_std/types.py +4 -0
  23. mm_std-0.0.1/src/mm_std/zip.py +8 -0
  24. mm_std-0.0.1/src/mm_std.egg-info/PKG-INFO +27 -0
  25. mm_std-0.0.1/src/mm_std.egg-info/SOURCES.txt +41 -0
  26. mm_std-0.0.1/src/mm_std.egg-info/dependency_links.txt +1 -0
  27. mm_std-0.0.1/src/mm_std.egg-info/requires.txt +24 -0
  28. mm_std-0.0.1/src/mm_std.egg-info/top_level.txt +1 -0
  29. mm_std-0.0.1/tests/test_command.py +20 -0
  30. mm_std-0.0.1/tests/test_concurrency.py +142 -0
  31. mm_std-0.0.1/tests/test_crypto.py +18 -0
  32. mm_std-0.0.1/tests/test_date.py +25 -0
  33. mm_std-0.0.1/tests/test_dict.py +8 -0
  34. mm_std-0.0.1/tests/test_env.py +5 -0
  35. mm_std-0.0.1/tests/test_fs.py +12 -0
  36. mm_std-0.0.1/tests/test_json.py +13 -0
  37. mm_std-0.0.1/tests/test_log.py +6 -0
  38. mm_std-0.0.1/tests/test_net.py +124 -0
  39. mm_std-0.0.1/tests/test_print.py +7 -0
  40. mm_std-0.0.1/tests/test_random.py +30 -0
  41. mm_std-0.0.1/tests/test_result.py +52 -0
  42. mm_std-0.0.1/tests/test_str.py +60 -0
  43. mm_std-0.0.1/tests/test_telegram.py +17 -0
mm_std-0.0.1/PKG-INFO ADDED
@@ -0,0 +1,27 @@
+ Metadata-Version: 2.1
+ Name: mm-std
+ Version: 0.0.1
+ Requires-Python: >=3.11
+ Requires-Dist: requests~=2.32.3
+ Requires-Dist: PySocks~=1.7.1
+ Requires-Dist: pydash~=8.0.0
+ Requires-Dist: python-dateutil~=2.9.0
+ Requires-Dist: pydantic~=2.8.2
+ Requires-Dist: python-dotenv~=1.0.1
+ Requires-Dist: PyYAML~=6.0.1
+ Requires-Dist: cryptography~=43.0.0
+ Requires-Dist: rich
+ Provides-Extra: dev
+ Requires-Dist: build~=1.2.1; extra == "dev"
+ Requires-Dist: twine~=5.1.0; extra == "dev"
+ Requires-Dist: pytest~=8.3.2; extra == "dev"
+ Requires-Dist: pytest-xdist~=3.6.1; extra == "dev"
+ Requires-Dist: pytest-httpserver~=1.0.8; extra == "dev"
+ Requires-Dist: coverage~=7.6.0; extra == "dev"
+ Requires-Dist: ruff~=0.5.2; extra == "dev"
+ Requires-Dist: pip-audit~=2.7.0; extra == "dev"
+ Requires-Dist: bandit~=1.7.7; extra == "dev"
+ Requires-Dist: mypy~=1.11.0; extra == "dev"
+ Requires-Dist: types-python-dateutil~=2.9.0; extra == "dev"
+ Requires-Dist: types-requests~=2.32.0.20240523; extra == "dev"
+ Requires-Dist: types-PyYAML~=6.0.12.12; extra == "dev"
mm_std-0.0.1/pyproject.toml ADDED
@@ -0,0 +1,78 @@
+ [project]
+ name = "mm-std"
+ version = "0.0.1"
+ description = ""
+ requires-python = ">=3.11"
+ dependencies = [
+     "requests~=2.32.3",
+     "PySocks~=1.7.1",
+     "pydash~=8.0.0",
+     "python-dateutil~=2.9.0",
+     "pydantic~=2.8.2",
+     "python-dotenv~=1.0.1",
+     "PyYAML~=6.0.1",
+     "cryptography~=43.0.0",
+     "rich",
+ ]
+ [project.optional-dependencies]
+ dev = [
+     "build~=1.2.1",
+     "twine~=5.1.0",
+     "pytest~=8.3.2",
+     "pytest-xdist~=3.6.1",
+     "pytest-httpserver~=1.0.8",
+     "coverage~=7.6.0",
+     "ruff~=0.5.2",
+     "pip-audit~=2.7.0",
+     "bandit~=1.7.7",
+     "mypy~=1.11.0",
+     "types-python-dateutil~=2.9.0",
+     "types-requests~=2.32.0.20240523",
+     "types-PyYAML~=6.0.12.12",
+ ]
+
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+
+ [tool.mypy]
+ python_version = "3.12"
+ warn_no_return = false
+ strict = true
+ exclude = ["^tests/", "^tmp/"]
+
+
+ [tool.ruff]
+ line-length = 130
+ target-version = "py312"
+ lint.select = [
+     "F",    # Pyflakes
+     "E", "W",  # pycodestyle
+     "B",    # flake8-bugbear
+     "A",    # flake8-builtins
+     "COM",  # flake8-commas
+     "C40",  # flake8-comprehensions
+     "G",    # flake8-logging-format
+     "PIE",  # flake8-pie
+     "T20",  # flake8-print
+     "RUF",  # Ruff-specific rules
+ ]
+ lint.ignore = [
+     "A003",   # builtin-attribute-shadowing
+     "UP040",  # non-pep695-type-alias
+     "COM812"
+ ]
+ [tool.ruff.format]
+ quote-style = "double"
+ indent-style = "space"
+
+
+ [tool.bandit]
+ exclude_dirs = ["tests"]
+ skips = ["B311"]
+
+
+ [tool.pytest.ini_options]
+ markers = ["proxy: requires access proxies", "telegram: requires a telegram bot"]
mm_std-0.0.1/setup.cfg ADDED
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
mm_std-0.0.1/src/mm_std/__init__.py ADDED
@@ -0,0 +1,46 @@
+ from .command import CommandResult as CommandResult
+ from .command import run_command as run_command
+ from .command import run_ssh_command as run_ssh_command
+ from .concurrency import ConcurrentTasks as ConcurrentTasks
+ from .concurrency import Scheduler as Scheduler
+ from .concurrency import synchronized as synchronized
+ from .concurrency import synchronized_parameter as synchronized_parameter
+ from .config import BaseConfig as BaseConfig
+ from .crypto import fernet_decrypt as fernet_decrypt
+ from .crypto import fernet_encrypt as fernet_encrypt
+ from .crypto import fernet_generate_key as fernet_generate_key
+ from .date import parse_date as parse_date
+ from .date import utc_delta as utc_delta
+ from .date import utc_now as utc_now
+ from .date import utc_random as utc_random
+ from .dict import replace_empty_values as replace_empty_values
+ from .env import get_dotenv as get_dotenv
+ from .json_ import CustomJSONEncoder as CustomJSONEncoder
+ from .json_ import json_dumps as json_dumps
+ from .log import init_logger as init_logger
+ from .net import CHROME_USER_AGENT as CHROME_USER_AGENT
+ from .net import FIREFOX_USER_AGENT as FIREFOX_USER_AGENT
+ from .net import HResponse as HResponse
+ from .net import add_query_params_to_url as add_query_params_to_url
+ from .net import check_port as check_port
+ from .net import hr as hr
+ from .net import hrequest as hrequest
+ from .print_ import PrintFormat as PrintFormat
+ from .print_ import fatal as fatal
+ from .print_ import print_console as print_console
+ from .print_ import print_json as print_json
+ from .print_ import print_plain as print_plain
+ from .print_ import print_table as print_table
+ from .random_ import random_choice as random_choice
+ from .random_ import random_decimal as random_decimal
+ from .random_ import random_str_choice as random_str_choice
+ from .result import Err as Err
+ from .result import Ok as Ok
+ from .result import Result as Result
+ from .result import try_ok as try_ok
+ from .str import number_with_separator as number_with_separator
+ from .str import str_ends_with_any as str_ends_with_any
+ from .str import str_starts_with_any as str_starts_with_any
+ from .str import str_to_list as str_to_list
+ from .telegram import send_telegram_message as send_telegram_message
+ from .zip import read_text_from_zip_archive as read_text_from_zip_archive
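The explicit `x as x` re-export form keeps every name visible to strict type checkers, so callers can import from the package root instead of individual submodules. A minimal illustration of that import style (not part of the diff):

    from mm_std import CommandResult, run_command, utc_delta, utc_now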
mm_std-0.0.1/src/mm_std/command.py ADDED
@@ -0,0 +1,35 @@
+ import subprocess  # nosec
+ from dataclasses import dataclass
+
+
+ @dataclass
+ class CommandResult:
+     stdout: str
+     stderr: str
+     code: int
+
+     @property
+     def out(self) -> str:
+         if self.stdout:
+             return self.stdout + "\n" + self.stderr
+         return self.stderr
+
+
+ def run_command(cmd: str, timeout: int | None = 60, capture_output: bool = True, echo_cmd_console: bool = False) -> CommandResult:
+     if echo_cmd_console:
+         print(cmd)  # noqa: T201
+     try:
+         process = subprocess.run(cmd, timeout=timeout, capture_output=capture_output, shell=True, check=False)  # nosec
+         stdout = process.stdout.decode("utf-8") if capture_output else ""
+         stderr = process.stderr.decode("utf-8") if capture_output else ""
+         return CommandResult(stdout=stdout, stderr=stderr, code=process.returncode)
+     except subprocess.TimeoutExpired:
+         return CommandResult(stdout="", stderr="timeout", code=124)
+
+
+ def run_ssh_command(host: str, cmd: str, ssh_key_path: str | None = None, timeout: int = 60) -> CommandResult:
+     ssh_cmd = "ssh -o 'StrictHostKeyChecking=no' -o 'LogLevel=ERROR'"
+     if ssh_key_path:
+         ssh_cmd += f" -i {ssh_key_path} "
+     ssh_cmd += f" {host} {cmd}"
+     return run_command(ssh_cmd, timeout=timeout)
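A short usage sketch for run_command based on the definitions above; the command string is only an example:

    from mm_std import run_command

    res = run_command("ls -la /tmp", timeout=10)  # runs via the shell and captures output
    if res.code == 0:
        print(res.stdout)
    else:
        print(res.out)  # stdout plus stderr; a TimeoutExpired yields stderr="timeout" and code 124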
mm_std-0.0.1/src/mm_std/concurrency.py ADDED
@@ -0,0 +1,157 @@
+ import concurrent.futures
+ import functools
+ import time
+ from collections import defaultdict
+ from collections.abc import Callable
+ from concurrent.futures.thread import ThreadPoolExecutor
+ from dataclasses import dataclass, field
+ from datetime import datetime
+ from logging import Logger
+ from threading import Lock, Thread
+ from typing import ParamSpec, TypeAlias, TypeVar
+
+ from .date import is_too_old, utc_now
+
+ Func: TypeAlias = Callable[..., object]
+ Args: TypeAlias = tuple[object, ...]
+ Kwargs: TypeAlias = dict[str, object]
+
+
+ class ConcurrentTasks:
+     def __init__(self, max_workers: int = 5, timeout: int | None = None, thread_name_prefix: str = "concurrent_tasks") -> None:
+         self.max_workers = max_workers
+         self.timeout = timeout
+         self.thread_name_prefix = thread_name_prefix
+         self.tasks: list[ConcurrentTasks.Task] = []
+         self.exceptions: dict[str, Exception] = {}
+         self.error = False
+         self.timeout_error = False
+         self.result: dict[str, object] = {}
+
+     @dataclass
+     class Task:
+         key: str
+         func: Func
+         args: Args
+         kwargs: Kwargs
+
+     def add_task(self, key: str, func: Func, args: Args = (), kwargs: Kwargs | None = None) -> None:
+         if kwargs is None:
+             kwargs = {}
+         self.tasks.append(ConcurrentTasks.Task(key, func, args, kwargs))
+
+     def execute(self) -> None:
+         with ThreadPoolExecutor(self.max_workers, thread_name_prefix=self.thread_name_prefix) as executor:
+             future_to_key = {executor.submit(task.func, *task.args, **task.kwargs): task.key for task in self.tasks}
+             try:
+                 result_map = concurrent.futures.as_completed(future_to_key, timeout=self.timeout)
+                 for future in result_map:
+                     key = future_to_key[future]
+                     try:
+                         self.result[key] = future.result()
+                     except Exception as err:
+                         self.error = True
+                         self.exceptions[key] = err
+             except concurrent.futures.TimeoutError:
+                 self.error = True
+                 self.timeout_error = True
+
+
+ T = TypeVar("T")
+ P = ParamSpec("P")
+
+
+ def synchronized_parameter(arg_index: int = 0, skip_if_locked: bool = False) -> Callable[..., Callable[P, T | None]]:
+     locks: dict[object, Lock] = defaultdict(Lock)
+
+     def outer(func: Callable[P, T]) -> Callable[P, T | None]:
+         @functools.wraps(func)
+         def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
+             if skip_if_locked and locks[args[arg_index]].locked():
+                 return None
+             try:
+                 with locks[args[arg_index]]:
+                     return func(*args, **kwargs)
+             finally:
+                 locks.pop(args[arg_index], None)
+
+         wrapper.locks = locks  # type: ignore[attr-defined]
+         return wrapper
+
+     return outer
+
+
+ def synchronized(fn: Callable[P, T]) -> Callable[P, T]:
+     lock = Lock()
+
+     @functools.wraps(fn)
+     def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+         with lock:
+             return fn(*args, **kwargs)
+
+     return wrapper
+
+
+ class Scheduler:
+     def __init__(self, log: Logger, loop_delay: float = 0.5, debug: bool = False):
+         self.log = log
+         self.debug = debug
+         self.loop_delay = loop_delay
+         self.stopped = False
+         self.jobs: list[Scheduler.Job] = []
+         self.run_immediately_jobs: list[Scheduler.Job] = []
+         self._debug("init")
+
+     @dataclass
+     class Job:
+         func: Func
+         args: tuple[object, ...]
+         interval: int
+         is_running: bool = False
+         last_at: datetime = field(default_factory=utc_now)
+
+         def __str__(self) -> str:
+             return str(self.func)
+
+     def add_job(self, func: Func, interval: int, args: tuple[object, ...] = (), run_immediately: bool = False) -> None:
+         job = Scheduler.Job(func, args, interval)
+         self.jobs.append(job)
+         if run_immediately:
+             self.run_immediately_jobs.append(job)
+
+     def _run_job(self, job: Job) -> None:
+         self._debug(f"_run_job: {job}")
+         if self.stopped:
+             return
+         try:
+             job.func(*job.args)
+             self._debug(f"_run_job: {job} done")
+         except Exception as e:
+             self.log.exception("scheduler error: %s", str(e))
+             self._debug(f"_run_job: {job} error")
+         finally:
+             job.is_running = False
+
+     def _start(self) -> None:
+         self._debug(f"_start: jobs={len(self.jobs)}, run_immediately_jobs={len(self.run_immediately_jobs)}")
+         for j in self.run_immediately_jobs:
+             j.is_running = True
+             j.last_at = utc_now()
+             Thread(target=self._run_job, args=(j,)).start()
+         while not self.stopped:
+             for j in self.jobs:
+                 if not j.is_running and is_too_old(j.last_at, j.interval):
+                     j.is_running = True
+                     j.last_at = utc_now()
+                     Thread(target=self._run_job, args=(j,)).start()
+             time.sleep(self.loop_delay)
+
+     def _debug(self, message: str) -> None:
+         if self.debug:
+             self.log.debug("Scheduler: %s", message)
+
+     def start(self) -> None:
+         Thread(target=self._start).start()
+
+     def stop(self) -> None:
+         self.stopped = True
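A usage sketch for ConcurrentTasks as defined above; the task keys and functions are illustrative:

    from mm_std import ConcurrentTasks

    tasks = ConcurrentTasks(max_workers=3, timeout=30)
    tasks.add_task("a", pow, args=(2, 10))
    tasks.add_task("b", pow, args=(3, 3))
    tasks.execute()
    if tasks.error:
        print(tasks.exceptions, tasks.timeout_error)  # per-key exceptions plus the timeout flag
    else:
        print(tasks.result)  # {"a": 1024, "b": 27}

Scheduler follows the same pattern: add_job(func, interval) registers a job, start() spawns the polling thread, and stop() sets the flag that ends the loop.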
mm_std-0.0.1/src/mm_std/config.py ADDED
@@ -0,0 +1,78 @@
+ import io
+ from pathlib import Path
+ from typing import TypeVar
+
+ import yaml
+ from pydantic import BaseModel, ConfigDict, ValidationError
+
+ from .print_ import PrintFormat, print_console, print_json, print_plain, print_table
+ from .str import str_to_list
+ from .zip import read_text_from_zip_archive
+
+ T = TypeVar("T")
+
+
+ class BaseConfig(BaseModel):
+     model_config = ConfigDict(extra="forbid")
+
+     @classmethod
+     def to_list_str_validator(
+         cls,
+         v: str | list[str] | None,
+         *,
+         lower: bool = False,
+         unique: bool = False,
+         remove_comments: bool = False,
+         split_line: bool = False,
+     ) -> list[str]:
+         if v is None:
+             return []
+         if isinstance(v, str):
+             return str_to_list(v, unique=unique, remove_comments=remove_comments, split_line=split_line, lower=lower)
+         return v
+
+     @classmethod
+     def read_config(  # nosec
+         cls: type[T],
+         config_path: io.TextIOWrapper | str | Path,
+         error_print_type: PrintFormat = PrintFormat.PLAIN,
+         zip_password: str = "",
+     ) -> T:
+         try:
+             # is it a zip archive?
+             if isinstance(config_path, str) and config_path.endswith(".zip"):
+                 config_path = str(Path(config_path).expanduser())
+                 return cls(**yaml.full_load(read_text_from_zip_archive(config_path, password=zip_password)))
+             if isinstance(config_path, io.TextIOWrapper) and config_path.name.endswith(".zip"):
+                 config_path = str(Path(config_path.name).expanduser())
+                 return cls(**yaml.full_load(read_text_from_zip_archive(config_path, password=zip_password)))
+             if isinstance(config_path, Path) and config_path.name.endswith(".zip"):
+                 config_path = str(config_path.expanduser())
+                 return cls(**yaml.full_load(read_text_from_zip_archive(config_path, password=zip_password)))
+
+             # plain yml file
+             if isinstance(config_path, str):
+                 return cls(**yaml.full_load(Path(config_path).expanduser().read_text()))
+             elif isinstance(config_path, Path):
+                 return cls(**yaml.full_load(config_path.expanduser().read_text()))
+             else:
+                 return cls(**yaml.full_load(config_path))
+         except ValidationError as err:
+             print_plain("config validation errors", error_print_type)
+             json_errors = []
+             rows = []
+             for e in err.errors():
+                 loc = e["loc"]
+                 field = ".".join(str(lo) for lo in loc) if len(loc) > 0 else ""
+                 print_plain(f"{field} {e['msg']}", error_print_type)
+                 json_errors.append({field: e["msg"]})
+                 rows.append([field, e["msg"]])
+             print_table("config validation errors", ["field", "message"], rows)
+             print_json({"errors": json_errors}, error_print_type)
+             exit(1)
+         except Exception as err:
+             if error_print_type == "json":
+                 print_json({"exception": str(err)})
+             else:
+                 print_console(f"config error: {err!s}")
+             exit(1)
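A sketch of how BaseConfig.read_config might be used, assuming a hypothetical AppConfig model and YAML path:

    from mm_std import BaseConfig

    class AppConfig(BaseConfig):  # extra keys are rejected because of extra="forbid"
        nodes: list[str]
        timeout: int = 5

    config = AppConfig.read_config("~/app.yml")  # prints validation errors and exits with code 1 on failure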
mm_std-0.0.1/src/mm_std/crypto.py ADDED
@@ -0,0 +1,13 @@
+ from cryptography.fernet import Fernet
+
+
+ def fernet_generate_key() -> str:
+     return Fernet.generate_key().decode()
+
+
+ def fernet_encrypt(*, data: str, key: str) -> str:
+     return Fernet(key).encrypt(data.encode()).decode()
+
+
+ def fernet_decrypt(*, encoded_data: str, key: str) -> str:
+     return Fernet(key).decrypt(encoded_data).decode()
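The Fernet helpers round-trip str values; a minimal sketch:

    from mm_std import fernet_decrypt, fernet_encrypt, fernet_generate_key

    key = fernet_generate_key()  # base64-encoded key returned as str
    token = fernet_encrypt(data="secret", key=key)
    assert fernet_decrypt(encoded_data=token, key=key) == "secret"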
mm_std-0.0.1/src/mm_std/date.py ADDED
@@ -0,0 +1,48 @@
+ import random
+ from datetime import UTC, datetime, timedelta
+
+ from dateutil import parser
+
+
+ def utc_now() -> datetime:
+     return datetime.now(UTC)
+
+
+ def utc_delta(
+     *,
+     days: int | None = None,
+     hours: int | None = None,
+     minutes: int | None = None,
+     seconds: int | None = None,
+ ) -> datetime:
+     params = {}
+     if days:
+         params["days"] = days
+     if hours:
+         params["hours"] = hours
+     if minutes:
+         params["minutes"] = minutes
+     if seconds:
+         params["seconds"] = seconds
+     return datetime.now(UTC) + timedelta(**params)
+
+
+ def parse_date(value: str, ignore_tz: bool = False) -> datetime:
+     return parser.parse(value, ignoretz=ignore_tz)
+
+
+ def utc_random(
+     *,
+     from_time: datetime | None = None,
+     range_hours: int = 0,
+     range_minutes: int = 0,
+     range_seconds: int = 0,
+ ) -> datetime:
+     if from_time is None:
+         from_time = utc_now()
+     to_time = from_time + timedelta(hours=range_hours, minutes=range_minutes, seconds=range_seconds)
+     return from_time + (to_time - from_time) * random.random()
+
+
+ def is_too_old(value: datetime | None, seconds: int) -> bool:
+     return value is None or value < utc_delta(seconds=-1 * seconds)
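utc_delta accepts negative offsets, and is_too_old builds on it; an illustrative check:

    from mm_std import utc_delta, utc_now
    from mm_std.date import is_too_old  # not re-exported at the package root

    print(utc_now() > utc_delta(minutes=-10))                # True: ten minutes in the past
    print(is_too_old(utc_delta(seconds=-120), seconds=60))   # True: older than the 60-second window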
mm_std-0.0.1/src/mm_std/dict.py ADDED
@@ -0,0 +1,4 @@
+ def replace_empty_values(data: dict[object, object], defaults: dict[object, object]) -> None:
+     for k, v in defaults.items():
+         if not data.get(k):
+             data[k] = v
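replace_empty_values mutates the dict in place and only touches falsy values; for example:

    from mm_std import replace_empty_values

    data = {"host": "", "port": 8080}
    replace_empty_values(data, {"host": "localhost", "port": 80})
    print(data)  # {"host": "localhost", "port": 8080}; the truthy port is left alone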
mm_std-0.0.1/src/mm_std/env.py ADDED
@@ -0,0 +1,9 @@
+ import os
+
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+
+ def get_dotenv(key: str) -> str | None:
+     return os.getenv(key)
mm_std-0.0.1/src/mm_std/fs.py ADDED
@@ -0,0 +1,13 @@
+ from pathlib import Path
+
+
+ def read_text(path: str | Path) -> str:
+     if isinstance(path, str):
+         path = Path(path)
+     return path.read_text()
+
+
+ def get_filename_without_extension(path: str | Path) -> str:
+     if isinstance(path, str):
+         path = Path(path)
+     return path.stem
mm_std-0.0.1/src/mm_std/json_.py ADDED
@@ -0,0 +1,36 @@
+ import json
+ from dataclasses import asdict, is_dataclass
+ from datetime import date, datetime
+ from decimal import Decimal
+ from enum import Enum
+ from json import JSONEncoder
+ from typing import Any
+
+ from pydantic import BaseModel
+
+ from mm_std.result import Err, Ok
+
+
+ class CustomJSONEncoder(JSONEncoder):
+     def default(self, o: Any) -> Any:
+         if isinstance(o, Ok):
+             return {"ok": o.ok}
+         if isinstance(o, Err):
+             return {"err": o.err}
+         if isinstance(o, Decimal):
+             return str(o)
+         if isinstance(o, datetime | date):
+             return o.isoformat()
+         if is_dataclass(o) and not isinstance(o, type):
+             return asdict(o)
+         if isinstance(o, Enum):
+             return o.value
+         if isinstance(o, BaseModel):
+             return o.model_dump()
+         if isinstance(o, Exception):
+             return str(o)
+         return JSONEncoder.default(self, o)
+
+
+ def json_dumps(data: object) -> str:
+     return json.dumps(data, cls=CustomJSONEncoder)
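A sketch of json_dumps handling the types covered by CustomJSONEncoder (the dataclass and values are made up):

    from dataclasses import dataclass
    from decimal import Decimal

    from mm_std import json_dumps, utc_now

    @dataclass
    class Item:
        name: str
        price: Decimal

    print(json_dumps({"item": Item("tea", Decimal("1.50")), "at": utc_now()}))
    # {"item": {"name": "tea", "price": "1.50"}, "at": "2024-...T...+00:00"}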
mm_std-0.0.1/src/mm_std/log.py ADDED
@@ -0,0 +1,22 @@
+ import logging
+ from logging.handlers import RotatingFileHandler
+ from pathlib import Path
+
+
+ def init_logger(name: str, file_path: str | None = None, file_mkdir: bool = True, level: int = logging.DEBUG) -> logging.Logger:
+     log = logging.getLogger(name)
+     log.setLevel(level)
+     log.propagate = False
+     fmt = logging.Formatter(fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+     console_handler = logging.StreamHandler()
+     console_handler.setLevel(logging.DEBUG)
+     console_handler.setFormatter(fmt)
+     log.addHandler(console_handler)
+     if file_path:
+         if file_mkdir:
+             Path(file_path).parent.mkdir(exist_ok=True)
+         file_handler = RotatingFileHandler(file_path, maxBytes=10 * 1024 * 1024, backupCount=1)
+         file_handler.setLevel(logging.INFO)
+         file_handler.setFormatter(fmt)
+         log.addHandler(file_handler)
+     return log
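A usage sketch for init_logger; the logger name and file path are placeholders:

    from mm_std import init_logger

    log = init_logger("app", file_path="logs/app.log")  # DEBUG to console, INFO and above to a 10 MB rotating file
    log.info("started")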