mm-std 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mm_std/__init__.py +46 -0
- mm_std/command.py +35 -0
- mm_std/concurrency.py +157 -0
- mm_std/config.py +78 -0
- mm_std/crypto.py +13 -0
- mm_std/date.py +48 -0
- mm_std/dict.py +4 -0
- mm_std/env.py +9 -0
- mm_std/fs.py +13 -0
- mm_std/json_.py +36 -0
- mm_std/log.py +22 -0
- mm_std/net.py +154 -0
- mm_std/print_.py +54 -0
- mm_std/py.typed +0 -0
- mm_std/random_.py +38 -0
- mm_std/result.py +266 -0
- mm_std/str.py +106 -0
- mm_std/telegram.py +35 -0
- mm_std/types.py +4 -0
- mm_std/zip.py +8 -0
- mm_std-0.0.1.dist-info/METADATA +28 -0
- mm_std-0.0.1.dist-info/RECORD +24 -0
- mm_std-0.0.1.dist-info/WHEEL +5 -0
- mm_std-0.0.1.dist-info/top_level.txt +1 -0
mm_std/__init__.py
ADDED
@@ -0,0 +1,46 @@
from .command import CommandResult as CommandResult
from .command import run_command as run_command
from .command import run_ssh_command as run_ssh_command
from .concurrency import ConcurrentTasks as ConcurrentTasks
from .concurrency import Scheduler as Scheduler
from .concurrency import synchronized as synchronized
from .concurrency import synchronized_parameter as synchronized_parameter
from .config import BaseConfig as BaseConfig
from .crypto import fernet_decrypt as fernet_decrypt
from .crypto import fernet_encrypt as fernet_encrypt
from .crypto import fernet_generate_key as fernet_generate_key
from .date import parse_date as parse_date
from .date import utc_delta as utc_delta
from .date import utc_now as utc_now
from .date import utc_random as utc_random
from .dict import replace_empty_values as replace_empty_values
from .env import get_dotenv as get_dotenv
from .json_ import CustomJSONEncoder as CustomJSONEncoder
from .json_ import json_dumps as json_dumps
from .log import init_logger as init_logger
from .net import CHROME_USER_AGENT as CHROME_USER_AGENT
from .net import FIREFOX_USER_AGENT as FIREFOX_USER_AGENT
from .net import HResponse as HResponse
from .net import add_query_params_to_url as add_query_params_to_url
from .net import check_port as check_port
from .net import hr as hr
from .net import hrequest as hrequest
from .print_ import PrintFormat as PrintFormat
from .print_ import fatal as fatal
from .print_ import print_console as print_console
from .print_ import print_json as print_json
from .print_ import print_plain as print_plain
from .print_ import print_table as print_table
from .random_ import random_choice as random_choice
from .random_ import random_decimal as random_decimal
from .random_ import random_str_choice as random_str_choice
from .result import Err as Err
from .result import Ok as Ok
from .result import Result as Result
from .result import try_ok as try_ok
from .str import number_with_separator as number_with_separator
from .str import str_ends_with_any as str_ends_with_any
from .str import str_starts_with_any as str_starts_with_any
from .str import str_to_list as str_to_list
from .telegram import send_telegram_message as send_telegram_message
from .zip import read_text_from_zip_archive as read_text_from_zip_archive
mm_std/command.py
ADDED
@@ -0,0 +1,35 @@
import subprocess  # nosec
from dataclasses import dataclass


@dataclass
class CommandResult:
    stdout: str
    stderr: str
    code: int

    @property
    def out(self) -> str:
        if self.stdout:
            return self.stdout + "\n" + self.stderr
        return self.stderr


def run_command(cmd: str, timeout: int | None = 60, capture_output: bool = True, echo_cmd_console: bool = False) -> CommandResult:
    if echo_cmd_console:
        print(cmd)  # noqa: T201
    try:
        process = subprocess.run(cmd, timeout=timeout, capture_output=capture_output, shell=True, check=False)  # nosec
        stdout = process.stdout.decode("utf-8") if capture_output else ""
        stderr = process.stderr.decode("utf-8") if capture_output else ""
        return CommandResult(stdout=stdout, stderr=stderr, code=process.returncode)
    except subprocess.TimeoutExpired:
        return CommandResult(stdout="", stderr="timeout", code=124)


def run_ssh_command(host: str, cmd: str, ssh_key_path: str | None = None, timeout: int = 60) -> CommandResult:
    ssh_cmd = "ssh -o 'StrictHostKeyChecking=no' -o 'LogLevel=ERROR'"
    if ssh_key_path:
        ssh_cmd += f" -i {ssh_key_path} "
    ssh_cmd += f" {host} {cmd}"
    return run_command(ssh_cmd, timeout=timeout)
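For reference, a minimal usage sketch of run_command (illustrative only, not part of the wheel; assumes the package is installed and a POSIX shell is available):

    from mm_std import run_command

    res = run_command("echo hello", timeout=5)
    if res.code == 0:
        print(res.stdout.strip())  # "hello"
    else:
        print(res.out)  # combined stdout + stderr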
mm_std/concurrency.py
ADDED
@@ -0,0 +1,157 @@
import concurrent.futures
import functools
import time
from collections import defaultdict
from collections.abc import Callable
from concurrent.futures.thread import ThreadPoolExecutor
from dataclasses import dataclass, field
from datetime import datetime
from logging import Logger
from threading import Lock, Thread
from typing import ParamSpec, TypeAlias, TypeVar

from .date import is_too_old, utc_now

Func: TypeAlias = Callable[..., object]
Args: TypeAlias = tuple[object, ...]
Kwargs: TypeAlias = dict[str, object]


class ConcurrentTasks:
    def __init__(self, max_workers: int = 5, timeout: int | None = None, thread_name_prefix: str = "concurrent_tasks") -> None:
        self.max_workers = max_workers
        self.timeout = timeout
        self.thread_name_prefix = thread_name_prefix
        self.tasks: list[ConcurrentTasks.Task] = []
        self.exceptions: dict[str, Exception] = {}
        self.error = False
        self.timeout_error = False
        self.result: dict[str, object] = {}

    @dataclass
    class Task:
        key: str
        func: Func
        args: Args
        kwargs: Kwargs

    def add_task(self, key: str, func: Func, args: Args = (), kwargs: Kwargs | None = None) -> None:
        if kwargs is None:
            kwargs = {}
        self.tasks.append(ConcurrentTasks.Task(key, func, args, kwargs))

    def execute(self) -> None:
        with ThreadPoolExecutor(self.max_workers, thread_name_prefix=self.thread_name_prefix) as executor:
            future_to_key = {executor.submit(task.func, *task.args, **task.kwargs): task.key for task in self.tasks}
            try:
                result_map = concurrent.futures.as_completed(future_to_key, timeout=self.timeout)
                for future in result_map:
                    key = future_to_key[future]
                    try:
                        self.result[key] = future.result()
                    except Exception as err:
                        self.error = True
                        self.exceptions[key] = err
            except concurrent.futures.TimeoutError:
                self.error = True
                self.timeout_error = True


T = TypeVar("T")
P = ParamSpec("P")


def synchronized_parameter(arg_index: int = 0, skip_if_locked: bool = False) -> Callable[..., Callable[P, T | None]]:
    locks: dict[object, Lock] = defaultdict(Lock)

    def outer(func: Callable[P, T]) -> Callable[P, T | None]:
        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
            if skip_if_locked and locks[args[arg_index]].locked():
                return None
            try:
                with locks[args[arg_index]]:
                    return func(*args, **kwargs)
            finally:
                locks.pop(args[arg_index], None)

        wrapper.locks = locks  # type: ignore[attr-defined]
        return wrapper

    return outer


def synchronized(fn: Callable[P, T]) -> Callable[P, T]:
    lock = Lock()

    @functools.wraps(fn)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
        with lock:
            return fn(*args, **kwargs)

    return wrapper


class Scheduler:
    def __init__(self, log: Logger, loop_delay: float = 0.5, debug: bool = False):
        self.log = log
        self.debug = debug
        self.loop_delay = loop_delay
        self.stopped = False
        self.jobs: list[Scheduler.Job] = []
        self.run_immediately_jobs: list[Scheduler.Job] = []
        self._debug("init")

    @dataclass
    class Job:
        func: Func
        args: tuple[object, ...]
        interval: int
        is_running: bool = False
        last_at: datetime = field(default_factory=utc_now)

        def __str__(self) -> str:
            return str(self.func)

    def add_job(self, func: Func, interval: int, args: tuple[object, ...] = (), run_immediately: bool = False) -> None:
        job = Scheduler.Job(func, args, interval)
        self.jobs.append(job)
        if run_immediately:
            self.run_immediately_jobs.append(job)

    def _run_job(self, job: Job) -> None:
        self._debug(f"_run_job: {job}")
        if self.stopped:
            return
        try:
            job.func(*job.args)
            self._debug(f"_run_job: {job} done")
        except Exception as e:
            self.log.exception("scheduler error: %s", str(e))
            self._debug(f"_run_job: {job} error")
        finally:
            job.is_running = False

    def _start(self) -> None:
        self._debug(f"_start: jobs={len(self.jobs)}, run_immediately_jobs={len(self.run_immediately_jobs)}")
        for j in self.run_immediately_jobs:
            j.is_running = True
            j.last_at = utc_now()
            Thread(target=self._run_job, args=(j,)).start()
        while not self.stopped:
            for j in self.jobs:
                if not j.is_running and is_too_old(j.last_at, j.interval):
                    j.is_running = True
                    j.last_at = utc_now()
                    Thread(target=self._run_job, args=(j,)).start()
            time.sleep(self.loop_delay)

    def _debug(self, message: str) -> None:
        if self.debug:
            self.log.debug("Scheduler: %s", message)

    def start(self) -> None:
        Thread(target=self._start).start()

    def stop(self) -> None:
        self.stopped = True
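For reference, a minimal ConcurrentTasks sketch (illustrative only; the task keys and fetch function here are made up):

    from mm_std import ConcurrentTasks

    def fetch(x: int) -> int:
        return x * 2

    tasks = ConcurrentTasks(max_workers=3, timeout=10)
    tasks.add_task("a", fetch, args=(1,))
    tasks.add_task("b", fetch, args=(2,))
    tasks.execute()
    if not tasks.error:
        print(tasks.result)  # {"a": 2, "b": 4}
    else:
        print(tasks.exceptions, tasks.timeout_error)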
mm_std/config.py
ADDED
@@ -0,0 +1,78 @@
import io
from pathlib import Path
from typing import TypeVar

import yaml
from pydantic import BaseModel, ConfigDict, ValidationError

from .print_ import PrintFormat, print_console, print_json, print_plain, print_table
from .str import str_to_list
from .zip import read_text_from_zip_archive

T = TypeVar("T")


class BaseConfig(BaseModel):
    model_config = ConfigDict(extra="forbid")

    @classmethod
    def to_list_str_validator(
        cls,
        v: str | list[str] | None,
        *,
        lower: bool = False,
        unique: bool = False,
        remove_comments: bool = False,
        split_line: bool = False,
    ) -> list[str]:
        if v is None:
            return []
        if isinstance(v, str):
            return str_to_list(v, unique=unique, remove_comments=remove_comments, split_line=split_line, lower=lower)
        return v

    @classmethod
    def read_config(  # nosec
        cls: type[T],
        config_path: io.TextIOWrapper | str | Path,
        error_print_type: PrintFormat = PrintFormat.PLAIN,
        zip_password: str = "",
    ) -> T:
        try:
            # is it zip archive?
            if isinstance(config_path, str) and config_path.endswith(".zip"):
                config_path = str(Path(config_path).expanduser())
                return cls(**yaml.full_load(read_text_from_zip_archive(config_path, password=zip_password)))
            if isinstance(config_path, io.TextIOWrapper) and config_path.name.endswith(".zip"):
                config_path = str(Path(config_path.name).expanduser())
                return cls(**yaml.full_load(read_text_from_zip_archive(config_path, password=zip_password)))
            if isinstance(config_path, Path) and config_path.name.endswith(".zip"):
                config_path = str(config_path.expanduser())
                return cls(**yaml.full_load(read_text_from_zip_archive(config_path, password=zip_password)))

            # plain yml file
            if isinstance(config_path, str):
                return cls(**yaml.full_load(Path(config_path).expanduser().read_text()))
            elif isinstance(config_path, Path):
                return cls(**yaml.full_load(config_path.expanduser().read_text()))
            else:
                return cls(**yaml.full_load(config_path))
        except ValidationError as err:
            print_plain("config validation errors", error_print_type)
            json_errors = []
            rows = []
            for e in err.errors():
                loc = e["loc"]
                field = ".".join(str(lo) for lo in loc) if len(loc) > 0 else ""
                print_plain(f"{field} {e['msg']}", error_print_type)
                json_errors.append({field: e["msg"]})
                rows.append([field, e["msg"]])
            print_table("config validation errors", ["field", "message"], rows)
            print_json({"errors": json_errors}, error_print_type)
            exit(1)
        except Exception as err:
            if error_print_type == "json":
                print_json({"exception": str(err)})
            else:
                print_console(f"config error: {err!s}")
            exit(1)
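For reference, a minimal BaseConfig sketch (illustrative only; AppConfig, its fields, and the ~/app.yml path are hypothetical examples, not part of the package):

    from mm_std import BaseConfig

    class AppConfig(BaseConfig):
        name: str
        workers: int = 4

    # Reads and validates a YAML file; exits with an error report on failure.
    cfg = AppConfig.read_config("~/app.yml")
    print(cfg.name, cfg.workers)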
mm_std/crypto.py
ADDED
@@ -0,0 +1,13 @@
from cryptography.fernet import Fernet


def fernet_generate_key() -> str:
    return Fernet.generate_key().decode()


def fernet_encrypt(*, data: str, key: str) -> str:
    return Fernet(key).encrypt(data.encode()).decode()


def fernet_decrypt(*, encoded_data: str, key: str) -> str:
    return Fernet(key).decrypt(encoded_data).decode()
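For reference, a round-trip sketch of the crypto helpers (illustrative only):

    from mm_std import fernet_generate_key, fernet_encrypt, fernet_decrypt

    key = fernet_generate_key()
    token = fernet_encrypt(data="secret", key=key)
    assert fernet_decrypt(encoded_data=token, key=key) == "secret"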
mm_std/date.py
ADDED
@@ -0,0 +1,48 @@
import random
from datetime import UTC, datetime, timedelta

from dateutil import parser


def utc_now() -> datetime:
    return datetime.now(UTC)


def utc_delta(
    *,
    days: int | None = None,
    hours: int | None = None,
    minutes: int | None = None,
    seconds: int | None = None,
) -> datetime:
    params = {}
    if days:
        params["days"] = days
    if hours:
        params["hours"] = hours
    if minutes:
        params["minutes"] = minutes
    if seconds:
        params["seconds"] = seconds
    return datetime.now(UTC) + timedelta(**params)


def parse_date(value: str, ignore_tz: bool = False) -> datetime:
    return parser.parse(value, ignoretz=ignore_tz)


def utc_random(
    *,
    from_time: datetime | None = None,
    range_hours: int = 0,
    range_minutes: int = 0,
    range_seconds: int = 0,
) -> datetime:
    if from_time is None:
        from_time = utc_now()
    to_time = from_time + timedelta(hours=range_hours, minutes=range_minutes, seconds=range_seconds)
    return from_time + (to_time - from_time) * random.random()


def is_too_old(value: datetime | None, seconds: int) -> bool:
    return value is None or value < utc_delta(seconds=-1 * seconds)
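For reference, a minimal date-helper sketch (illustrative only; note that is_too_old is not re-exported in __init__.py, so it is imported from the module here):

    from mm_std import utc_now, utc_delta
    from mm_std.date import is_too_old

    checked_at = utc_delta(minutes=-10)          # 10 minutes in the past
    print(is_too_old(checked_at, seconds=300))   # True: older than 5 minutes
    print(utc_now() > checked_at)                # True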
mm_std/dict.py
ADDED
mm_std/env.py
ADDED
mm_std/fs.py
ADDED
@@ -0,0 +1,13 @@
from pathlib import Path


def read_text(path: str | Path) -> str:
    if isinstance(path, str):
        path = Path(path)
    return path.read_text()


def get_filename_without_extension(path: str | Path) -> str:
    if isinstance(path, str):
        path = Path(path)
    return path.stem
mm_std/json_.py
ADDED
@@ -0,0 +1,36 @@
import json
from dataclasses import asdict, is_dataclass
from datetime import date, datetime
from decimal import Decimal
from enum import Enum
from json import JSONEncoder
from typing import Any

from pydantic import BaseModel

from mm_std.result import Err, Ok


class CustomJSONEncoder(JSONEncoder):
    def default(self, o: Any) -> Any:
        if isinstance(o, Ok):
            return {"ok": o.ok}
        if isinstance(o, Err):
            return {"err": o.err}
        if isinstance(o, Decimal):
            return str(o)
        if isinstance(o, datetime | date):
            return o.isoformat()
        if is_dataclass(o) and not isinstance(o, type):
            return asdict(o)
        if isinstance(o, Enum):
            return o.value
        if isinstance(o, BaseModel):
            return o.model_dump()
        if isinstance(o, Exception):
            return str(o)
        return JSONEncoder.default(self, o)


def json_dumps(data: object) -> str:
    return json.dumps(data, cls=CustomJSONEncoder)
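For reference, a minimal json_dumps sketch (illustrative only; the Price dataclass is a made-up example):

    from dataclasses import dataclass
    from decimal import Decimal
    from mm_std import json_dumps, Ok

    @dataclass
    class Price:
        amount: Decimal

    print(json_dumps({"price": Price(Decimal("1.50")), "res": Ok(42)}))
    # {"price": {"amount": "1.50"}, "res": {"ok": 42}}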
mm_std/log.py
ADDED
@@ -0,0 +1,22 @@
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path


def init_logger(name: str, file_path: str | None = None, file_mkdir: bool = True, level: int = logging.DEBUG) -> logging.Logger:
    log = logging.getLogger(name)
    log.setLevel(level)
    log.propagate = False
    fmt = logging.Formatter(fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    console_handler.setFormatter(fmt)
    log.addHandler(console_handler)
    if file_path:
        if file_mkdir:
            Path(file_path).parent.mkdir(exist_ok=True)
        file_handler = RotatingFileHandler(file_path, maxBytes=10 * 1024 * 1024, backupCount=1)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(fmt)
        log.addHandler(file_handler)
    return log
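For reference, a minimal init_logger sketch (illustrative only; the "logs/app.log" path is hypothetical):

    from mm_std import init_logger

    log = init_logger("app", file_path="logs/app.log")  # console at DEBUG, rotating file at INFO
    log.info("started")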
mm_std/net.py
ADDED
@@ -0,0 +1,154 @@
import json
import socket
import time
from dataclasses import asdict, dataclass, field
from json import JSONDecodeError
from typing import Any, TypeVar, cast
from urllib.parse import urlencode

import pydash
import requests

from mm_std.result import Err, Ok, Result

FIREFOX_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:108.0) Gecko/20100101 Firefox/108.0"
SAFARI_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.3 Safari/605.1.15"  # fmt: skip # noqa
CHROME_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36"  # fmt: skip # noqa

T = TypeVar("T")


@dataclass
class HResponse:
    code: int = 0
    error: str | None = None
    body: str = ""
    headers: dict[str, str] = field(default_factory=dict)

    _json_data: Any = None
    _json_parsed = False
    _json_parsed_error = False

    def _parse_json(self) -> None:
        try:
            self._json_data = None
            self._json_data = json.loads(self.body)
            self._json_parsed_error = False
        except JSONDecodeError:
            self._json_parsed_error = True
        self._json_parsed = True

    @property
    def json(self) -> Any:
        if not self._json_parsed:
            self._parse_json()
        return self._json_data

    @property
    def json_parse_error(self) -> bool:
        if not self._json_parsed:
            self._parse_json()
        return self._json_parsed_error

    @property
    def content_type(self) -> str | None:
        for key in self.headers.keys():
            if key.lower() == "content-type":
                return self.headers[key]
        return None

    def to_err_result(self, error: str | None = None) -> Err:
        return Err(error or self.error or "error", data=asdict(self))

    def to_ok_result(self, result: T) -> Result[T]:
        return Ok(result, data=asdict(self))

    def is_error(self) -> bool:
        return self.error is not None

    def is_timeout_error(self) -> bool:
        return self.error == "timeout"

    def is_proxy_error(self) -> bool:
        return self.error == "proxy_error"

    def is_connection_error(self) -> bool:
        return self.error is not None and self.error.startswith("connection_error:")

    def to_dict(self) -> dict[str, Any]:
        return pydash.omit(asdict(self), "_json_data")


def hrequest(
    url: str,
    *,
    method: str = "GET",
    proxy: str | None = None,
    params: dict[str, Any] | None = None,
    headers: dict[str, Any] | None = None,
    cookies: dict[str, Any] | None = None,
    timeout: int = 10,
    user_agent: str | None = None,
    json_params: bool = True,
    auth: Any = None,
    verify: bool = True,
) -> HResponse:
    method = method.upper()
    proxies = {"http": proxy, "https": proxy} if proxy else None
    if not headers:
        headers = {}
    try:
        headers["user-agent"] = user_agent
        request_params = {
            "proxies": proxies,
            "timeout": timeout,
            "headers": headers,
            "cookies": cookies,
            "auth": auth,
            "verify": verify,
        }
        if method == "GET":
            request_params["params"] = params
        elif json_params:
            request_params["json"] = params
        else:
            request_params["data"] = params
        r = requests.request(method, url, **request_params)
        return HResponse(code=r.status_code, body=r.text, headers=dict(r.headers))
    except requests.exceptions.Timeout:
        return HResponse(error="timeout")
    except requests.exceptions.ProxyError:
        return HResponse(error="proxy_error")
    except requests.exceptions.ConnectionError as err:
        return HResponse(error=f"connection_error: {err}")
    except Exception as err:
        return HResponse(error=f"exception: {err}")


def check_port(ip: str, port: int, attempts: int = 3, sleep_seconds: float = 1, timeout: float = 1) -> bool:
    for _ in range(attempts):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        res = sock.connect_ex((ip, port)) == 0
        if res:
            return True
        time.sleep(sleep_seconds)
    return False


def get_free_local_port() -> int:
    sock = socket.socket()
    sock.bind(("", 0))
    port = sock.getsockname()[1]
    sock.close()
    return cast(int, port)


def add_query_params_to_url(url: str, params: dict[str, object]) -> str:
    query_params = urlencode({k: v for k, v in params.items() if v is not None})
    if query_params:
        url += f"?{query_params}"
    return url


hr = hrequest
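For reference, a minimal hrequest sketch (illustrative only; the URL is just an example endpoint):

    from mm_std import hrequest

    res = hrequest("https://httpbin.org/get", params={"q": "test"}, timeout=5)
    if res.is_error():
        print(res.error)            # "timeout", "proxy_error", "connection_error: ...", etc.
    elif res.json_parse_error:
        print(res.code, res.body)   # non-JSON body
    else:
        print(res.code, res.json)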
mm_std/print_.py
ADDED
@@ -0,0 +1,54 @@
import sys
from enum import Enum, unique
from typing import Any, NoReturn

import rich
from rich.console import Console
from rich.table import Table

from mm_std.json_ import json_dumps


@unique
class PrintFormat(str, Enum):
    PLAIN = "plain"
    TABLE = "table"
    JSON = "json"


def fatal(message: str, code: int = 1) -> NoReturn:
    print(message, file=sys.stderr)  # noqa: T201
    sys.exit(code)


def print_console(*messages: object, print_json: bool = False) -> None:
    if len(messages) == 1:
        message = messages[0]
        if isinstance(message, str):
            print(message)  # noqa: T201
        else:
            if print_json:
                rich.print_json(json_dumps(message))
            else:
                rich.print(message)
    else:
        rich.print(messages)


def print_plain(messages: object, print_format: PrintFormat | None = None) -> None:
    if print_format is None or print_format == PrintFormat.PLAIN:
        print(messages)  # noqa: T201


def print_json(data: object, print_format: PrintFormat | None = None) -> None:
    if print_format is None or print_format == PrintFormat.JSON:
        rich.print_json(json_dumps(data))


def print_table(title: str, columns: list[str], rows: list[list[Any]], print_format: PrintFormat | None = None) -> None:
    if print_format is None or print_format == PrintFormat.TABLE:
        table = Table(*columns, title=title)
        for row in rows:
            table.add_row(*(str(cell) for cell in row))
        console = Console()
        console.print(table)
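For reference, a minimal print_ sketch (illustrative only; the table data is made up):

    from mm_std import PrintFormat, print_table, print_json

    fmt = PrintFormat.TABLE
    print_table("users", ["name", "age"], [["alice", 30], ["bob", 25]], fmt)  # rendered
    print_json({"users": 2}, fmt)  # skipped: the selected format is TABLE, not JSON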
mm_std/py.typed
ADDED
File without changes
mm_std/random_.py
ADDED
@@ -0,0 +1,38 @@
import random
from collections.abc import Sequence
from decimal import Decimal
from typing import TypeVar

T = TypeVar("T", contravariant=True)


def random_choice(source: Sequence[T] | T | None) -> T | None:
    """Deprecated, don't use it"""
    if source is None:
        return None
    if isinstance(source, str):
        return source  # type: ignore[return-value]
    if isinstance(source, Sequence):
        if source:
            return random.choice(source)  # type:ignore[no-any-return]
        return None
    return source


def random_str_choice(source: Sequence[str] | str | None) -> str | None:
    if source is None:
        return None
    if isinstance(source, str):
        return source
    if isinstance(source, Sequence):
        if source:
            return random.choice(source)
        return None
    return source


def random_decimal(from_: Decimal, to: Decimal) -> Decimal:
    from_ndigits = abs(from_.as_tuple().exponent)  # type:ignore[arg-type]
    to_ndigits = abs(to.as_tuple().exponent)  # type:ignore[arg-type]
    ndigits = max(from_ndigits, to_ndigits)
    return Decimal(str(round(random.uniform(float(from_), float(to)), ndigits)))
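For reference, a minimal random_decimal sketch (illustrative only):

    from decimal import Decimal
    from mm_std import random_decimal

    value = random_decimal(Decimal("0.01"), Decimal("1.5"))
    print(value)  # e.g. Decimal("0.73"), rounded to the wider of the two exponents (2 digits)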
mm_std/result.py
ADDED
@@ -0,0 +1,266 @@
from __future__ import annotations

import time
from collections.abc import Callable
from typing import Any, Generic, Literal, NoReturn, TypeAlias, TypeVar

T = TypeVar("T", covariant=True)  # Success type
U = TypeVar("U")
F = TypeVar("F")
TBE = TypeVar("TBE", bound=BaseException)


class Ok(Generic[T]):
    __match_args__ = ("ok",)

    def __init__(self, value: T, data: Any = None) -> None:
        self._value = value
        self.data = data

    def __repr__(self) -> str:
        if self.data is None:
            return f"Ok({self._value!r})"
        else:
            return f"Ok({self._value!r}, data={self.data!r})"

    def __eq__(self, other: Any) -> bool:
        return isinstance(other, Ok) and self._value == other._value and self.data == other.data

    def __ne__(self, other: Any) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        return hash((True, self._value, self.data))

    def is_ok(self) -> Literal[True]:
        return True

    def is_err(self) -> Literal[False]:
        return False

    @property
    def ok(self) -> T:
        return self._value

    @property
    def err(self) -> None:
        return None

    def expect(self, _message: str) -> T:
        return self._value

    def expect_err(self, message: str) -> NoReturn:
        raise UnwrapError(self, message)

    def unwrap(self) -> T:
        return self._value

    def unwrap_err(self) -> NoReturn:
        raise UnwrapError(self, "Called `Result.unwrap_err()` on an `Ok` value")

    def unwrap_or(self, _default: U) -> T:
        return self._value

    def unwrap_or_else(self, op: object) -> T:
        return self._value

    def unwrap_or_raise(self, e: object) -> T:
        return self._value

    def map(self, op: Callable[[T], U]) -> Ok[U]:
        return Ok(op(self._value), data=self.data)

    def map_or(self, default: object, op: Callable[[T], U]) -> U:
        return op(self._value)

    def map_or_else(self, err_op: object, ok_op: Callable[[T], U]) -> U:
        """
        The contained result is `Ok`, so return original value mapped to
        a new value using the passed in `op` function.
        """
        return ok_op(self._value)

    def map_err(self, op: object) -> Ok[T]:
        """
        The contained result is `Ok`, so return `Ok` with the original value
        """
        return self

    def and_then(self, op: Callable[[T], U | Result[U]]) -> Result[U]:
        """
        The contained result is `Ok`, so return the result of `op` with the
        original value passed in. If return of `op` function is not Result, it will be a Ok value.
        """
        try:
            res = op(self._value)
            if not isinstance(res, Ok | Err):
                res = Ok(res)
        except Exception as e:
            res = Err(e)
        res.data = self.data
        return res

    def or_else(self, op: object) -> Ok[T]:
        return self

    def ok_or_err(self) -> T | str:
        return self._value

    def ok_or_none(self) -> T | None:
        return self._value


class Err:
    __match_args__ = ("err",)

    def __init__(self, value: str | Exception, data: Any = None) -> None:
        self._value = f"exception: {value}" if isinstance(value, Exception) else value
        self.data = data

    def __repr__(self) -> str:
        if self.data is None:
            return f"Err({self._value!r})"
        else:
            return f"Err({self._value!r}, data={self.data!r})"

    def __eq__(self, other: Any) -> bool:
        return isinstance(other, Err) and self._value == other._value and self.data == other.data

    def __ne__(self, other: Any) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        return hash((False, self._value, self.data))

    def is_ok(self) -> Literal[False]:
        return False

    def is_err(self) -> Literal[True]:
        return True

    @property
    def ok(self) -> None:
        """
        Return `None`.
        """
        return None

    @property
    def err(self) -> str:
        """
        Return the error.
        """
        return self._value

    def expect(self, message: str) -> NoReturn:
        """
        Raises an `UnwrapError`.
        """
        exc = UnwrapError(
            self,
            f"{message}: {self._value!r}",
        )
        if isinstance(self._value, BaseException):
            raise exc from self._value
        raise exc

    def expect_err(self, _message: str) -> str:
        """
        Return the inner value
        """
        return self._value

    def unwrap(self) -> NoReturn:
        """
        Raises an `UnwrapError`.
        """
        exc = UnwrapError(
            self,
            f"Called `Result.unwrap()` on an `Err` value: {self._value!r}",
        )
        if isinstance(self._value, BaseException):
            raise exc from self._value
        raise exc

    def unwrap_err(self) -> str:
        """
        Return the inner value
        """
        return self._value

    def unwrap_or(self, default: U) -> U:
        """
        Return `default`.
        """
        return default

    def unwrap_or_else(self, op: Callable[[str], T]) -> T:
        """
        The contained result is ``Err``, so return the result of applying
        ``op`` to the error value.
        """
        return op(self._value)

    def unwrap_or_raise(self, e: type[TBE]) -> NoReturn:
        """
        The contained result is ``Err``, so raise the exception with the value.
        """
        raise e(self._value)

    def map(self, op: object) -> Err:
        """
        Return `Err` with the same value
        """
        return self

    def map_or(self, default: U, op: object) -> U:
        """
        Return the default value
        """
        return default

    def map_or_else(self, err_op: Callable[[str], U], ok_op: object) -> U:
        """
        Return the result of the default operation
        """
        return err_op(self._value)

    def and_then(self, op: object) -> Err:
        """
        The contained result is `Err`, so return `Err` with the original value
        """
        return self

    def ok_or_err(self) -> T | str:
        return self._value

    def ok_or_none(self) -> T | None:
        return None


Result: TypeAlias = Ok[T] | Err


class UnwrapError(Exception):
    _result: Result[object]

    def __init__(self, result: Result[object], message: str) -> None:
        self._result = result
        super().__init__(message)

    @property
    def result(self) -> Result[Any]:
        return self._result


def try_ok(fn: Callable[..., Result[T]], *, args: tuple[object], attempts: int, delay: int | float = 0) -> Result[T]:
    if attempts <= 0:
        raise ValueError("attempts must be more than zero")
    res: Result[T] = Err("not started")
    for _ in range(attempts):
        res = fn(*args)
        if res.is_ok():
            return res
        if delay:
            time.sleep(delay)
    return res
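For reference, a minimal Result sketch (illustrative only; parse_int is a made-up example function):

    from mm_std import Ok, Err, Result

    def parse_int(value: str) -> Result[int]:
        try:
            return Ok(int(value))
        except ValueError as e:
            return Err(e)

    match parse_int("42"):          # __match_args__ makes structural matching work
        case Ok(value):
            print("ok:", value)
        case Err(error):
            print("err:", error)

    print(parse_int("7").and_then(lambda v: v * 2))  # Ok(14): non-Result returns are wrapped in Ok
    print(parse_int("x").unwrap_or(0))               # 0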
mm_std/str.py
ADDED
@@ -0,0 +1,106 @@
import re
from collections.abc import Iterable
from decimal import Decimal

import pydash


def str_to_list(
    data: str | Iterable[object] | None,
    lower: bool = False,
    remove_comments: bool = False,
    unique: bool = False,
    split_line: bool = False,
) -> list[str]:
    match data:
        case None | "" | []:
            return []
        case str():
            if lower:
                data = data.lower()
            result = [line.strip() for line in data.split("\n") if line.strip()]
            if remove_comments:
                result = [line.split("#")[0].strip() for line in result]
                result = [line for line in result if line]
            if unique:
                result = pydash.uniq(result)

            if split_line:
                new_result = []
                for line in result:
                    new_result.extend(line.split())
                return new_result

            return result
        case Iterable():
            return [str(x) for x in data]
        case _:
            raise ValueError("data has a wrong type")


def number_with_separator(
    value: float | str | Decimal | None,
    prefix: str = "",
    suffix: str = "",
    separator: str = "_",
    hide_zero: bool = False,
    round_digits: int = 2,
) -> str:
    if value is None or value == "":
        return ""
    if float(value) == 0:
        return "" if hide_zero else f"{prefix}0{suffix}"
    if float(value) > 1000:
        value = "".join(
            reversed([x + (separator if i and not i % 3 else "") for i, x in enumerate(reversed(str(int(value))))]),
        )
    else:
        value = round(value, round_digits)  # type:ignore[arg-type,assignment]

    return f"{prefix}{value}{suffix}"


def str_starts_with_any(value: str, prefixes: list[str]) -> bool:
    """check if str starts with any of prefixes"""
    for prefix in prefixes:
        if value.startswith(prefix):
            return True
    return False


def str_ends_with_any(value: str, prefixes: list[str]) -> bool:
    """check if str ends with any of prefixes"""
    for prefix in prefixes:
        if value.endswith(prefix):
            return True
    return False


def split_on_plus_minus_tokens(value: str) -> list[str]:
    value = "".join(value.split())
    if not value:
        raise ValueError("value is empty")
    if "++" in value:
        raise ValueError("++ in value")
    if "--" in value:
        raise ValueError("-- in value")
    if value.endswith("-"):
        raise ValueError("ends with -")
    if value.endswith("+"):
        raise ValueError("ends with +")

    if not value.startswith("+") and not value.startswith("-"):
        value = "+" + value

    result: list[str] = []
    rest_value = value
    while True:
        if not rest_value:
            return result
        items = re.split(r"[+\-]", rest_value)
        if rest_value.startswith("+"):
            result.append("+" + items[1])
            rest_value = rest_value.removeprefix("+" + items[1])
        elif rest_value.startswith("-"):
            result.append("-" + items[1])
            rest_value = rest_value.removeprefix("-" + items[1])
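For reference, a minimal str_to_list sketch (illustrative only):

    from mm_std import str_to_list

    text = """
    BTC  # comment
    eth
    eth
    """
    print(str_to_list(text, lower=True, remove_comments=True, unique=True))
    # ["btc", "eth"]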
mm_std/telegram.py
ADDED
@@ -0,0 +1,35 @@
import time

import pydash

from mm_std import Err, Ok, Result
from mm_std.net import hrequest


def send_telegram_message(bot_token: str, chat_id: int, message: str, long_message_delay: int = 3) -> Result[list[int]]:
    messages = _split_string(message, 4096)
    responses = []
    result = []
    while True:
        text = messages.pop(0)
        params = {"chat_id": chat_id, "text": text}
        res = hrequest(f"https://api.telegram.org/bot{bot_token}/sendMessage", method="post", params=params)
        responses.append(res.json)
        if res.error is not None:
            return Err(res.error, data={"last_res": res.to_dict(), "responses": responses})

        message_id = pydash.get(res.json, "result.message_id")
        if message_id:
            result.append(message_id)
        else:
            return Err("unknown_response", data={"last_res": res.to_dict(), "responses": responses})

        if len(messages):
            time.sleep(long_message_delay)
        else:
            break
    return Ok(result, data={"responses": responses})


def _split_string(text: str, chars_per_string: int) -> list[str]:
    return [text[i : i + chars_per_string] for i in range(0, len(text), chars_per_string)]
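For reference, a minimal send_telegram_message sketch (illustrative only; BOT_TOKEN and the chat id are placeholders for real credentials):

    from mm_std import send_telegram_message

    res = send_telegram_message("BOT_TOKEN", 123456789, "deploy finished")
    if res.is_ok():
        print("sent message ids:", res.ok)
    else:
        print("failed:", res.err)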
mm_std/types.py
ADDED
mm_std/zip.py
ADDED
@@ -0,0 +1,8 @@
from zipfile import ZipFile


def read_text_from_zip_archive(zip_archive_path: str, filename: str | None = None, password: str | None = None) -> str:
    with ZipFile(zip_archive_path) as zipfile:
        if filename is None:
            filename = zipfile.filelist[0].filename
        return zipfile.read(filename, pwd=password.encode() if password else None).decode()
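For reference, a minimal zip helper sketch (illustrative only; config.zip is a hypothetical archive):

    from mm_std import read_text_from_zip_archive

    text = read_text_from_zip_archive("config.zip", password="secret")  # first file in the archive
    print(text)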
mm_std-0.0.1.dist-info/METADATA
ADDED
@@ -0,0 +1,28 @@
Metadata-Version: 2.1
Name: mm-std
Version: 0.0.1
Requires-Python: >=3.11
Requires-Dist: requests ~=2.32.3
Requires-Dist: PySocks ~=1.7.1
Requires-Dist: pydash ~=8.0.0
Requires-Dist: python-dateutil ~=2.9.0
Requires-Dist: pydantic ~=2.8.2
Requires-Dist: python-dotenv ~=1.0.1
Requires-Dist: PyYAML ~=6.0.1
Requires-Dist: cryptography ~=43.0.0
Requires-Dist: rich
Provides-Extra: dev
Requires-Dist: build ~=1.2.1 ; extra == 'dev'
Requires-Dist: twine ~=5.1.0 ; extra == 'dev'
Requires-Dist: pytest ~=8.3.2 ; extra == 'dev'
Requires-Dist: pytest-xdist ~=3.6.1 ; extra == 'dev'
Requires-Dist: pytest-httpserver ~=1.0.8 ; extra == 'dev'
Requires-Dist: coverage ~=7.6.0 ; extra == 'dev'
Requires-Dist: ruff ~=0.5.2 ; extra == 'dev'
Requires-Dist: pip-audit ~=2.7.0 ; extra == 'dev'
Requires-Dist: bandit ~=1.7.7 ; extra == 'dev'
Requires-Dist: mypy ~=1.11.0 ; extra == 'dev'
Requires-Dist: types-python-dateutil ~=2.9.0 ; extra == 'dev'
Requires-Dist: types-requests ~=2.32.0.20240523 ; extra == 'dev'
Requires-Dist: types-PyYAML ~=6.0.12.12 ; extra == 'dev'
mm_std-0.0.1.dist-info/RECORD
ADDED
@@ -0,0 +1,24 @@
mm_std/__init__.py,sha256=4pa6AjO-0mfrfBi6FS5sqOk1HFAKt2OMovA12ys7UvQ,2288
mm_std/command.py,sha256=r1n9ZHyMFhNkNOH9grRCm5J0hhX4_v0c2wdaal8iCZY,1270
mm_std/concurrency.py,sha256=XSPYLUsGO1G0mcsdTn_Hwg5nb694-nUMhwQ6TvOrMCc,5340
mm_std/config.py,sha256=ftYrsZ4i715I20s2q8vqYyniMSCIAVrh-9TtAzsyMfQ,3147
mm_std/crypto.py,sha256=jdk0_TCmeU0pPXMyz9xH6kQHSjjZ9GcGClBwQps5vBo,340
mm_std/date.py,sha256=eJFQHY2sxO6e7XBAsXoki0dC5XvqiE-mA4LDUXzw3DM,1200
mm_std/dict.py,sha256=kJBPVG9vEqHiSgKKoji8gVGL1yEBbxAmFNn0zz17AUg,180
mm_std/env.py,sha256=5zaR9VeIfObN-4yfgxoFeU5IM1GDeZZj9SuYf7t9sOA,125
mm_std/fs.py,sha256=RwarNRJq3tIMG6LVX_g03hasfYpjYFh_O27oVDt5IPQ,291
mm_std/json_.py,sha256=12uGLwmnrRA63QI0nUx-UU33zXcyShbsKCoOQiIJ8io,1016
mm_std/log.py,sha256=6ux6njNKc_ZCQlvWn1FZR6vcSY2Cem-mQzmNXvsg5IE,913
mm_std/net.py,sha256=-xt2tEov3vhXR44Rlj4JohL-g1n7k7JUobo4kHTubzg,4702
mm_std/print_.py,sha256=mMixwfdrLEYW15ez7_QxXdrV-d38q9XJP8tB8F7P2pI,1553
mm_std/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
mm_std/random_.py,sha256=KZauQzHsJaCVIeyHrJ74hiH34hdFE7pUxuv5i0Hl28g,1175
mm_std/result.py,sha256=EBump_FnfPk7kU7-e_Y95qrOO-myW25XGyNdjExEqrI,6989
mm_std/str.py,sha256=nG5XF5870xM2PAvU0LZrJDk-d54LYwRLGnSahIekOVw,3151
mm_std/telegram.py,sha256=QrHPnsy0LTeqpd-g3RaHhk7gWIfHZEgnMs-S5DLW-vU,1220
mm_std/types.py,sha256=KpFtJ-BTmDfmmFeOSlgq6cMbCfGGOQjh1oWvdcrW-kw,116
mm_std/zip.py,sha256=2EXcae4HO5U4kObj2Lj8jl5F2OUpT-WRlJybTyFzt6I,370
mm_std-0.0.1.dist-info/METADATA,sha256=Zm0BysQQEMeSy21mDQSSfm3mgvKpftqCnAQf0tJ0DKE,1055
mm_std-0.0.1.dist-info/WHEEL,sha256=Rp8gFpivVLXx-k3U95ozHnQw8yDcPxmhOpn_Gx8d5nc,91
mm_std-0.0.1.dist-info/top_level.txt,sha256=KOhw9T6TIeq7JXPPeJXvOxBaQI3gMYLTUH5Mtil3BzA,7
mm_std-0.0.1.dist-info/RECORD,,
mm_std-0.0.1.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
mm_std