hh-applicant-tool 0.6.12-py3-none-any.whl → 1.4.7-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- hh_applicant_tool/__init__.py +1 -0
- hh_applicant_tool/__main__.py +1 -1
- hh_applicant_tool/ai/base.py +2 -0
- hh_applicant_tool/ai/openai.py +24 -30
- hh_applicant_tool/api/client.py +82 -98
- hh_applicant_tool/api/errors.py +57 -8
- hh_applicant_tool/constants.py +0 -3
- hh_applicant_tool/datatypes.py +291 -0
- hh_applicant_tool/main.py +236 -82
- hh_applicant_tool/operations/apply_similar.py +268 -348
- hh_applicant_tool/operations/authorize.py +245 -70
- hh_applicant_tool/operations/call_api.py +18 -8
- hh_applicant_tool/operations/check_negotiations.py +102 -0
- hh_applicant_tool/operations/check_proxy.py +30 -0
- hh_applicant_tool/operations/config.py +119 -18
- hh_applicant_tool/operations/install.py +34 -0
- hh_applicant_tool/operations/list_resumes.py +24 -10
- hh_applicant_tool/operations/log.py +77 -0
- hh_applicant_tool/operations/migrate_db.py +65 -0
- hh_applicant_tool/operations/query.py +120 -0
- hh_applicant_tool/operations/refresh_token.py +14 -13
- hh_applicant_tool/operations/reply_employers.py +148 -167
- hh_applicant_tool/operations/settings.py +95 -0
- hh_applicant_tool/operations/uninstall.py +26 -0
- hh_applicant_tool/operations/update_resumes.py +21 -10
- hh_applicant_tool/operations/whoami.py +40 -7
- hh_applicant_tool/storage/__init__.py +4 -0
- hh_applicant_tool/storage/facade.py +24 -0
- hh_applicant_tool/storage/models/__init__.py +0 -0
- hh_applicant_tool/storage/models/base.py +169 -0
- hh_applicant_tool/storage/models/contact.py +16 -0
- hh_applicant_tool/storage/models/employer.py +12 -0
- hh_applicant_tool/storage/models/negotiation.py +16 -0
- hh_applicant_tool/storage/models/resume.py +19 -0
- hh_applicant_tool/storage/models/setting.py +6 -0
- hh_applicant_tool/storage/models/vacancy.py +36 -0
- hh_applicant_tool/storage/queries/migrations/.gitkeep +0 -0
- hh_applicant_tool/storage/queries/schema.sql +119 -0
- hh_applicant_tool/storage/repositories/__init__.py +0 -0
- hh_applicant_tool/storage/repositories/base.py +176 -0
- hh_applicant_tool/storage/repositories/contacts.py +19 -0
- hh_applicant_tool/storage/repositories/employers.py +13 -0
- hh_applicant_tool/storage/repositories/negotiations.py +12 -0
- hh_applicant_tool/storage/repositories/resumes.py +14 -0
- hh_applicant_tool/storage/repositories/settings.py +34 -0
- hh_applicant_tool/storage/repositories/vacancies.py +8 -0
- hh_applicant_tool/storage/utils.py +49 -0
- hh_applicant_tool/utils/__init__.py +31 -0
- hh_applicant_tool/utils/attrdict.py +6 -0
- hh_applicant_tool/utils/binpack.py +167 -0
- hh_applicant_tool/utils/config.py +55 -0
- hh_applicant_tool/utils/dateutil.py +19 -0
- hh_applicant_tool/{jsonc.py → utils/jsonc.py} +12 -6
- hh_applicant_tool/utils/jsonutil.py +61 -0
- hh_applicant_tool/utils/log.py +144 -0
- hh_applicant_tool/utils/misc.py +12 -0
- hh_applicant_tool/utils/mixins.py +220 -0
- hh_applicant_tool/utils/string.py +27 -0
- hh_applicant_tool/utils/terminal.py +19 -0
- hh_applicant_tool/utils/user_agent.py +17 -0
- hh_applicant_tool-1.4.7.dist-info/METADATA +628 -0
- hh_applicant_tool-1.4.7.dist-info/RECORD +67 -0
- hh_applicant_tool/ai/blackbox.py +0 -55
- hh_applicant_tool/color_log.py +0 -35
- hh_applicant_tool/mixins.py +0 -13
- hh_applicant_tool/operations/clear_negotiations.py +0 -113
- hh_applicant_tool/operations/delete_telemetry.py +0 -30
- hh_applicant_tool/operations/get_employer_contacts.py +0 -293
- hh_applicant_tool/telemetry_client.py +0 -106
- hh_applicant_tool/types.py +0 -45
- hh_applicant_tool/utils.py +0 -104
- hh_applicant_tool-0.6.12.dist-info/METADATA +0 -349
- hh_applicant_tool-0.6.12.dist-info/RECORD +0 -33
- {hh_applicant_tool-0.6.12.dist-info → hh_applicant_tool-1.4.7.dist-info}/WHEEL +0 -0
- {hh_applicant_tool-0.6.12.dist-info → hh_applicant_tool-1.4.7.dist-info}/entry_points.txt +0 -0
hh_applicant_tool/storage/repositories/settings.py (new file):

```diff
@@ -0,0 +1,34 @@
+from typing import TypeVar
+
+from ..models.setting import SettingModel
+from .base import BaseRepository
+
+Default = TypeVar("Default")
+
+
+class SettingsRepository(BaseRepository):
+    pkey: str = "key"
+    model = SettingModel
+
+    def get_value(
+        self,
+        key: str,
+        /,
+        default: Default = None,
+    ) -> str | Default:
+        setting = self.get(key)
+        return setting.value if setting else default
+
+    def set_value(
+        self,
+        key: str,
+        value: str,
+        /,
+        commit: bool | None = None,
+    ) -> None:
+        self.save(self.model(key=key, value=value), commit=commit)
+
+    def delete_value(self, key: str, /, commit: bool | None = None) -> None:
+        setting = self.get(key)
+        if setting:
+            self.delete(setting, commit=commit)
```
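A minimal usage sketch of the new repository. BaseRepository's constructor and the exact semantics of its get()/save()/delete() methods are not visible in this diff, so the wiring below is assumed:

```python
# Hypothetical wiring; the constructor signature is an assumption.
import sqlite3

conn = sqlite3.connect("hh.db")
settings = SettingsRepository(conn)  # assumed: repository wraps a connection

settings.set_value("access_token_expires", "1700000000", commit=True)
print(settings.get_value("access_token_expires"))     # "1700000000"
print(settings.get_value("no_such_key", "fallback"))  # "fallback"
settings.delete_value("access_token_expires", commit=True)
```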
hh_applicant_tool/storage/utils.py (new file):

```diff
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import logging
+import re
+import sqlite3
+from pathlib import Path
+
+QUERIES_PATH: Path = Path(__file__).parent / "queries"
+MIGRATION_PATH: Path = QUERIES_PATH / "migrations"
+
+
+logger: logging.Logger = logging.getLogger(__package__)
+
+
+def init_db(conn: sqlite3.Connection) -> None:
+    """Creates the database schema"""
+    conn.executescript(
+        (QUERIES_PATH / "schema.sql").read_text(encoding="utf-8")
+    )
+    logger.debug("Database initialized")
+
+
+def list_migrations() -> list[str]:
+    """Returns migration names without the extension, sorted by date"""
+    if not MIGRATION_PATH.exists():
+        return []
+    return sorted([f.stem for f in MIGRATION_PATH.glob("*.sql")])
+
+
+def apply_migration(conn: sqlite3.Connection, name: str) -> None:
+    """Finds the file by name and executes its contents"""
+    conn.executescript(
+        (MIGRATION_PATH / f"{name}.sql").read_text(encoding="utf-8")
+    )
+
+
+def model2table(o: type) -> str:
+    name: str = o.__name__
+    if name.endswith("Model"):
+        name = name[:-5]
+    name = re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()
+    # y -> ies (when y follows a consonant: vacancy -> vacancies)
+    if name.endswith("y") and not name.endswith(("ay", "ey", "iy", "oy", "uy")):
+        return name[:-1] + "ies"
+    # s, x, z, ch, sh -> +es (bus -> buses, match -> matches)
+    if name.endswith(("s", "x", "z", "ch", "sh")):
+        return name + "es"
+    # Default case
+    return name + "s"
```
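model2table derives a table name from a model class: strip the Model suffix, convert CamelCase to snake_case, then apply the pluralization rules commented above. A quick illustration (VacancyModel and SettingModel exist under storage/models/; EmployerContactModel is hypothetical, added to show the snake_case path):

```python
class VacancyModel: ...
class SettingModel: ...
class EmployerContactModel: ...  # hypothetical

print(model2table(VacancyModel))          # vacancies  (y -> ies)
print(model2table(SettingModel))          # settings   (default +s)
print(model2table(EmployerContactModel))  # employer_contacts
```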
hh_applicant_tool/utils/__init__.py (new file):

```diff
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from .attrdict import AttrDict
+from .config import Config, get_config_path
+from .dateutil import (
+    DATETIME_FORMAT,
+    parse_api_datetime,
+    try_parse_datetime,
+)
+from .misc import calc_hash, print_err
+from .string import bool2str, list2str, rand_text, shorten
+from .terminal import setup_terminal
+from .user_agent import hh_android_useragent
+
+# Add all public symbols to __all__ for consistent import behavior
+__all__ = [
+    "AttrDict",
+    "Config",
+    "get_config_path",
+    "DATETIME_FORMAT",
+    "parse_api_datetime",
+    "try_parse_datetime",
+    "shorten",
+    "rand_text",
+    "bool2str",
+    "list2str",
+    "calc_hash",
+    "hh_android_useragent",
+    "setup_terminal",
+    "print_err",
+]
```
hh_applicant_tool/utils/binpack.py (new file):

```diff
@@ -0,0 +1,167 @@
+# A format for sending data over the network that compresses better than JSON.
+# Auto-generated code written to my spec. Its advantages include support for
+# dates and Map keys of any type
+from __future__ import annotations
+
+import gzip
+import io
+import struct
+import zlib
+from datetime import datetime
+from typing import Any, Callable, Final
+
+# ---- Constants ----
+
+BINARY_TYPES: Final = {
+    type(None): 0x00,
+    dict: 0x01,
+    str: 0x02,
+    int: 0x03,
+    float: 0x04,
+    list: 0x05,
+    bool: 0x06,
+    datetime: 0x07,
+}
+
+# Type codes (for deserialization)
+T_NULL, T_MAP, T_STR, T_INT, T_FLOAT, T_LIST, T_BOOL, T_DT = range(8)
+
+# Compression
+COMP_NONE, COMP_ZLIB, COMP_GZIP = range(3)
+
+# Packing layouts
+U32 = struct.Struct("<I")
+S64 = struct.Struct("<q")
+F64 = struct.Struct("<d")
+U8 = struct.Struct("<B")
+
+# ---- Compression Logic (Pure functions) ----
+
+
+def gzip_compress(data: bytes) -> bytes:
+    buf = io.BytesIO()
+    with gzip.GzipFile(fileobj=buf, mode="wb") as f:
+        f.write(data)
+    return buf.getvalue()
+
+
+def gzip_decompress(data: bytes) -> bytes:
+    with gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") as f:
+        return f.read()
+
+
+COMPRESSORS: dict[int, Callable[[bytes], bytes]] = {
+    COMP_ZLIB: zlib.compress,
+    COMP_GZIP: gzip_compress,
+    COMP_NONE: lambda d: d,
+}
+
+DECOMPRESSORS: dict[int, Callable[[bytes], bytes]] = {
+    COMP_ZLIB: zlib.decompress,
+    COMP_GZIP: gzip_decompress,
+    COMP_NONE: lambda d: d,
+}
+
+
+def get_best_algo() -> int:
+    if zlib:
+        return COMP_ZLIB
+    if gzip:
+        return COMP_GZIP
+    return COMP_NONE
+
+
+# ---- Serialization (Recursive Functions) ----
+
+
+def write_value(value: Any) -> bytes:
+    """Recursively converts a value to bytes (pure)"""
+    match value:
+        case None:
+            return bytes([T_NULL])
+
+        case bool():
+            return bytes([T_BOOL]) + U8.pack(1 if value else 0)
+
+        case datetime():
+            return bytes([T_DT]) + F64.pack(value.timestamp())
+
+        case int():
+            return bytes([T_INT]) + S64.pack(value)
+
+        case float():
+            return bytes([T_FLOAT]) + F64.pack(value)
+
+        case str():
+            data = value.encode("utf-8")
+            return bytes([T_STR]) + U32.pack(len(data)) + data
+
+        case list():
+            content = b"".join(map(write_value, value))
+            return bytes([T_LIST]) + U32.pack(len(value)) + content
+
+        case dict():
+            content = b"".join(
+                write_value(k) + write_value(v) for k, v in value.items()
+            )
+            return bytes([T_MAP]) + U32.pack(len(value)) + content
+
+        case _:
+            raise TypeError(f"Unsupported type: {type(value)}")
+
+
+# ---- Deserialization (Stream-based but stateless) ----
+
+
+def read_value(stream: io.BytesIO) -> Any:
+    """Reads a value from a byte stream"""
+    type_byte = stream.read(1)
+    if not type_byte:
+        return None
+
+    match type_byte[0]:
+        case 0x00:  # NULL
+            return None
+        case 0x06:  # BOOL
+            return U8.unpack(stream.read(1))[0] == 1
+        case 0x07:  # DT
+            return datetime.fromtimestamp(F64.unpack(stream.read(8))[0])
+        case 0x03:  # INT
+            return S64.unpack(stream.read(8))[0]
+        case 0x04:  # FLOAT
+            return F64.unpack(stream.read(8))[0]
+        case 0x02:  # STR
+            size = U32.unpack(stream.read(4))[0]
+            return stream.read(size).decode("utf-8")
+        case 0x05:  # LIST
+            size = U32.unpack(stream.read(4))[0]
+            return [read_value(stream) for _ in range(size)]
+        case 0x01:  # MAP
+            size = U32.unpack(stream.read(4))[0]
+            return {read_value(stream): read_value(stream) for _ in range(size)}
+        case t:
+            raise TypeError(f"Unknown type code: {t:#x}")
+
+
+# ---- Public API (Composition) ----
+
+
+def serialize(value: Any, compress: bool = True) -> bytes:
+    raw_payload = write_value(value)
+    algo = get_best_algo() if compress else COMP_NONE
+
+    compressor = COMPRESSORS.get(algo, COMPRESSORS[COMP_NONE])
+    return bytes([algo]) + compressor(raw_payload)
+
+
+def deserialize(data: bytes) -> Any:
+    if not data:
+        raise ValueError("Empty payload")
+
+    algo, payload = data[0], data[1:]
+
+    if algo not in DECOMPRESSORS:
+        raise ValueError(f"Unknown compression type: {algo}")
+
+    raw_data = DECOMPRESSORS[algo](payload)
+    return read_value(io.BytesIO(raw_data))
```
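The wire format is one compression-algorithm byte followed by the (possibly compressed) payload, where every value is a type-code byte plus a fixed-width or length-prefixed body. A self-contained round-trip check using only the functions above:

```python
# Mixed-type keys are the format's selling point over JSON.
payload = {"vacancy_id": 12345, "tags": ["python", "remote"], 1: True, None: 0.5}
assert deserialize(serialize(payload)) == payload

raw = serialize(payload, compress=False)
assert raw[0] == COMP_NONE  # header byte: compression algorithm
assert raw[1] == T_MAP      # first payload byte: type code of the root value
```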
hh_applicant_tool/utils/config.py (new file):

```diff
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+import platform
+from functools import cache
+from os import getenv
+from pathlib import Path
+from threading import Lock
+from typing import Any
+
+from . import jsonutil as json
+
+
+@cache
+def get_config_path() -> Path:
+    match platform.system():
+        case "Windows":
+            return Path(getenv("APPDATA", Path.home() / "AppData" / "Roaming"))
+        case "Darwin":
+            return Path.home() / "Library" / "Application Support"
+        case _:
+            return Path(getenv("XDG_CONFIG_HOME", Path.home() / ".config"))
+
+
+class Config(dict):
+    def __init__(self, config_path: str | Path | None = None):
+        self._config_path = Path(config_path or get_config_path())
+        self._lock = Lock()
+        self.load()
+
+    def load(self) -> None:
+        if self._config_path.exists():
+            with self._lock:
+                with self._config_path.open(
+                    "r", encoding="utf-8", errors="replace"
+                ) as f:
+                    self.update(json.load(f))
+
+    def save(self, *args: Any, **kwargs: Any) -> None:
+        self.update(*args, **kwargs)
+        self._config_path.parent.mkdir(exist_ok=True, parents=True)
+        with self._lock:
+            with self._config_path.open(
+                "w+", encoding="utf-8", errors="replace"
+            ) as fp:
+                json.dump(
+                    self,
+                    fp,
+                    indent=2,
+                    sort_keys=True,
+                )
+
+    __getitem__ = dict.get
+
+    def __repr__(self) -> str:
+        return str(self._config_path)
```
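A short sketch of Config in use; the path below is hypothetical (the real tool resolves its location via get_config_path()):

```python
cfg = Config("/tmp/hh_demo/config.json")  # hypothetical path
cfg.save(telemetry=False, delay=5)        # update(), then write to disk
print(cfg["delay"])    # 5
print(cfg["missing"])  # None -- __getitem__ is dict.get, so no KeyError
```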
hh_applicant_tool/utils/dateutil.py (new file):

```diff
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Any
+
+DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
+
+
+def parse_api_datetime(dt: str) -> datetime:
+    return datetime.strptime(dt, DATETIME_FORMAT)
+
+
+def try_parse_datetime(dt: Any) -> datetime | Any:
+    for parse in (datetime.fromisoformat, parse_api_datetime):
+        try:
+            return parse(dt)
+        except (ValueError, TypeError):
+            pass
+    return dt
```
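DATETIME_FORMAT matches offsets written without a colon (e.g. +0300), presumably the shape the HH API returns; try_parse_datetime hands back its input unchanged when both parsers fail:

```python
parsed = parse_api_datetime("2024-05-01T12:30:45+0300")
print(parsed.isoformat())  # 2024-05-01T12:30:45+03:00

assert try_parse_datetime("not a date") == "not a date"
```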
hh_applicant_tool/jsonc.py → hh_applicant_tool/utils/jsonc.py:

```diff
@@ -1,11 +1,11 @@
-# Unused
 """Parser for JSON with comments"""
 
-import re
+import ast
 import enum
+import re
 from dataclasses import dataclass
-import ast
 from typing import Any, Iterator
+
 # from collections import OrderedDict
 
 
```
```diff
@@ -42,7 +42,8 @@ def tokenize(s: str) -> Iterator[Token]:
 class JSONCParser:
     def parse(self, s: str) -> Any:
         self.token_it = filter(
-            lambda t: t.token_type not in [TokenType.COMMENT, TokenType.WHITESPACE],
+            lambda t: t.token_type
+            not in [TokenType.COMMENT, TokenType.WHITESPACE],
             tokenize(s),
         )
         self.token: Token
```
```diff
@@ -90,7 +91,9 @@ class JSONCParser:
             num = self.token.value
             return float(num) if "." in num else int(num)
         elif self.match(TokenType.KEYWORD):
-            return {"null": None, "true": True, "false": False}[self.token.value]
+            return {"null": None, "true": True, "false": False}[
+                self.token.value
+            ]
         else:
             raise SyntaxError(f"Unexpected token: {self.token.token_type.name}")
 
```
```diff
@@ -103,7 +106,10 @@ class JSONCParser:
         # print(f"{self.token =}, {self.next_token =}")
 
     def match(self, token_type: TokenType) -> bool:
-        if self.next_token.token_type == token_type:
+        if (
+            self.next_token is not None
+            and self.next_token.token_type == token_type
+        ):
             self.advance()
             return True
         return False
```
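For reference, the parser entry point after these changes; plain JSON is the safe subset to demonstrate, since the hunks show comment tokens being filtered out but not the comment syntax itself:

```python
# Keyword and number handling are visible in the hunks above.
parser = JSONCParser()
print(parser.parse('{"token": null, "retries": 3, "debug": true}'))
# {'token': None, 'retries': 3, 'debug': True}
```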
hh_applicant_tool/utils/jsonutil.py (new file):

```diff
@@ -0,0 +1,61 @@
+import datetime as dt
+import json
+from typing import Any
+
+# class DateAwareJSONEncoder(json.JSONEncoder):
+#     def default(self, o):
+#         if isinstance(o, dt.datetime):
+#             return o.isoformat()
+
+#         return super().default(o)
+
+
+# Hack so that dates can be stored in the key-value store
+class DateAwareJSONEncoder(json.JSONEncoder):
+    def default(self, o):
+        if isinstance(o, dt.datetime):
+            return int(o.timestamp())
+
+        return super().default(o)
+
+
+# def date_parser_hook(dct):
+#     for k, v in dct.items():
+#         if isinstance(v, str):
+#             try:
+#                 dct[k] = dt.datetime.fromisoformat(v)
+#             except (ValueError, TypeError):
+#                 pass
+#     return dct
+
+
+# class DateAwareJSONDecoder(json.JSONDecoder):
+#     def __init__(self, *args, **kwargs):
+#         super().__init__(*args, object_hook=date_parser_hook, **kwargs)
+
+
+def dumps(obj, *args: Any, **kwargs: Any) -> str:
+    kwargs.setdefault("cls", DateAwareJSONEncoder)
+    kwargs.setdefault("ensure_ascii", False)
+    return json.dumps(obj, *args, **kwargs)
+
+
+def dump(obj, fp, *args: Any, **kwargs: Any) -> None:
+    kwargs.setdefault("cls", DateAwareJSONEncoder)
+    kwargs.setdefault("ensure_ascii", False)
+    json.dump(obj, fp, *args, **kwargs)
+
+
+def loads(s, *args: Any, **kwargs: Any) -> Any:
+    # kwargs.setdefault("object_hook", date_parser_hook)
+    return json.loads(s, *args, **kwargs)
+
+
+def load(fp, *args: Any, **kwargs: Any) -> Any:
+    # kwargs.setdefault("object_hook", date_parser_hook)
+    return json.load(fp, *args, **kwargs)
+
+
+if __name__ == "__main__":
+    d = {"created_at": dt.datetime.now()}
+    print(loads(dumps(d)))
```
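Note the asymmetry the __main__ block demonstrates: the encoder flattens datetimes to integer Unix timestamps, while the decoder hook stays commented out, so values come back as plain ints:

```python
import datetime as dt

s = dumps({"created_at": dt.datetime(2024, 5, 1)})
print(s)         # {"created_at": <unix timestamp as int>}
print(loads(s))  # {'created_at': <int>} -- not a datetime
```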
hh_applicant_tool/utils/log.py (new file):

```diff
@@ -0,0 +1,144 @@
+import enum
+import logging
+import re
+from collections import deque
+from datetime import datetime
+from enum import auto
+from logging.handlers import RotatingFileHandler
+from os import PathLike
+from typing import Callable, TextIO
+
+# 10MB
+MAX_LOG_SIZE = 10 << 20
+
+
+class Color(enum.Enum):
+    BLACK = 30
+    RED = auto()
+    GREEN = auto()
+    YELLOW = auto()
+    BLUE = auto()
+    PURPLE = auto()
+    CYAN = auto()
+    WHITE = auto()
+
+    def __str__(self) -> str:
+        return str(self.value)
+
+
+class ColorHandler(logging.StreamHandler):
+    _color_map = {
+        "CRITICAL": Color.RED,
+        "ERROR": Color.RED,
+        "WARNING": Color.RED,
+        "INFO": Color.GREEN,
+        "DEBUG": Color.BLUE,
+    }
+
+    def format(self, record: logging.LogRecord) -> str:
+        # Suppress the verbose error report in console output
+        orig_exc_info = record.exc_info
+        # Error details are shown only when debugging
+        if self.level > logging.DEBUG:
+            record.exc_info = None
+        message = super().format(record)
+        # The original value must be restored, otherwise the file log
+        # will be missing the error details
+        record.exc_info = orig_exc_info
+        # isatty = getattr(self.stream, "isatty", None)
+        # if isatty and isatty():
+        color_code = self._color_map[record.levelname]
+        return f"\033[{color_code}m{message}\033[0m"
+        # return message
+
+
+class RedactingFilter(logging.Filter):
+    def __init__(
+        self,
+        patterns: list[str],
+        # By default the number of asterisks equals the original string's length
+        placeholder: str | Callable = lambda m: "*" * len(m.group(0)),
+    ):
+        super().__init__()
+        self.pattern = (
+            re.compile(f"({'|'.join(patterns)})") if patterns else None
+        )
+        self.placeholder = placeholder
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        if self.pattern:
+            msg = record.getMessage()
+            msg = self.pattern.sub(self.placeholder, msg)
+            record.msg, record.args = msg, ()
+
+        return True
+
+
+def setup_logger(
+    logger: logging.Logger,
+    verbosity_level: int,
+    log_file: PathLike,
+) -> None:
+    # Write everything to the log file!
+    logger.setLevel(logging.DEBUG)
+    color_handler = ColorHandler()
+    # [C] Critical Error Occurred
+    color_handler.setFormatter(
+        logging.Formatter("[%(levelname).1s] %(message)s")
+    )
+    color_handler.setLevel(verbosity_level)
+
+    # File logs
+    file_handler = RotatingFileHandler(
+        log_file,
+        maxBytes=MAX_LOG_SIZE,
+        # backupCount=1,
+        encoding="utf-8",
+    )
+    file_handler.setFormatter(
+        logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
+    )
+    file_handler.setLevel(logging.DEBUG)
+
+    redactor = RedactingFilter(
+        [
+            r"\b[A-Z0-9]{64,}\b",
+            r"\b[a-fA-F0-9]{32,}\b",  # request_id, resume_id
+        ]
+    )
+
+    for h in [color_handler, file_handler]:
+        h.addFilter(redactor)
+        logger.addHandler(h)
+
+
+TS_RE = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}")
+
+
+def collect_traceback_logs(
+    fp: TextIO,
+    after_dt: datetime,
+    maxlen: int = 1000,
+) -> str:
+    error_lines = deque(maxlen=maxlen)
+    prev_line = ""
+    log_dt = None
+    collecting_traceback = False
+    for line in fp:
+        if ts_match := TS_RE.match(line):
+            log_dt = datetime.strptime(ts_match.group(0), "%Y-%m-%d %H:%M:%S")
+            collecting_traceback = False
+
+        if (
+            line.startswith("Traceback (most recent call last):")
+            and log_dt
+            and log_dt >= after_dt
+        ):
+            error_lines.append(prev_line)
+            collecting_traceback = True
+
+        if collecting_traceback:
+            error_lines.append(line)
+
+        prev_line = line
+    return "".join(error_lines)
```
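How the pieces compose, as a sketch with a hypothetical log path: setup_logger sends every record to a colorized stderr handler capped at the requested verbosity and to a DEBUG-level rotating file, with RedactingFilter masking long token-like strings on both:

```python
import logging

logger = logging.getLogger("hh_applicant_tool")
setup_logger(logger, logging.INFO, "/tmp/hh_demo.log")  # hypothetical path

logger.info("request_id=%s", "a" * 32)  # 32-char hex-like id gets masked
logger.debug("file only: the stderr handler is capped at INFO")
```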