hh-applicant-tool 0.7.10__py3-none-any.whl → 1.4.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. hh_applicant_tool/__init__.py +1 -0
  2. hh_applicant_tool/__main__.py +1 -1
  3. hh_applicant_tool/ai/base.py +2 -0
  4. hh_applicant_tool/ai/openai.py +25 -35
  5. hh_applicant_tool/api/__init__.py +4 -2
  6. hh_applicant_tool/api/client.py +65 -68
  7. hh_applicant_tool/{constants.py → api/client_keys.py} +3 -6
  8. hh_applicant_tool/api/datatypes.py +293 -0
  9. hh_applicant_tool/api/errors.py +57 -7
  10. hh_applicant_tool/api/user_agent.py +17 -0
  11. hh_applicant_tool/main.py +234 -113
  12. hh_applicant_tool/operations/apply_similar.py +353 -371
  13. hh_applicant_tool/operations/authorize.py +313 -120
  14. hh_applicant_tool/operations/call_api.py +18 -8
  15. hh_applicant_tool/operations/check_proxy.py +30 -0
  16. hh_applicant_tool/operations/clear_negotiations.py +90 -82
  17. hh_applicant_tool/operations/config.py +119 -16
  18. hh_applicant_tool/operations/install.py +34 -0
  19. hh_applicant_tool/operations/list_resumes.py +23 -11
  20. hh_applicant_tool/operations/log.py +77 -0
  21. hh_applicant_tool/operations/migrate_db.py +65 -0
  22. hh_applicant_tool/operations/query.py +122 -0
  23. hh_applicant_tool/operations/refresh_token.py +14 -13
  24. hh_applicant_tool/operations/reply_employers.py +201 -180
  25. hh_applicant_tool/operations/settings.py +95 -0
  26. hh_applicant_tool/operations/uninstall.py +26 -0
  27. hh_applicant_tool/operations/update_resumes.py +23 -11
  28. hh_applicant_tool/operations/whoami.py +40 -7
  29. hh_applicant_tool/storage/__init__.py +8 -0
  30. hh_applicant_tool/storage/facade.py +24 -0
  31. hh_applicant_tool/storage/models/__init__.py +0 -0
  32. hh_applicant_tool/storage/models/base.py +169 -0
  33. hh_applicant_tool/storage/models/contacts.py +28 -0
  34. hh_applicant_tool/storage/models/employer.py +12 -0
  35. hh_applicant_tool/storage/models/negotiation.py +16 -0
  36. hh_applicant_tool/storage/models/resume.py +19 -0
  37. hh_applicant_tool/storage/models/setting.py +6 -0
  38. hh_applicant_tool/storage/models/vacancy.py +36 -0
  39. hh_applicant_tool/storage/queries/migrations/.gitkeep +0 -0
  40. hh_applicant_tool/storage/queries/schema.sql +132 -0
  41. hh_applicant_tool/storage/repositories/__init__.py +0 -0
  42. hh_applicant_tool/storage/repositories/base.py +230 -0
  43. hh_applicant_tool/storage/repositories/contacts.py +14 -0
  44. hh_applicant_tool/storage/repositories/employers.py +14 -0
  45. hh_applicant_tool/storage/repositories/errors.py +19 -0
  46. hh_applicant_tool/storage/repositories/negotiations.py +13 -0
  47. hh_applicant_tool/storage/repositories/resumes.py +9 -0
  48. hh_applicant_tool/storage/repositories/settings.py +35 -0
  49. hh_applicant_tool/storage/repositories/vacancies.py +9 -0
  50. hh_applicant_tool/storage/utils.py +40 -0
  51. hh_applicant_tool/utils/__init__.py +31 -0
  52. hh_applicant_tool/utils/attrdict.py +6 -0
  53. hh_applicant_tool/utils/binpack.py +167 -0
  54. hh_applicant_tool/utils/config.py +55 -0
  55. hh_applicant_tool/utils/date.py +19 -0
  56. hh_applicant_tool/utils/json.py +61 -0
  57. hh_applicant_tool/{jsonc.py → utils/jsonc.py} +12 -6
  58. hh_applicant_tool/utils/log.py +147 -0
  59. hh_applicant_tool/utils/misc.py +12 -0
  60. hh_applicant_tool/utils/mixins.py +221 -0
  61. hh_applicant_tool/utils/string.py +27 -0
  62. hh_applicant_tool/utils/terminal.py +32 -0
  63. hh_applicant_tool-1.4.12.dist-info/METADATA +685 -0
  64. hh_applicant_tool-1.4.12.dist-info/RECORD +68 -0
  65. hh_applicant_tool/ai/blackbox.py +0 -55
  66. hh_applicant_tool/color_log.py +0 -47
  67. hh_applicant_tool/mixins.py +0 -13
  68. hh_applicant_tool/operations/delete_telemetry.py +0 -30
  69. hh_applicant_tool/operations/get_employer_contacts.py +0 -348
  70. hh_applicant_tool/telemetry_client.py +0 -106
  71. hh_applicant_tool/types.py +0 -45
  72. hh_applicant_tool/utils.py +0 -119
  73. hh_applicant_tool-0.7.10.dist-info/METADATA +0 -452
  74. hh_applicant_tool-0.7.10.dist-info/RECORD +0 -33
  75. {hh_applicant_tool-0.7.10.dist-info → hh_applicant_tool-1.4.12.dist-info}/WHEEL +0 -0
  76. {hh_applicant_tool-0.7.10.dist-info → hh_applicant_tool-1.4.12.dist-info}/entry_points.txt +0 -0
hh_applicant_tool/storage/repositories/base.py
@@ -0,0 +1,230 @@
+ from __future__ import annotations
+
+ import logging
+ import sqlite3
+ from collections.abc import Sequence
+ from dataclasses import dataclass
+ from typing import Any, ClassVar, Iterator, Mapping, Self, Type
+
+ from ..models.base import BaseModel
+ from .errors import wrap_db_errors
+
+ DEFAULT_PRIMARY_KEY = "id"
+
+ logger = logging.getLogger(__package__)
+
+
+ @dataclass
+ class BaseRepository:
+     model: ClassVar[Type[BaseModel] | None] = None
+     pkey: ClassVar[str] = DEFAULT_PRIMARY_KEY
+     conflict_columns: ClassVar[tuple[str, ...] | None] = None
+     update_excludes: ClassVar[tuple[str, ...]] = ("created_at", "updated_at")
+     __table__: ClassVar[str | None] = None
+
+     conn: sqlite3.Connection
+     auto_commit: bool = True
+
+     @property
+     def table_name(self) -> str:
+         return self.__table__ or self.model.__name__
+
+     @wrap_db_errors
+     def commit(self):
+         if self.conn.in_transaction:
+             self.conn.commit()
+
+     @wrap_db_errors
+     def rollback(self):
+         if self.conn.in_transaction:
+             self.conn.rollback()
+
+     def __enter__(self) -> Self:
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if exc_type is None:
+             self.commit()
+         else:
+             self.rollback()
+         return False
+
+     def maybe_commit(self, commit: bool | None = None) -> bool:
+         if commit is not None and commit or self.auto_commit:
+             self.commit()
+
+     def _row_to_model(self, cursor: sqlite3.Cursor, row: tuple) -> BaseModel:
+         data = {col[0]: value for col, value in zip(cursor.description, row)}  # noqa: B905
+         return self.model.from_db(data)
+
+     @wrap_db_errors
+     def find(self, **kwargs: Any) -> Iterator[BaseModel]:
+         # logger.debug(kwargs)
+         operators = {
+             "lt": "<",
+             "le": "<=",
+             "gt": ">",
+             "ge": ">=",
+             "ne": "!=",
+             "eq": "=",
+             "like": "LIKE",
+             "is": "IS",
+             "is_not": "IS NOT",
+             "in": "IN",
+             "not_in": "NOT IN",
+         }
+         conditions = []
+         sql_params = {}
+         for key, value in kwargs.items():
+             try:
+                 key, op = key.rsplit("__", 1)
+             except ValueError:
+                 op = "eq"
+             if op in ("in", "not_in"):
+                 if not isinstance(value, (list, tuple)):
+                     value = [value]
+                 in_placeholders = []
+                 for i, v in enumerate(value, 1):
+                     p_name = f"{key}_{i}"
+                     in_placeholders.append(f":{p_name}")
+                     sql_params[p_name] = v
+                 conditions.append(
+                     f"{key} {operators[op]} ({', '.join(in_placeholders)})"
+                 )
+             else:
+                 placeholder = f":{key}"
+                 sql_params[key] = value
+                 conditions.append(f"{key} {operators[op]} {placeholder}")
+         sql = f"SELECT * FROM {self.table_name}"
+         if conditions:
+             sql += f" WHERE {' AND '.join(conditions)}"
+         sql += " ORDER BY rowid DESC;"
+         try:
+             cur = self.conn.execute(sql, sql_params)
+         except sqlite3.Error:
+             logger.warning("SQL ERROR: %s", sql)
+             raise
+
+         yield from (self._row_to_model(cur, row) for row in cur.fetchall())
+
+     @wrap_db_errors
+     def get(self, pk: Any) -> BaseModel | None:
+         return next(self.find(**{f"{self.pkey}": pk}), None)
+
+     @wrap_db_errors
+     def count_total(self) -> int:
+         cur = self.conn.execute(f"SELECT count(*) FROM {self.table_name};")
+         return cur.fetchone()[0]
+
+     @wrap_db_errors
+     def delete(self, obj_or_pkey: Any, /, commit: bool | None = None) -> None:
+         sql = f"DELETE FROM {self.table_name} WHERE {self.pkey} = ?"
+         pk_value = (
+             getattr(obj_or_pkey, self.pkey)
+             if isinstance(obj_or_pkey, BaseModel)
+             else obj_or_pkey
+         )
+         self.conn.execute(sql, (pk_value,))
+         self.maybe_commit(commit=commit)
+
+     remove = delete
+
+     @wrap_db_errors
+     def clear(self, commit: bool | None = None):
+         self.conn.execute(f"DELETE FROM {self.table_name};")
+         self.maybe_commit(commit)
+
+     clean = clear
+
+     def _insert(
+         self,
+         data: Mapping[str, Any] | list[Mapping[str, Any]],
+         /,
+         batch: bool = False,
+         upsert: bool = True,
+         conflict_columns: Sequence[str] | None = None,
+         update_excludes: Sequence[str] | None = None,
+         commit: bool | None = None,
+     ):
+         conflict_columns = conflict_columns or self.conflict_columns
+         update_excludes = update_excludes or self.update_excludes
+
+         if batch and not data:
+             return
+
+         columns = list(dict(data[0] if batch else data).keys())
+         sql = (
+             f"INSERT INTO {self.table_name} ({', '.join(columns)})"
+             f" VALUES (:{', :'.join(columns)})"
+         )
+
+         if upsert:
+             cols_set = set(columns)
+
+             # Determine the conflict columns: either the ones passed in, or the pkey
+             if conflict_columns:
+                 conflict_set = set(conflict_columns) & cols_set
+             else:
+                 conflict_set = {self.pkey} & cols_set
+
+             if conflict_set:
+                 sql += f" ON CONFLICT({', '.join(conflict_set)})"
+
+                 # Exclude from the update:
+                 # 1. The conflict columns (SQL forbids updating them here)
+                 # 2. The primary key (never changed)
+                 # 3. Technical fields (created_at, etc.)
+                 update_set = (
+                     cols_set
+                     - conflict_set
+                     - {self.pkey}
+                     - set(update_excludes or [])
+                 )
+
+                 if update_set:
+                     update_clause = ", ".join(
+                         f"{c} = excluded.{c}" for c in update_set
+                     )
+                     sql += f" DO UPDATE SET {update_clause}"
+                 else:
+                     sql += " DO NOTHING"
+
+         sql += ";"
+         # logger.debug("%.2000s", sql)
+         try:
+             if batch:
+                 self.conn.executemany(sql, data)
+             else:
+                 self.conn.execute(sql, data)
+         except sqlite3.Error:
+             logger.warning("SQL ERROR: %s", sql)
+
+             raise
+         self.maybe_commit(commit)
+
+     @wrap_db_errors
+     def save(
+         self,
+         obj: BaseModel | Mapping[str, Any],
+         /,
+         **kwargs: Any,
+     ) -> None:
+         if isinstance(obj, Mapping):
+             obj = self.model.from_api(obj)
+         data = obj.to_db()
+         self._insert(data, **kwargs)
+
+     @wrap_db_errors
+     def save_batch(
+         self,
+         items: list[BaseModel | Mapping[str, Any]],
+         /,
+         **kwargs: Any,
+     ) -> None:
+         if not items:
+             return
+         data = [
+             (self.model.from_api(i) if isinstance(i, Mapping) else i).to_db()
+             for i in items
+         ]
+         self._insert(data, batch=True, **kwargs)
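A rough usage sketch of the repository layer added above: find() turns "__"-suffixed keyword arguments into SQL operators, and save() issues an upsert (INSERT ... ON CONFLICT ... DO UPDATE) keyed on the conflict columns or the primary key. The database path, column names, and payload below are illustrative assumptions, not taken from the package schema.

from sqlite3 import connect

from hh_applicant_tool.storage.repositories.vacancies import VacanciesRepository
from hh_applicant_tool.storage.utils import init_db

conn = connect("hh.db")          # path is illustrative
init_db(conn)                    # creates tables from queries/schema.sql
repo = VacanciesRepository(conn)

# Keyword suffixes map to operators: __ge -> ">=", __in -> "IN", no suffix -> "="
# (salary_from and area_id are assumed column names)
for vacancy in repo.find(salary_from__ge=100_000, area_id__in=[1, 2]):
    print(vacancy)

# save() accepts a model instance or a raw API mapping and upserts by primary key
repo.save({"id": "123", "name": "Python Developer"})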
hh_applicant_tool/storage/repositories/contacts.py
@@ -0,0 +1,14 @@
+ from __future__ import annotations
+
+ import logging
+
+ from ..models.contacts import VacancyContactsModel
+ from .base import BaseRepository
+
+ logger = logging.getLogger(__package__)
+
+
+ class VacancyContactsRepository(BaseRepository):
+     __table__ = "vacancy_contacts"
+     model = VacancyContactsModel
+     conflict_columns = ("vacancy_id", "email")
hh_applicant_tool/storage/repositories/employers.py
@@ -0,0 +1,14 @@
+ from __future__ import annotations
+
+ from typing import Iterator
+
+ from ..models.employer import EmployerModel
+ from .base import BaseRepository
+
+
+ class EmployersRepository(BaseRepository):
+     __table__ = "employers"
+     model = EmployerModel
+
+     def find(self, **kwargs) -> Iterator[EmployerModel]:
+         return super().find(**kwargs)
hh_applicant_tool/storage/repositories/errors.py
@@ -0,0 +1,19 @@
+ import sqlite3
+ from functools import wraps
+
+
+ class RepositoryError(sqlite3.Error):
+     pass
+
+
+ def wrap_db_errors(func):
+     @wraps(func)
+     def wrapper(*args, **kwargs):
+         try:
+             return func(*args, **kwargs)
+         except sqlite3.Error as e:
+             raise RepositoryError(
+                 f"Database error in {func.__name__}: {e}"
+             ) from e
+
+     return wrapper
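A minimal sketch of what the decorator above does: any sqlite3.Error raised inside the wrapped function is re-raised as RepositoryError, with the original driver exception chained as __cause__. The table name is made up for illustration.

import sqlite3

@wrap_db_errors
def insert_row(conn: sqlite3.Connection) -> None:
    conn.execute("INSERT INTO no_such_table VALUES (1)")

try:
    insert_row(sqlite3.connect(":memory:"))
except RepositoryError as exc:
    print(exc)            # "Database error in insert_row: no such table: ..."
    print(exc.__cause__)  # the original sqlite3.OperationalError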
hh_applicant_tool/storage/repositories/negotiations.py
@@ -0,0 +1,13 @@
+ from __future__ import annotations
+
+ from logging import getLogger
+
+ from ..models.negotiation import NegotiationModel
+ from .base import BaseRepository
+
+ logger = getLogger(__package__)
+
+
+ class NegotiationRepository(BaseRepository):
+     __table__ = "negotiations"
+     model = NegotiationModel
hh_applicant_tool/storage/repositories/resumes.py
@@ -0,0 +1,9 @@
+ from __future__ import annotations
+
+ from ..models.resume import ResumeModel
+ from .base import BaseRepository
+
+
+ class ResumesRepository(BaseRepository):
+     __table__ = "resumes"
+     model = ResumeModel
hh_applicant_tool/storage/repositories/settings.py
@@ -0,0 +1,35 @@
+ from typing import TypeVar
+
+ from ..models.setting import SettingModel
+ from .base import BaseRepository
+
+ Default = TypeVar("Default")
+
+
+ class SettingsRepository(BaseRepository):
+     __table__ = "settings"
+     pkey: str = "key"
+     model = SettingModel
+
+     def get_value(
+         self,
+         key: str,
+         /,
+         default: Default = None,
+     ) -> str | Default:
+         setting = self.get(key)
+         return setting.value if setting else default
+
+     def set_value(
+         self,
+         key: str,
+         value: str,
+         /,
+         commit: bool | None = None,
+     ) -> None:
+         self.save(self.model(key=key, value=value), commit=commit)
+
+     def delete_value(self, key: str, /, commit: bool | None = None) -> None:
+         setting = self.get(key)
+         if setting:
+             self.delete(setting, commit=commit)
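With pkey set to "key", SettingsRepository behaves like a small persistent key-value store on top of the base repository. A usage sketch; the key name and database path are assumptions:

import sqlite3

from hh_applicant_tool.storage.repositories.settings import SettingsRepository

conn = sqlite3.connect("hh.db")  # illustrative path; the schema must already exist
settings = SettingsRepository(conn)
settings.set_value("last_sync", "2024-01-01T00:00:00+0300")
print(settings.get_value("last_sync"))             # "2024-01-01T00:00:00+0300"
print(settings.get_value("missing", default="-"))  # "-" (default, no error)
settings.delete_value("last_sync")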
hh_applicant_tool/storage/repositories/vacancies.py
@@ -0,0 +1,9 @@
+ from __future__ import annotations
+
+ from ..models.vacancy import VacancyModel
+ from .base import BaseRepository
+
+
+ class VacanciesRepository(BaseRepository):
+     __table__ = "vacancies"
+     model = VacancyModel
hh_applicant_tool/storage/utils.py
@@ -0,0 +1,40 @@
+ from __future__ import annotations
+
+ import logging
+ import sqlite3
+ from pathlib import Path
+
+ QUERIES_PATH: Path = Path(__file__).parent / "queries"
+ MIGRATION_PATH: Path = QUERIES_PATH / "migrations"
+
+
+ logger: logging.Logger = logging.getLogger(__package__)
+
+
+ def init_db(conn: sqlite3.Connection) -> None:
+     """Create the database schema."""
+     conn.executescript(
+         (QUERIES_PATH / "schema.sql").read_text(encoding="utf-8")
+     )
+     logger.debug("Database initialized")
+
+
+ def list_migrations() -> list[str]:
+     """Return migration names without the extension, sorted by date."""
+     if not MIGRATION_PATH.exists():
+         return []
+     return sorted([f.stem for f in MIGRATION_PATH.glob("*.sql")])
+
+
+ def apply_migration(conn: sqlite3.Connection, name: str) -> None:
+     """Find the migration file by name and execute its contents."""
+     conn.executescript(
+         (MIGRATION_PATH / f"{name}.sql").read_text(encoding="utf-8")
+     )
+
+
+ # def model2table(o: type) -> str:
+ #     name: str = o.__name__
+ #     if name.endswith("Model"):
+ #         name = name[:-5]
+ #     return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()
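A sketch of how these helpers plausibly fit together (the database path is illustrative): init_db() runs schema.sql once, then each bundled migration script is applied in sorted filename order.

import sqlite3

from hh_applicant_tool.storage.utils import apply_migration, init_db, list_migrations

conn = sqlite3.connect("hh_applicant_tool.db")  # assumed path
init_db(conn)                                   # executes queries/schema.sql
for name in list_migrations():                  # sorted *.sql stems from queries/migrations
    apply_migration(conn, name)
conn.commit()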
hh_applicant_tool/utils/__init__.py
@@ -0,0 +1,31 @@
+ from __future__ import annotations
+
+ from ..api.user_agent import generate_android_useragent
+ from .attrdict import AttrDict
+ from .config import Config, get_config_path
+ from .date import (
+     DATETIME_FORMAT,
+     parse_api_datetime,
+     try_parse_datetime,
+ )
+ from .misc import calc_hash, print_err
+ from .string import bool2str, list2str, rand_text, shorten
+ from .terminal import setup_terminal
+
+ # Add all public symbols to __all__ for consistent import behavior
+ __all__ = [
+     "AttrDict",
+     "Config",
+     "get_config_path",
+     "DATETIME_FORMAT",
+     "parse_api_datetime",
+     "try_parse_datetime",
+     "shorten",
+     "rand_text",
+     "bool2str",
+     "list2str",
+     "calc_hash",
+     "generate_android_useragent",
+     "setup_terminal",
+     "print_err",
+ ]
hh_applicant_tool/utils/attrdict.py
@@ -0,0 +1,6 @@
+ from __future__ import annotations
+
+ class AttrDict(dict):
+     __getattr__ = dict.get
+     __setattr__ = dict.__setitem__
+     __delattr__ = dict.__delitem__
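Because the three dunder methods are aliased straight to dict operations, attribute access follows dict.get semantics: missing keys read as None instead of raising AttributeError. A tiny sketch with made-up values:

d = AttrDict({"name": "hh-applicant-tool"})
print(d.name)      # "hh-applicant-tool"
print(d.missing)   # None, via dict.get
d.version = "1.4.12"
del d.version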
hh_applicant_tool/utils/binpack.py
@@ -0,0 +1,167 @@
+ # A wire format that compresses better than JSON.
+ # Auto-generated code from my specification. Advantages include support for dates
+ # and keys of any type in a Map.
+ from __future__ import annotations
+
+ import gzip
+ import io
+ import struct
+ import zlib
+ from datetime import datetime
+ from typing import Any, Callable, Final
+
+ # ---- Constants ----
+
+ BINARY_TYPES: Final = {
+     type(None): 0x00,
+     dict: 0x01,
+     str: 0x02,
+     int: 0x03,
+     float: 0x04,
+     list: 0x05,
+     bool: 0x06,
+     datetime: 0x07,
+ }
+
+ # Type codes (for deserialization)
+ T_NULL, T_MAP, T_STR, T_INT, T_FLOAT, T_LIST, T_BOOL, T_DT = range(8)
+
+ # Compression
+ COMP_NONE, COMP_ZLIB, COMP_GZIP = range(3)
+
+ # Struct packing formats
+ U32 = struct.Struct("<I")
+ S64 = struct.Struct("<q")
+ F64 = struct.Struct("<d")
+ U8 = struct.Struct("<B")
+
+ # ---- Compression Logic (Pure functions) ----
+
+
+ def gzip_compress(data: bytes) -> bytes:
+     buf = io.BytesIO()
+     with gzip.GzipFile(fileobj=buf, mode="wb") as f:
+         f.write(data)
+     return buf.getvalue()
+
+
+ def gzip_decompress(data: bytes) -> bytes:
+     with gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") as f:
+         return f.read()
+
+
+ COMPRESSORS: dict[int, Callable[[bytes], bytes]] = {
+     COMP_ZLIB: zlib.compress,
+     COMP_GZIP: gzip_compress,
+     COMP_NONE: lambda d: d,
+ }
+
+ DECOMPRESSORS: dict[int, Callable[[bytes], bytes]] = {
+     COMP_ZLIB: zlib.decompress,
+     COMP_GZIP: gzip_decompress,
+     COMP_NONE: lambda d: d,
+ }
+
+
+ def get_best_algo() -> int:
+     if zlib:
+         return COMP_ZLIB
+     if gzip:
+         return COMP_GZIP
+     return COMP_NONE
+
+
+ # ---- Serialization (Recursive Functions) ----
+
+
+ def write_value(value: Any) -> bytes:
+     """Recursively converts a value to bytes (pure)."""
+     match value:
+         case None:
+             return bytes([T_NULL])
+
+         case bool():
+             return bytes([T_BOOL]) + U8.pack(1 if value else 0)
+
+         case datetime():
+             return bytes([T_DT]) + F64.pack(value.timestamp())
+
+         case int():
+             return bytes([T_INT]) + S64.pack(value)
+
+         case float():
+             return bytes([T_FLOAT]) + F64.pack(value)
+
+         case str():
+             data = value.encode("utf-8")
+             return bytes([T_STR]) + U32.pack(len(data)) + data
+
+         case list():
+             content = b"".join(map(write_value, value))
+             return bytes([T_LIST]) + U32.pack(len(value)) + content
+
+         case dict():
+             content = b"".join(
+                 write_value(k) + write_value(v) for k, v in value.items()
+             )
+             return bytes([T_MAP]) + U32.pack(len(value)) + content
+
+         case _:
+             raise TypeError(f"Unsupported type: {type(value)}")
+
+
+ # ---- Deserialization (Stream-based but stateless) ----
+
+
+ def read_value(stream: io.BytesIO) -> Any:
+     """Reads a value from a byte stream."""
+     type_byte = stream.read(1)
+     if not type_byte:
+         return None
+
+     match type_byte[0]:
+         case 0x00:  # NULL
+             return None
+         case 0x06:  # BOOL
+             return U8.unpack(stream.read(1))[0] == 1
+         case 0x07:  # DT
+             return datetime.fromtimestamp(F64.unpack(stream.read(8))[0])
+         case 0x03:  # INT
+             return S64.unpack(stream.read(8))[0]
+         case 0x04:  # FLOAT
+             return F64.unpack(stream.read(8))[0]
+         case 0x02:  # STR
+             size = U32.unpack(stream.read(4))[0]
+             return stream.read(size).decode("utf-8")
+         case 0x05:  # LIST
+             size = U32.unpack(stream.read(4))[0]
+             return [read_value(stream) for _ in range(size)]
+         case 0x01:  # MAP
+             size = U32.unpack(stream.read(4))[0]
+             return {read_value(stream): read_value(stream) for _ in range(size)}
+         case t:
+             raise TypeError(f"Unknown type code: {t:#x}")
+
+
+ # ---- Public API (Composition) ----
+
+
+ def serialize(value: Any, compress: bool = True) -> bytes:
+     raw_payload = write_value(value)
+     algo = get_best_algo() if compress else COMP_NONE
+
+     compressor = COMPRESSORS.get(algo, COMPRESSORS[COMP_NONE])
+     return bytes([algo]) + compressor(raw_payload)
+
+
+ def deserialize(data: bytes) -> Any:
+     if not data:
+         raise ValueError("Empty payload")
+
+     algo, payload = data[0], data[1:]
+
+     if algo not in DECOMPRESSORS:
+         raise ValueError(f"Unknown compression type: {algo}")
+
+     raw_data = DECOMPRESSORS[algo](payload)
+     return read_value(io.BytesIO(raw_data))
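A round-trip sketch for the helpers above: serialize() prepends a single compression-algorithm byte to the type-tagged payload, and deserialize() reverses it. The sample document is made up.

from datetime import datetime

doc = {
    "id": 1,
    "tags": ["python", "sqlite"],
    "seen_at": datetime(2024, 1, 1, 12, 0),
    "score": 0.5,
    "active": True,
}
blob = serialize(doc)
print(blob[0])                   # COMP_ZLIB (1) when zlib is available
assert deserialize(blob) == doc  # datetimes survive via timestamp()/fromtimestamp()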
hh_applicant_tool/utils/config.py
@@ -0,0 +1,55 @@
+ from __future__ import annotations
+
+ import platform
+ from functools import cache
+ from os import getenv
+ from pathlib import Path
+ from threading import Lock
+ from typing import Any
+
+ from . import json
+
+
+ @cache
+ def get_config_path() -> Path:
+     match platform.system():
+         case "Windows":
+             return Path(getenv("APPDATA", Path.home() / "AppData" / "Roaming"))
+         case "Darwin":
+             return Path.home() / "Library" / "Application Support"
+         case _:
+             return Path(getenv("XDG_CONFIG_HOME", Path.home() / ".config"))
+
+
+ class Config(dict):
+     def __init__(self, config_path: str | Path | None = None):
+         self._config_path = Path(config_path or get_config_path())
+         self._lock = Lock()
+         self.load()
+
+     def load(self) -> None:
+         if self._config_path.exists():
+             with self._lock:
+                 with self._config_path.open(
+                     "r", encoding="utf-8", errors="replace"
+                 ) as f:
+                     self.update(json.load(f))
+
+     def save(self, *args: Any, **kwargs: Any) -> None:
+         self.update(*args, **kwargs)
+         self._config_path.parent.mkdir(exist_ok=True, parents=True)
+         with self._lock:
+             with self._config_path.open(
+                 "w+", encoding="utf-8", errors="replace"
+             ) as fp:
+                 json.dump(
+                     self,
+                     fp,
+                     indent=2,
+                     sort_keys=True,
+                 )
+
+     __getitem__ = dict.get
+
+     def __repr__(self) -> str:
+         return str(self._config_path)
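A usage sketch; the config file name and the keys are assumptions, not the tool's actual settings. Because __getitem__ is aliased to dict.get, reading an absent key yields None, and save() merges its arguments before writing pretty-printed JSON to disk.

cfg = Config(get_config_path() / "hh-applicant-tool" / "config.json")
print(cfg["token"])                # None if the key is not set yet
cfg.save(token="...", debug=True)  # update(**kwargs), then dump with indent=2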
hh_applicant_tool/utils/date.py
@@ -0,0 +1,19 @@
+ from __future__ import annotations
+
+ from datetime import datetime
+ from typing import Any
+
+ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
+
+
+ def parse_api_datetime(dt: str) -> datetime:
+     return datetime.strptime(dt, DATETIME_FORMAT)
+
+
+ def try_parse_datetime(dt: Any) -> datetime | Any:
+     for parse in (datetime.fromisoformat, parse_api_datetime):
+         try:
+             return parse(dt)
+         except (ValueError, TypeError):
+             pass
+     return dt
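try_parse_datetime() tries datetime.fromisoformat first, then the API format above, and falls back to returning the input untouched. A short sketch with made-up timestamps:

print(parse_api_datetime("2024-05-01T10:00:00+0300"))   # parsed with DATETIME_FORMAT
print(try_parse_datetime("2024-05-01T10:00:00+03:00"))  # handled by fromisoformat
print(try_parse_datetime("not a date"))                 # returned unchanged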