hh-applicant-tool 0.7.10__py3-none-any.whl → 1.4.7__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- hh_applicant_tool/__init__.py +1 -0
- hh_applicant_tool/__main__.py +1 -1
- hh_applicant_tool/ai/base.py +2 -0
- hh_applicant_tool/ai/openai.py +23 -33
- hh_applicant_tool/api/client.py +50 -64
- hh_applicant_tool/api/errors.py +51 -7
- hh_applicant_tool/constants.py +0 -3
- hh_applicant_tool/datatypes.py +291 -0
- hh_applicant_tool/main.py +233 -111
- hh_applicant_tool/operations/apply_similar.py +266 -362
- hh_applicant_tool/operations/authorize.py +256 -120
- hh_applicant_tool/operations/call_api.py +18 -8
- hh_applicant_tool/operations/check_negotiations.py +102 -0
- hh_applicant_tool/operations/check_proxy.py +30 -0
- hh_applicant_tool/operations/config.py +119 -16
- hh_applicant_tool/operations/install.py +34 -0
- hh_applicant_tool/operations/list_resumes.py +24 -10
- hh_applicant_tool/operations/log.py +77 -0
- hh_applicant_tool/operations/migrate_db.py +65 -0
- hh_applicant_tool/operations/query.py +120 -0
- hh_applicant_tool/operations/refresh_token.py +14 -13
- hh_applicant_tool/operations/reply_employers.py +148 -167
- hh_applicant_tool/operations/settings.py +95 -0
- hh_applicant_tool/operations/uninstall.py +26 -0
- hh_applicant_tool/operations/update_resumes.py +21 -10
- hh_applicant_tool/operations/whoami.py +40 -7
- hh_applicant_tool/storage/__init__.py +4 -0
- hh_applicant_tool/storage/facade.py +24 -0
- hh_applicant_tool/storage/models/__init__.py +0 -0
- hh_applicant_tool/storage/models/base.py +169 -0
- hh_applicant_tool/storage/models/contact.py +16 -0
- hh_applicant_tool/storage/models/employer.py +12 -0
- hh_applicant_tool/storage/models/negotiation.py +16 -0
- hh_applicant_tool/storage/models/resume.py +19 -0
- hh_applicant_tool/storage/models/setting.py +6 -0
- hh_applicant_tool/storage/models/vacancy.py +36 -0
- hh_applicant_tool/storage/queries/migrations/.gitkeep +0 -0
- hh_applicant_tool/storage/queries/schema.sql +119 -0
- hh_applicant_tool/storage/repositories/__init__.py +0 -0
- hh_applicant_tool/storage/repositories/base.py +176 -0
- hh_applicant_tool/storage/repositories/contacts.py +19 -0
- hh_applicant_tool/storage/repositories/employers.py +13 -0
- hh_applicant_tool/storage/repositories/negotiations.py +12 -0
- hh_applicant_tool/storage/repositories/resumes.py +14 -0
- hh_applicant_tool/storage/repositories/settings.py +34 -0
- hh_applicant_tool/storage/repositories/vacancies.py +8 -0
- hh_applicant_tool/storage/utils.py +49 -0
- hh_applicant_tool/utils/__init__.py +31 -0
- hh_applicant_tool/utils/attrdict.py +6 -0
- hh_applicant_tool/utils/binpack.py +167 -0
- hh_applicant_tool/utils/config.py +55 -0
- hh_applicant_tool/utils/dateutil.py +19 -0
- hh_applicant_tool/{jsonc.py → utils/jsonc.py} +12 -6
- hh_applicant_tool/utils/jsonutil.py +61 -0
- hh_applicant_tool/utils/log.py +144 -0
- hh_applicant_tool/utils/misc.py +12 -0
- hh_applicant_tool/utils/mixins.py +220 -0
- hh_applicant_tool/utils/string.py +27 -0
- hh_applicant_tool/utils/terminal.py +19 -0
- hh_applicant_tool/utils/user_agent.py +17 -0
- hh_applicant_tool-1.4.7.dist-info/METADATA +628 -0
- hh_applicant_tool-1.4.7.dist-info/RECORD +67 -0
- hh_applicant_tool/ai/blackbox.py +0 -55
- hh_applicant_tool/color_log.py +0 -47
- hh_applicant_tool/mixins.py +0 -13
- hh_applicant_tool/operations/clear_negotiations.py +0 -109
- hh_applicant_tool/operations/delete_telemetry.py +0 -30
- hh_applicant_tool/operations/get_employer_contacts.py +0 -348
- hh_applicant_tool/telemetry_client.py +0 -106
- hh_applicant_tool/types.py +0 -45
- hh_applicant_tool/utils.py +0 -119
- hh_applicant_tool-0.7.10.dist-info/METADATA +0 -452
- hh_applicant_tool-0.7.10.dist-info/RECORD +0 -33
- {hh_applicant_tool-0.7.10.dist-info → hh_applicant_tool-1.4.7.dist-info}/WHEEL +0 -0
- {hh_applicant_tool-0.7.10.dist-info → hh_applicant_tool-1.4.7.dist-info}/entry_points.txt +0 -0
hh_applicant_tool/storage/facade.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+import sqlite3
+
+from .repositories.contacts import EmployerContactsRepository
+from .repositories.employers import EmployersRepository
+from .repositories.negotiations import NegotiationRepository
+from .repositories.resumes import ResumesRepository
+from .repositories.settings import SettingsRepository
+from .repositories.vacancies import VacanciesRepository
+from .utils import init_db
+
+
+class StorageFacade:
+    """Single entry point to the persistence layer."""
+
+    def __init__(self, conn: sqlite3.Connection):
+        init_db(conn)
+        self.employers = EmployersRepository(conn)
+        self.vacancies = VacanciesRepository(conn)
+        self.employer_contacts = EmployerContactsRepository(conn)
+        self.negotiations = NegotiationRepository(conn)
+        self.settings = SettingsRepository(conn)
+        self.resumes = ResumesRepository(conn)
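A minimal usage sketch of the facade, assuming an already-open SQLite connection (the database filename below is illustrative; the real location is chosen by the application):

import sqlite3

from hh_applicant_tool.storage.facade import StorageFacade

conn = sqlite3.connect("hh_applicant_tool.db")  # illustrative path
storage = StorageFacade(conn)  # init_db() prepares the schema, then the repositories are wired up

print(storage.resumes.count_total())  # 0 on a fresh database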
hh_applicant_tool/storage/models/__init__.py (file without changes)
hh_applicant_tool/storage/models/base.py
@@ -0,0 +1,169 @@
+import builtins
+from dataclasses import Field, asdict, dataclass, field, fields
+from datetime import datetime
+from logging import getLogger
+from typing import Any, Callable, Mapping, Self, dataclass_transform, get_origin
+
+from hh_applicant_tool.utils import jsonutil
+from hh_applicant_tool.utils.dateutil import try_parse_datetime
+
+logger = getLogger(__package__)
+
+MISSING = object()
+
+
+def mapped(
+    path: str | None = None,
+    transform: Callable[[Any], Any] | None = None,
+    store_json: bool = False,
+    **kwargs: Any,
+):
+    metadata = kwargs.get("metadata", {})
+    metadata.setdefault("path", path)
+    metadata.setdefault("transform", transform)
+    metadata.setdefault("store_json", store_json)
+    return field(metadata=metadata, **kwargs)
+
+
+@dataclass_transform(field_specifiers=(field, mapped))
+class BaseModel:
+    def __init_subclass__(cls, /, **kwargs: Any):
+        super().__init_subclass__()
+        dataclass(cls, kw_only=True, **kwargs)
+
+    @classmethod
+    def from_db(cls, data: Mapping[str, Any]) -> Self:
+        return cls._from_mapping(data)
+
+    @classmethod
+    def from_api(cls, data: Mapping[str, Any]) -> Self:
+        return cls._from_mapping(data, from_source=True)
+
+    def to_db(self) -> dict[str, Any]:
+        data = self.to_dict()
+        for f in fields(self):
+            # If a value is missing from the dict, don't set it, otherwise
+            # assigning default values breaks.
+            value = data.get(f.name, MISSING)
+            if value is MISSING:
+                continue
+            if f.metadata.get("store_json"):
+                value = jsonutil.dumps(value)
+            # Types definitely don't need to be coerced before saving
+            # else:
+            #     value = self._coerce_type(value, f)
+            data[f.name] = value
+        return data
+
+    @classmethod
+    def _coerce_type(cls, value: Any, f: Field) -> Any:
+        # Only the creator knows what to do with this one
+        if get_origin(f.type):
+            return value
+
+        type_name = f.type if isinstance(f.type, str) else f.type.__name__
+        if value is not None and type_name in (
+            "bool",
+            "str",
+            "int",
+            "float",
+            "datetime",
+        ):
+            if type_name == "datetime":
+                return try_parse_datetime(value)
+            try:
+                t = getattr(builtins, type_name)
+                if not isinstance(value, t):
+                    value = t(value)
+            except (TypeError, ValueError):
+                pass
+        return value
+
+    @classmethod
+    def _from_mapping(
+        cls,
+        data: Mapping[str, Any],
+        /,
+        from_source: bool = False,
+    ) -> Self:
+        kwargs = {}
+        for f in fields(cls):
+            if from_source:
+                if path := f.metadata.get("path"):
+                    found = True
+                    v = data
+                    for key in path.split("."):
+                        if isinstance(v, Mapping):
+                            v = v.get(key)
+                        else:
+                            found = False
+                            break
+                    if not found:
+                        continue
+                    value = v
+                else:
+                    value = data.get(f.name, MISSING)
+                    if value is MISSING:
+                        continue
+
+                if value is not None and (t := f.metadata.get("transform")):
+                    if isinstance(t, str):
+                        t = getattr(cls, t)
+                    value = t(value)
+
+                value = cls._coerce_type(value, f)
+            else:
+                value = data.get(f.name, MISSING)
+                if value is MISSING:
+                    continue
+
+                if f.metadata.get("store_json"):
+                    value = jsonutil.loads(value)
+                else:
+                    value = cls._coerce_type(value, f)
+
+            kwargs[f.name] = value
+        return cls(**kwargs)
+
+    def to_dict(self) -> dict[str, Any]:
+        return asdict(self)  # pyright: ignore[reportArgumentType]
+
+    # def to_json(self, **kwargs: Any) -> str:
+    #     """Serializes the model to a JSON string."""
+    #     kwargs.setdefault("ensure_ascii", False)
+    #     return json_utils.dumps(self.to_dict(), **kwargs)
+
+    # @classmethod
+    # def from_json(cls, json_str: str, **kwargs: Any) -> Self:
+    #     """Deserializes a model from a JSON string."""
+    #     data = json_utils.loads(json_str, **kwargs)
+    #     # from_api is probably more appropriate as JSON is a common API format
+    #     # and it handles nested data sources.
+    #     return cls.from_api(data)
+
+
+if __name__ == "__main__":
+
+    class CompanyModel(BaseModel):
+        id: "int"
+        name: str
+        city_id: int = mapped(path="location.city.id")
+        city: str = mapped(path="location.city.name")
+        created_at: datetime
+
+    c = CompanyModel.from_api(
+        {
+            "id": "42",
+            "name": "ACME",
+            "location": {
+                "city": {
+                    "id": "1",
+                    "name": "Moscow",
+                },
+            },
+            "created_at": "2026-01-09T04:12:00.114858",
+        }
+    )
+
+    print(c)
+    # assert c == CompanyModel(id=42, name="ACME", city_id=1, city="Moscow")
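A small round-trip sketch for the base model, using a hypothetical TagsModel that is not part of the package: a field declared with store_json=True is dumped to a JSON string by to_db() and parsed back by from_db().

class TagsModel(BaseModel):  # hypothetical model, for illustration only
    id: int
    tags: list[str] = mapped(store_json=True, default_factory=list)

row = TagsModel(id=1, tags=["python", "sql"]).to_db()
# row["tags"] is now a JSON string along the lines of '["python", "sql"]'

restored = TagsModel.from_db(row)
# restored.tags == ["python", "sql"], restored.id == 1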
hh_applicant_tool/storage/models/contact.py
@@ -0,0 +1,16 @@
+from .base import BaseModel, mapped
+
+
+# Extracted from a vacancy
+class EmployerContactModel(BaseModel):
+    id: int
+    employer_id: int = mapped(path="employer.id")
+    email: str = mapped(path="contacts.email")
+    name: str = mapped(path="contacts.name", default=None)
+    phone_numbers: str = mapped(
+        path="contacts.phones",
+        transform=lambda phones: ", ".join(
+            p["formatted"] for p in phones if p.get("number")
+        ),
+        default=None,
+    )
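A from_api sketch for the contact model with a made-up vacancy fragment, showing how the dotted-path lookups and the phones transform flatten the nested contact data:

contact = EmployerContactModel.from_api(
    {
        "id": 1,  # illustrative value
        "employer": {"id": 555},
        "contacts": {
            "name": "Anna",
            "email": "hr@example.com",
            "phones": [{"number": "1234567", "formatted": "+7 (900) 123-45-67"}],
        },
    }
)
# contact.employer_id == 555, contact.phone_numbers == "+7 (900) 123-45-67"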
hh_applicant_tool/storage/models/employer.py
@@ -0,0 +1,12 @@
+from .base import BaseModel, mapped
+
+
+class EmployerModel(BaseModel):
+    id: int
+    name: str
+    type: str | None = None
+    description: str | None = None
+    site_url: str | None = None
+    alternate_url: str | None = None
+    area_id: int = mapped(path="area.id", default=None)
+    area_name: str = mapped(path="area.name", default=None)
hh_applicant_tool/storage/models/negotiation.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from datetime import datetime
+
+from .base import BaseModel, mapped
+
+
+class NegotiationModel(BaseModel):
+    id: int
+    chat_id: int
+    state: str = mapped(path="state.id")
+    vacancy_id: int = mapped(path="vacancy.id")
+    employer_id: int = mapped(path="vacancy.employer.id", default=None)
+    resume_id: str = mapped(path="resume.id")
+    created_at: datetime | None = None
+    updated_at: datetime | None = None
hh_applicant_tool/storage/models/resume.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from datetime import datetime
+
+from .base import BaseModel, mapped
+
+
+class ResumeModel(BaseModel):
+    id: str
+    title: str
+    url: str
+    alternate_url: str
+    status_id: str = mapped(path="status.id")
+    status_name: str = mapped(path="status.name")
+    can_publish_or_update: bool = False
+    total_views: int = mapped(path="counters.total_views", default=0)
+    new_views: int = mapped(path="counters.new_views", default=0)
+    created_at: datetime | None = None
+    updated_at: datetime | None = None
hh_applicant_tool/storage/models/vacancy.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from datetime import datetime
+
+from .base import BaseModel, mapped
+
+
+class VacancyModel(BaseModel):
+    id: int
+    name: str
+    alternate_url: str
+    area_id: int = mapped(path="area.id")
+    area_name: str = mapped(path="area.name")
+    salary_from: int = mapped(path="salary.from", default=None)
+    salary_to: int = mapped(path="salary.to", default=None)
+    currency: str = mapped(path="salary.currency", default="RUR")
+    gross: bool = mapped(path="salary.gross", default=False)
+
+    remote: bool = mapped(
+        path="schedule.id",
+        transform=lambda v: v == "remote",
+        default=False,
+    )
+
+    experience: str = mapped(path="experience.id", default=None)
+    professional_roles: list[dict] = mapped(
+        store_json=True, default_factory=list
+    )
+
+    created_at: datetime | None = None
+    published_at: datetime | None = None
+    updated_at: datetime | None = None
+
+    def __post_init__(self):
+        self.salary_from = self.salary_from or self.salary_to or 0
+        self.salary_to = self.salary_to or self.salary_from or 0
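A from_api sketch for the vacancy model with an illustrative payload (the id, URL and numbers are made up), showing the dotted-path extraction, the schedule transform and the salary normalization in __post_init__:

v = VacancyModel.from_api(
    {
        "id": 123,
        "name": "Python Developer",
        "alternate_url": "https://hh.ru/vacancy/123",
        "area": {"id": "1", "name": "Moscow"},
        "salary": {"from": None, "to": 200000, "currency": "RUR", "gross": False},
        "schedule": {"id": "remote"},
    }
)
# schedule.id == "remote" is transformed into v.remote == True
# __post_init__ fills the missing bound: v.salary_from == v.salary_to == 200000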
hh_applicant_tool/storage/queries/migrations/.gitkeep (file without changes)
hh_applicant_tool/storage/queries/schema.sql
@@ -0,0 +1,119 @@
+PRAGMA foreign_keys = OFF;
+-- Disable the checks just in case
+BEGIN;
+/* ===================== employers ===================== */
+CREATE TABLE IF NOT EXISTS employers (
+    id INTEGER PRIMARY KEY,
+    name TEXT NOT NULL,
+    type TEXT,
+    description TEXT,
+    site_url TEXT,
+    area_id INTEGER,
+    area_name TEXT,
+    alternate_url TEXT,
+    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
+);
+/* ===================== employer_contacts ===================== */
+CREATE TABLE IF NOT EXISTS employer_contacts (
+    id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))),
+    employer_id INTEGER NOT NULL,
+    -- Just a plain column, no REFERENCES
+    name TEXT,
+    email TEXT,
+    phone_numbers TEXT NOT NULL,
+    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    UNIQUE (employer_id, email)
+);
+/* ===================== vacancies ===================== */
+CREATE TABLE IF NOT EXISTS vacancies (
+    id INTEGER PRIMARY KEY,
+    name TEXT NOT NULL,
+    area_id INTEGER,
+    area_name TEXT,
+    salary_from INTEGER,
+    salary_to INTEGER,
+    currency VARCHAR(3),
+    gross BOOLEAN,
+    published_at DATETIME,
+    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    remote BOOLEAN,
+    experience TEXT,
+    professional_roles TEXT,
+    alternate_url TEXT
+);
+/* ===================== negotiations ===================== */
+CREATE TABLE IF NOT EXISTS negotiations (
+    id INTEGER PRIMARY KEY,
+    state TEXT NOT NULL,
+    vacancy_id INTEGER NOT NULL,
+    employer_id INTEGER NOT NULL,
+    chat_id INTEGER NOT NULL,
+    resume_id TEXT,
+    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
+);
+/* ===================== settings ===================== */
+CREATE TABLE IF NOT EXISTS settings (
+    key TEXT PRIMARY KEY,
+    value TEXT NOT NULL
+);
+/* ===================== resumes ===================== */
+CREATE TABLE IF NOT EXISTS resumes (
+    id TEXT PRIMARY KEY,
+    title TEXT NOT NULL,
+    url TEXT,
+    alternate_url TEXT,
+    status_id TEXT,
+    status_name TEXT,
+    can_publish_or_update BOOLEAN,
+    total_views INTEGER DEFAULT 0,
+    new_views INTEGER DEFAULT 0,
+    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
+);
+/* ===================== INDEXES FOR STATISTICS ===================== */
+-- So that selecting rows by updated_at for sending to the server isn't slow
+CREATE INDEX IF NOT EXISTS idx_vac_upd ON vacancies(updated_at);
+CREATE INDEX IF NOT EXISTS idx_emp_upd ON employers(updated_at);
+CREATE INDEX IF NOT EXISTS idx_neg_upd ON negotiations(updated_at);
+/* ===================== TRIGGERS (always refresh the date) ===================== */
+-- Removed the WHEN condition. Now the date is forcibly refreshed on any UPDATE.
+CREATE TRIGGER IF NOT EXISTS trg_resumes_updated
+AFTER
+UPDATE ON resumes BEGIN
+    UPDATE resumes
+    SET updated_at = CURRENT_TIMESTAMP
+    WHERE id = OLD.id;
+END;
+CREATE TRIGGER IF NOT EXISTS trg_employers_updated
+AFTER
+UPDATE ON employers BEGIN
+    UPDATE employers
+    SET updated_at = CURRENT_TIMESTAMP
+    WHERE id = OLD.id;
+END;
+CREATE TRIGGER IF NOT EXISTS trg_employer_contacts_updated
+AFTER
+UPDATE ON employer_contacts BEGIN
+    UPDATE employer_contacts
+    SET updated_at = CURRENT_TIMESTAMP
+    WHERE id = OLD.id;
+END;
+CREATE TRIGGER IF NOT EXISTS trg_vacancies_updated
+AFTER
+UPDATE ON vacancies BEGIN
+    UPDATE vacancies
+    SET updated_at = CURRENT_TIMESTAMP
+    WHERE id = OLD.id;
+END;
+CREATE TRIGGER IF NOT EXISTS trg_negotiations_updated
+AFTER
+UPDATE ON negotiations BEGIN
+    UPDATE negotiations
+    SET updated_at = CURRENT_TIMESTAMP
+    WHERE id = OLD.id;
+END;
+COMMIT;
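The facade applies this schema through init_db from hh_applicant_tool/storage/utils.py, which is not shown in this section. A minimal stand-in, assuming the schema is simply executed against the connection, could look like this:

import sqlite3
from pathlib import Path


def apply_schema(conn: sqlite3.Connection, schema_path: Path) -> None:
    # Stand-in for storage.utils.init_db; the package's real implementation is not shown here.
    conn.executescript(schema_path.read_text(encoding="utf-8"))


conn = sqlite3.connect(":memory:")
apply_schema(conn, Path("hh_applicant_tool/storage/queries/schema.sql"))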
hh_applicant_tool/storage/repositories/__init__.py (file without changes)
hh_applicant_tool/storage/repositories/base.py
@@ -0,0 +1,176 @@
+from __future__ import annotations
+
+import logging
+import sqlite3
+from collections.abc import Sequence
+from dataclasses import dataclass
+from functools import cached_property
+from typing import Any, ClassVar, Iterator, Mapping, Self, Type
+
+from ..models.base import BaseModel
+from ..utils import model2table
+
+DEFAULT_PRIMARY_KEY = "id"
+
+logger = logging.getLogger(__package__)
+
+
+@dataclass
+class BaseRepository:
+    model: ClassVar[Type[BaseModel] | None] = None
+    pkey: ClassVar[str] = DEFAULT_PRIMARY_KEY
+
+    conn: sqlite3.Connection
+    auto_commit: bool = True
+
+    @cached_property
+    def table_name(self) -> str:
+        return model2table(self.model)
+
+    def commit(self):
+        if self.conn.in_transaction:
+            self.conn.commit()
+
+    def rollback(self):
+        if self.conn.in_transaction:
+            self.conn.rollback()
+
+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is None:
+            self.commit()
+        else:
+            self.rollback()
+        return False
+
+    def maybe_commit(self, commit: bool | None = None) -> bool:
+        if commit is not None and commit or self.auto_commit:
+            self.commit()
+
+    def _row_to_model(self, cursor: sqlite3.Cursor, row: tuple) -> BaseModel:
+        data = {col[0]: value for col, value in zip(cursor.description, row)}  # noqa: B905
+        return self.model.from_db(data)
+
+    def find(self, **kwargs: Any) -> Iterator[BaseModel]:
+        # logger.debug(kwargs)
+        operators = {
+            "lt": "<",
+            "le": "<=",
+            "gt": ">",
+            "ge": ">=",
+            "ne": "!=",
+            "eq": "=",
+            "like": "LIKE",
+            "is": "IS",
+            "is_not": "IS NOT",
+            "in": "IN",
+            "not_in": "NOT IN",
+        }
+        conditions = []
+        sql_params = {}
+        for key, value in kwargs.items():
+            try:
+                key, op = key.rsplit("__", 1)
+            except ValueError:
+                op = "eq"
+            if op in ("in", "not_in"):
+                if not isinstance(value, (list, tuple)):
+                    value = [value]
+                in_placeholders = []
+                for i, v in enumerate(value, 1):
+                    p_name = f"{key}_{i}"
+                    in_placeholders.append(f":{p_name}")
+                    sql_params[p_name] = v
+                conditions.append(
+                    f"{key} {operators[op]} ({', '.join(in_placeholders)})"
+                )
+            else:
+                placeholder = f":{key}"
+                sql_params[key] = value
+                conditions.append(f"{key} {operators[op]} {placeholder}")
+        sql = f"SELECT * FROM {self.table_name}"
+        if conditions:
+            sql += f" WHERE {' AND '.join(conditions)}"
+        sql += " ORDER BY rowid DESC;"
+        logger.debug("%.2000s", sql)
+        cur = self.conn.execute(sql, sql_params)
+        yield from (self._row_to_model(cur, row) for row in cur.fetchall())
+
+    def get(self, pk: Any) -> BaseModel | None:
+        return next(self.find(**{f"{self.pkey}": pk}), None)
+
+    def count_total(self) -> int:
+        cur = self.conn.execute(f"SELECT count(*) FROM {self.table_name};")
+        return cur.fetchone()[0]
+
+    def delete(self, o: BaseModel, /, commit: bool | None = None) -> None:
+        sql = f"DELETE FROM {self.table_name} WHERE {self.pkey} = ?"
+        pk_value = getattr(o, self.pkey)
+        self.conn.execute(sql, (pk_value,))
+        self.maybe_commit(commit=commit)
+
+    remove = delete
+
+    def clear(self, commit: bool | None = None):
+        self.conn.execute(f"DELETE FROM {self.table_name};")
+        self.maybe_commit(commit)
+
+    clean = clear
+
+    def _insert(
+        self,
+        data: Mapping[str, Any],
+        /,
+        upsert: bool = True,
+        conflict_columns: Sequence[str] | None = None,
+        update_excludes: Sequence[str] = ("created_at", "updated_at"),
+        commit: bool | None = None,
+    ):
+        columns = list(data.keys())
+        sql = (
+            f"INSERT INTO {self.table_name} ({', '.join(columns)})"
+            f" VALUES (:{', :'.join(columns)})"
+        )
+
+        if upsert:
+            cols_set = set(columns)
+
+            # Determine the conflict columns: either those passed in, or the pkey
+            if conflict_columns:
+                conflict_set = set(conflict_columns) & cols_set
+            else:
+                conflict_set = {self.pkey} & cols_set
+
+            if conflict_set:
+                sql += f" ON CONFLICT({', '.join(conflict_set)})"
+
+                # Exclude from the update:
+                # 1. The conflict columns (SQL does not allow updating them)
+                # 2. The primary key (never changed)
+                # 3. Housekeeping columns (created_at, etc.)
+                update_set = (
+                    cols_set - conflict_set - {self.pkey} - set(update_excludes)
+                )
+
+                if update_set:
+                    update_clause = ", ".join(
+                        f"{c} = excluded.{c}" for c in update_set
+                    )
+                    sql += f" DO UPDATE SET {update_clause}"
+                else:
+                    sql += " DO NOTHING"
+
+        sql += ";"
+        logger.debug("%.2000s", sql)
+        self.conn.execute(sql, data)
+        self.maybe_commit(commit)
+
+    def save(
+        self, obj: BaseModel | Mapping[str, Any], /, **kwargs: Any
+    ) -> None:
+        if isinstance(obj, Mapping):
+            obj = self.model.from_api(obj)
+        data = obj.to_db()
+        self._insert(data, **kwargs)
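A usage sketch for the repository API, assuming storage is the StorageFacade instance from the facade sketch above and vacancy_payload is a vacancy dict as returned by the HH API; the double-underscore suffixes map onto the SQL operators defined in find():

# Equality is the default operator; "__ge" and "__in" select ">=" and "IN".
remote_vacancies = list(
    storage.vacancies.find(remote=True, area_id=1, salary_from__ge=150_000)
)
selected = list(storage.negotiations.find(id__in=[111, 222, 333]))

# save() accepts a model instance or a raw mapping (routed through from_api)
# and upserts on the primary key unless conflict_columns says otherwise.
storage.vacancies.save(vacancy_payload)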
hh_applicant_tool/storage/repositories/contacts.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+import logging
+
+from ..models.contact import EmployerContactModel
+from .base import BaseRepository
+
+logger = logging.getLogger(__package__)
+
+
+class EmployerContactsRepository(BaseRepository):
+    model = EmployerContactModel
+
+    def save(self, contact: EmployerContactModel) -> None:
+        # logger.debug(contact)
+        super().save(
+            contact,
+            conflict_columns=["employer_id", "email"],
+        )
hh_applicant_tool/storage/repositories/employers.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Iterator
+
+from ..models.employer import EmployerModel
+from .base import BaseRepository
+
+
+class EmployersRepository(BaseRepository):
+    model = EmployerModel
+
+    def find(self, **kwargs) -> Iterator[EmployerModel]:
+        return super().find(**kwargs)
hh_applicant_tool/storage/repositories/negotiations.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from logging import getLogger
+
+from ..models.negotiation import NegotiationModel
+from .base import BaseRepository
+
+logger = getLogger(__package__)
+
+
+class NegotiationRepository(BaseRepository):
+    model = NegotiationModel
hh_applicant_tool/storage/repositories/resumes.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+import sqlite3
+
+from ..models.resume import ResumeModel
+from .base import BaseRepository
+
+
+class ResumesRepository(BaseRepository):
+    """Repository for storing resumes."""
+
+    def __init__(self, conn: sqlite3.Connection):
+        super().__init__(conn)
+        self.model = ResumeModel