arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis has been flagged as potentially problematic; consult the registry's advisory page for more details.
- arthexis-0.1.26.dist-info/METADATA +272 -0
- arthexis-0.1.26.dist-info/RECORD +111 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
- config/__init__.py +5 -5
- config/active_app.py +15 -15
- config/asgi.py +29 -29
- config/auth_app.py +7 -7
- config/celery.py +32 -25
- config/context_processors.py +67 -68
- config/horologia_app.py +7 -7
- config/loadenv.py +11 -11
- config/logging.py +59 -48
- config/middleware.py +71 -25
- config/offline.py +49 -49
- config/settings.py +676 -492
- config/settings_helpers.py +109 -0
- config/urls.py +228 -159
- config/wsgi.py +17 -17
- core/admin.py +4052 -2066
- core/admin_history.py +50 -50
- core/admindocs.py +192 -151
- core/apps.py +350 -223
- core/auto_upgrade.py +72 -0
- core/backends.py +311 -124
- core/changelog.py +403 -0
- core/entity.py +149 -133
- core/environment.py +60 -43
- core/fields.py +168 -75
- core/form_fields.py +75 -0
- core/github_helper.py +188 -25
- core/github_issues.py +183 -172
- core/github_repos.py +72 -0
- core/lcd_screen.py +78 -78
- core/liveupdate.py +25 -25
- core/log_paths.py +114 -100
- core/mailer.py +89 -83
- core/middleware.py +91 -91
- core/models.py +5041 -2195
- core/notifications.py +105 -105
- core/public_wifi.py +267 -227
- core/reference_utils.py +107 -0
- core/release.py +940 -346
- core/rfid_import_export.py +113 -0
- core/sigil_builder.py +149 -131
- core/sigil_context.py +20 -20
- core/sigil_resolver.py +250 -284
- core/system.py +1425 -230
- core/tasks.py +538 -199
- core/temp_passwords.py +181 -0
- core/test_system_info.py +202 -43
- core/tests.py +2673 -1069
- core/tests_liveupdate.py +17 -17
- core/urls.py +11 -11
- core/user_data.py +681 -495
- core/views.py +2484 -789
- core/widgets.py +213 -51
- nodes/admin.py +2236 -445
- nodes/apps.py +98 -70
- nodes/backends.py +160 -53
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/lcd.py +165 -165
- nodes/models.py +2375 -870
- nodes/reports.py +411 -0
- nodes/rfid_sync.py +210 -0
- nodes/signals.py +18 -0
- nodes/tasks.py +141 -46
- nodes/tests.py +5045 -1489
- nodes/urls.py +29 -13
- nodes/utils.py +172 -73
- nodes/views.py +1768 -304
- ocpp/admin.py +1775 -481
- ocpp/apps.py +25 -25
- ocpp/consumers.py +1843 -630
- ocpp/evcs.py +844 -928
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +1417 -640
- ocpp/network.py +398 -0
- ocpp/reference_utils.py +42 -0
- ocpp/routing.py +11 -9
- ocpp/simulator.py +745 -368
- ocpp/status_display.py +26 -0
- ocpp/store.py +603 -403
- ocpp/tasks.py +479 -31
- ocpp/test_export_import.py +131 -130
- ocpp/test_rfid.py +1072 -540
- ocpp/tests.py +5494 -2296
- ocpp/transactions_io.py +197 -165
- ocpp/urls.py +50 -50
- ocpp/views.py +2024 -912
- pages/admin.py +1123 -396
- pages/apps.py +45 -10
- pages/checks.py +40 -40
- pages/context_processors.py +151 -85
- pages/defaults.py +13 -0
- pages/forms.py +221 -0
- pages/middleware.py +213 -153
- pages/models.py +720 -252
- pages/module_defaults.py +156 -0
- pages/site_config.py +137 -0
- pages/tasks.py +74 -0
- pages/tests.py +4009 -1389
- pages/urls.py +38 -20
- pages/utils.py +93 -12
- pages/views.py +1736 -762
- arthexis-0.1.9.dist-info/METADATA +0 -168
- arthexis-0.1.9.dist-info/RECORD +0 -92
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- nodes/actions.py +0 -70
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/changelog.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
"""Utilities for building and parsing the project changelog."""
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
import re
|
|
7
|
+
import subprocess
|
|
8
|
+
from typing import Iterable, List, Optional
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass(frozen=True)
class Commit:
    """A simplified representation of a git commit."""

    # Full commit hash as emitted by ``git log --pretty=format:%H``.
    sha: str
    # Author date in ``YYYY-MM-DD`` form (``--date=short`` in _read_commits).
    date: str
    # First line of the commit message (``%s``).
    subject: str
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
class ChangelogSection:
    """A rendered changelog section."""

    # Heading text, e.g. ``"v1.2 (2024-01-01)"`` or ``"Unreleased"``.
    title: str
    # Bullet lines of the form ``- <sha8> <subject>``.
    entries: List[str]
    # Version without the leading ``v``; ``None`` for the Unreleased section.
    version: Optional[str] = None
    # Release date (``YYYY-MM-DD``) when known.
    date: Optional[str] = None
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
# Matches release-marker commit subjects ("Release v1.2", "pre-release commit
# 1.2", case-insensitive) and captures the version with any leading "v" dropped.
_RE_RELEASE = re.compile(
    r"^(?:pre-release commit|Release)\s+v?(?P<version>[0-9A-Za-z][0-9A-Za-z.\-_]*)",
    re.IGNORECASE,
)
# Extracts the version from a section title that begins with "v<version>".
_RE_TITLE_VERSION = re.compile(r"^v(?P<version>[0-9A-Za-z][0-9A-Za-z.\-_]*)")
# Extracts a "(YYYY-MM-DD)" date anywhere within a section title.
_RE_TITLE_DATE = re.compile(r"\((?P<date>\d{4}-\d{2}-\d{2})\)")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _read_commits(range_spec: str) -> List[Commit]:
    """Return commits for *range_spec* ordered newest first.

    Raises ``subprocess.CalledProcessError`` if ``git log`` fails.
    """

    # NUL-separated fields keep subjects containing arbitrary text intact.
    output = subprocess.run(
        [
            "git",
            "log",
            range_spec,
            "--no-merges",
            "--date=short",
            "--pretty=format:%H%x00%ad%x00%s",
        ],
        capture_output=True,
        text=True,
        check=True,
    ).stdout
    records = (line.split("\x00") for line in output.splitlines())
    # Silently skip malformed lines (wrong field count).
    return [
        Commit(sha=fields[0], date=fields[1], subject=fields[2])
        for fields in records
        if len(fields) == 3
    ]
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _extract_release_version(subject: str) -> Optional[str]:
    """Return the version from a release-marker *subject*, else ``None``."""

    found = _RE_RELEASE.match(subject)
    return found.group("version") if found else None
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _should_include_subject(subject: str) -> bool:
|
|
68
|
+
return len(subject.split()) > 3
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _format_title(version: str, date: Optional[str]) -> str:
|
|
72
|
+
if date:
|
|
73
|
+
return f"v{version} ({date})"
|
|
74
|
+
return f"v{version}"
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _sections_from_commits(commits: Iterable[Commit]) -> List[ChangelogSection]:
    """Group *commits* (newest first) into Unreleased plus release sections.

    A release-marker subject opens (or re-opens) the section for its version;
    subsequent commits belong to that section until the next marker. Commits
    seen before any marker accumulate in ``Unreleased``. Duplicate bullet
    lines within a section are dropped.
    """

    pending: list[str] = []  # entries seen before any release marker
    ordered_releases: list[ChangelogSection] = []
    by_version: dict[str, ChangelogSection] = {}
    active: ChangelogSection | None = None

    for commit in commits:
        marker_version = _extract_release_version(commit.subject)
        if marker_version:
            active = by_version.get(marker_version)
            if active is None:
                active = ChangelogSection(
                    title=_format_title(marker_version, commit.date),
                    entries=[],
                    version=marker_version,
                    date=commit.date,
                )
                ordered_releases.append(active)
                by_version[marker_version] = active
            elif commit.date and not active.date:
                # Backfill the date from a duplicate marker commit.
                active.date = commit.date
                active.title = _format_title(marker_version, commit.date)
            continue

        if not _should_include_subject(commit.subject):
            continue

        bullet = f"- {commit.sha[:8]} {commit.subject}"
        bucket = pending if active is None else active.entries
        if bullet not in bucket:
            bucket.append(bullet)

    head = ChangelogSection(
        title="Unreleased", entries=pending, version=None, date=None
    )
    return [head, *ordered_releases]
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def _parse_sections(text: str) -> List[ChangelogSection]:
    """Parse changelog *text* back into :class:`ChangelogSection` objects.

    A section is a title line followed by an underline made entirely of
    ``-`` characters of the same length; its entries run until the next
    blank line. Any other line is skipped.
    """
    lines = text.splitlines()
    sections: list[ChangelogSection] = []
    i = 0
    total = len(lines)
    while i < total:
        title = lines[i]
        underline_index = i + 1
        if underline_index >= total:
            break
        underline = lines[underline_index]
        # ``set(underline) == {"-"}`` also rejects an empty underline.
        if set(underline) == {"-"} and len(underline) == len(title):
            entries: list[str] = []
            i = underline_index + 1
            # Skip single blank line immediately after the heading if present.
            if i < total and lines[i] == "":
                i += 1
            while i < total and lines[i] != "":
                entries.append(lines[i])
                i += 1
            version = None
            date = None
            # Titles like "v<version> (<YYYY-MM-DD>)" carry metadata; other
            # titles (e.g. "Unreleased") leave version and date as None.
            match_version = _RE_TITLE_VERSION.match(title)
            if match_version:
                version = match_version.group("version")
            match_date = _RE_TITLE_DATE.search(title)
            if match_date:
                date = match_date.group("date")
            sections.append(
                ChangelogSection(title=title, entries=entries, version=version, date=date)
            )
            # Consume the blank gap between sections before rescanning.
            while i < total and lines[i] == "":
                i += 1
            continue
        i += 1
    return sections
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def _latest_release_version(previous_text: str) -> Optional[str]:
    """Return the first (most recent) versioned section in *previous_text*."""

    versioned = (s.version for s in _parse_sections(previous_text) if s.version)
    return next(versioned, None)
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _find_release_commit(version: str) -> Optional[str]:
    """Locate the commit whose subject marked *version* as released.

    Tries the known marker phrasings in priority order and returns the
    newest matching commit hash, or ``None`` if no marker is found.
    """

    bare = version.lstrip("v")
    candidates = [
        f"{prefix} {tagged}"
        for prefix in ("Release", "pre-release commit")
        for tagged in (f"v{bare}", bare)
    ]
    for needle in candidates:
        result = subprocess.run(
            [
                "git",
                "log",
                "--max-count=1",
                "--format=%H",
                "--fixed-strings",
                f"--grep={needle}",
            ],
            capture_output=True,
            text=True,
            check=False,
        )
        matched = result.stdout.strip()
        if matched:
            # Defensive: keep only the first line even though --max-count=1
            # should yield a single hash.
            return matched.splitlines()[0]
    return None
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _resolve_release_commit_from_text(previous_text: str) -> Optional[str]:
    """Map the newest version recorded in *previous_text* to its marker commit."""

    latest = _latest_release_version(previous_text)
    return _find_release_commit(latest) if latest else None
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def _merge_sections(
    new_sections: Iterable[ChangelogSection],
    old_sections: Iterable[ChangelogSection],
    *,
    reopen_latest: bool = False,
) -> List[ChangelogSection]:
    """Combine freshly generated sections with ones parsed from the old text.

    Regenerated sections win; old sections are merged into a matching version
    or appended so history outside the regenerated git range survives. With
    ``reopen_latest`` the single most recent old release's entries are folded
    back into ``Unreleased`` instead of being kept as a release section.
    NOTE: sections from ``new_sections`` may be mutated in place.
    """
    merged = list(new_sections)
    old_sections_list = list(old_sections)
    version_to_section: dict[str, ChangelogSection] = {}
    unreleased_section: ChangelogSection | None = None

    # Index regenerated sections by version; remember the first version-less
    # one as the "Unreleased" bucket.
    for section in merged:
        if section.version is None and unreleased_section is None:
            unreleased_section = section
        if section.version:
            version_to_section[section.version] = section

    # The first versioned old section is the most recent previous release.
    first_release_version: str | None = None
    for old in old_sections_list:
        if old.version:
            first_release_version = old.version
            break

    reopened_latest_version = False

    for old in old_sections_list:
        if old.version is None:
            if unreleased_section is None:
                # No regenerated Unreleased bucket: adopt the old one (copied).
                unreleased_section = ChangelogSection(
                    title=old.title,
                    entries=list(old.entries),
                    version=None,
                    date=None,
                )
                merged.insert(0, unreleased_section)
            else:
                # Preserve the freshly generated ``Unreleased`` entries instead of
                # merging in stale content from the previous changelog text.
                # The older implementation discarded the previous ``Unreleased``
                # notes entirely, so keep that behaviour to avoid resurrecting
                # entries that were already promoted to a tagged release.
                continue
            continue

        existing = version_to_section.get(old.version)
        if existing is None:
            # Only the single most recent old release may be reopened, and
            # only when an Unreleased bucket exists to receive its entries.
            if (
                reopen_latest
                and first_release_version
                and old.version == first_release_version
                and not reopened_latest_version
                and unreleased_section is not None
            ):
                for entry in old.entries:
                    if entry not in unreleased_section.entries:
                        unreleased_section.entries.append(entry)
                reopened_latest_version = True
                continue
            # Copy (rather than alias) so later mutations cannot reach the
            # parsed old section.
            copied = ChangelogSection(
                title=old.title,
                entries=list(old.entries),
                version=old.version,
                date=old.date,
            )
            merged.append(copied)
            version_to_section[old.version] = copied
            continue

        # Same version on both sides: backfill the date and append any old
        # entries the regenerated section is missing.
        if old.date and not existing.date:
            existing.date = old.date
            existing.title = _format_title(old.version, old.date)
        for entry in old.entries:
            if entry not in existing.entries:
                existing.entries.append(entry)

    return merged
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def _resolve_start_tag(explicit: str | None = None) -> Optional[str]:
    """Return the most recent tag that should seed the changelog range."""

    if explicit:
        return explicit

    def _git(*args: str) -> subprocess.CompletedProcess:
        # Best-effort git invocation; callers inspect returncode/stdout.
        return subprocess.run(
            ["git", *args], capture_output=True, text=True, check=False
        )

    # When HEAD itself is tagged, start from the tag *before* it so the
    # current tag's commits are included in the generated range.
    if _git("describe", "--tags", "--exact-match", "HEAD").returncode == 0:
        if _git("rev-parse", "--verify", "HEAD^").returncode == 0:
            previous = _git("describe", "--tags", "--abbrev=0", "HEAD^")
            if previous.returncode == 0:
                tag = previous.stdout.strip()
                if tag:
                    return tag
        # Tagged root commit (or no earlier tag): nothing precedes it.
        return None

    describe = _git("describe", "--tags", "--abbrev=0")
    if describe.returncode == 0:
        tag = describe.stdout.strip()
        if tag:
            return tag
    return None
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
def determine_range_spec(
    start_tag: str | None = None, *, previous_text: str | None = None
) -> str:
    """Return the git range specification to build the changelog.

    Anchors on a tag when one can be resolved, otherwise on the release
    marker commit recorded in ``previous_text``; falls back to all of HEAD.
    """

    anchor = _resolve_start_tag(start_tag)
    if anchor is None and previous_text:
        anchor = _resolve_release_commit_from_text(previous_text)
    return f"{anchor}..HEAD" if anchor else "HEAD"
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
def collect_sections(
    *,
    range_spec: str = "HEAD",
    previous_text: str | None = None,
    reopen_latest: bool = False,
) -> List[ChangelogSection]:
    """Return changelog sections for *range_spec*.

    When ``previous_text`` is provided, sections not regenerated in the current run
    are appended so long as they can be parsed from the existing changelog. Set
    ``reopen_latest`` to ``True`` when the caller intends to move the most recent
    release notes back into the ``Unreleased`` section (for example, when
    preparing a release retry before a new tag is created).
    """

    generated = _sections_from_commits(_read_commits(range_spec))
    if not previous_text:
        return generated
    return _merge_sections(
        generated, _parse_sections(previous_text), reopen_latest=reopen_latest
    )
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def render_changelog(sections: Iterable[ChangelogSection]) -> str:
    """Render *sections* as an RST document ending in exactly one newline."""

    out: list[str] = ["Changelog", "=========", ""]
    for section in sections:
        underline = "-" * len(section.title)
        out += [section.title, underline, "", *section.entries, ""]
    # Collapse trailing blank lines so the text ends with a single newline.
    while out and not out[-1]:
        out.pop()
    out.append("")
    return "\n".join(out)
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
def extract_release_notes(text: str, version: str) -> str:
    """Return the changelog entries matching *version*.

    When no dedicated section for the release exists, the ``Unreleased`` section is
    returned instead to capture the pending notes for the current release.
    """

    parsed = _parse_sections(text)
    wanted = version.lstrip("v")
    exact = next(
        (s for s in parsed if s.version and s.version.lstrip("v") == wanted),
        None,
    )
    if exact is not None:
        return "\n".join(exact.entries).strip()
    fallback = next((s for s in parsed if s.version is None), None)
    if fallback is not None:
        return "\n".join(fallback.entries).strip()
    return ""
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
# Public API; underscore-prefixed helpers above are internal.
__all__ = [
    "ChangelogSection",
    "Commit",
    "determine_range_spec",
    "collect_sections",
    "extract_release_notes",
    "render_changelog",
]
|
core/entity.py
CHANGED
|
@@ -1,133 +1,149 @@
|
|
|
1
|
-
import copy
|
|
2
|
-
import logging
|
|
3
|
-
|
|
4
|
-
from django.contrib.auth.models import UserManager as DjangoUserManager
|
|
5
|
-
from django.core.exceptions import FieldDoesNotExist
|
|
6
|
-
from django.db import models
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
fields = []
|
|
97
|
-
break
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
1
|
+
import copy
|
|
2
|
+
import logging
|
|
3
|
+
|
|
4
|
+
from django.contrib.auth.models import UserManager as DjangoUserManager
|
|
5
|
+
from django.core.exceptions import FieldDoesNotExist
|
|
6
|
+
from django.db import models
|
|
7
|
+
from django.dispatch import Signal
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)


# Emitted by EntityQuerySet.update() after a bulk update touches
# ``is_user_data`` so listeners can refresh any cached user-data state.
user_data_flag_updated = Signal()
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class EntityQuerySet(models.QuerySet):
    """QuerySet that routes bulk operations through Entity semantics."""

    def delete(self):  # pragma: no cover - delegates to instance delete
        """Delete row by row so each instance's delete() logic runs."""
        count = 0
        for instance in self:
            instance.delete()
            count += 1
        return count, {}

    def update(self, **kwargs):
        """Bulk update; signals when ``is_user_data`` changes on any row."""
        touches_user_data = "is_user_data" in kwargs
        changed = super().update(**kwargs)
        if touches_user_data and changed:
            user_data_flag_updated.send(sender=self.model)
        return changed
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class EntityManager(models.Manager):
    """Default manager: hides soft-deleted rows."""

    def get_queryset(self):
        queryset = EntityQuerySet(self.model, using=self._db)
        return queryset.filter(is_deleted=False)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class EntityAllManager(models.Manager):
    """Manager exposing every row, including soft-deleted entities."""

    def get_queryset(self):
        # No ``is_deleted`` filter: callers see soft-deleted rows too.
        return EntityQuerySet(self.model, using=self._db)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class EntityUserManager(DjangoUserManager):
    """User-model manager that also hides soft-deleted accounts."""

    def get_queryset(self):
        base = EntityQuerySet(self.model, using=self._db)
        return base.filter(is_deleted=False)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class Entity(models.Model):
    """Base model providing seed data tracking and soft deletion."""

    # Flags are not editable: they are managed by fixtures/system code, and
    # save() below refuses to let ordinary saves change the first two.
    is_seed_data = models.BooleanField(default=False, editable=False)
    is_user_data = models.BooleanField(default=False, editable=False)
    is_deleted = models.BooleanField(default=False, editable=False)

    # Default manager hides soft-deleted rows; ``all_objects`` sees everything.
    objects = EntityManager()
    all_objects = EntityAllManager()

    class Meta:
        abstract = True

    def clone(self):
        """Return an unsaved copy of this instance."""
        new = copy.copy(self)
        new.pk = None
        return new

    def save(self, *args, **kwargs):
        """Save, preserving the stored seed/user-data flags on updates.

        On an update the current DB values of ``is_seed_data`` and
        ``is_user_data`` are copied back onto the instance so callers cannot
        change them through a normal save.
        """
        if self.pk:
            try:
                old = type(self).all_objects.get(pk=self.pk)
            except type(self).DoesNotExist:
                # pk was set manually for a new row; nothing to preserve.
                pass
            else:
                self.is_seed_data = old.is_seed_data
                self.is_user_data = old.is_user_data
        super().save(*args, **kwargs)

    @classmethod
    def _unique_field_groups(cls):
        """Return concrete field tuples enforcing uniqueness for this model."""

        opts = cls._meta
        groups: list[tuple[models.Field, ...]] = []

        # Single-field uniqueness (unique=True, excluding the primary key).
        for field in opts.concrete_fields:
            if field.unique and not field.primary_key:
                groups.append((field,))

        # Legacy Meta.unique_together tuples; a group is discarded whole if
        # any member is missing, non-concrete, or the primary key.
        for unique in opts.unique_together:
            fields: list[models.Field] = []
            for name in unique:
                try:
                    field = opts.get_field(name)
                except FieldDoesNotExist:
                    fields = []
                    break
                if not getattr(field, "concrete", False) or field.primary_key:
                    fields = []
                    break
                fields.append(field)
            if fields:
                groups.append(tuple(fields))

        # UniqueConstraint entries; conditional constraints are skipped since
        # they do not enforce uniqueness unconditionally.
        for constraint in opts.constraints:
            if not isinstance(constraint, models.UniqueConstraint):
                continue
            if not constraint.fields or constraint.condition is not None:
                continue
            fields = []
            for name in constraint.fields:
                try:
                    field = opts.get_field(name)
                except FieldDoesNotExist:
                    fields = []
                    break
                if not getattr(field, "concrete", False) or field.primary_key:
                    fields = []
                    break
                fields.append(field)
            if fields:
                groups.append(tuple(fields))

        # De-duplicate by attname tuple while preserving first-seen order.
        unique_groups: list[tuple[models.Field, ...]] = []
        seen: set[tuple[str, ...]] = set()
        for fields in groups:
            key = tuple(field.attname for field in fields)
            if key in seen:
                continue
            seen.add(key)
            unique_groups.append(fields)
        return unique_groups

    def resolve_sigils(self, field: str) -> str:
        """Return ``field`` value with [ROOT.KEY] tokens resolved."""
        name = field.lower()
        fobj = next((f for f in self._meta.fields if f.name.lower() == name), None)
        if not fobj:
            return ""
        value = self.__dict__.get(fobj.attname, "")
        if value is None:
            return ""
        # Imported lazily to avoid a circular import at module load time.
        from .sigil_resolver import resolve_sigils as _resolve

        return _resolve(str(value), current=self)

    def delete(self, using=None, keep_parents=False):
        """Soft-delete seed data; hard-delete everything else."""
        if self.is_seed_data:
            self.is_deleted = True
            self.save(update_fields=["is_deleted"])
        else:
            super().delete(using=using, keep_parents=keep_parents)
|