arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl
This diff compares the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release: this version of arthexis has been flagged as possibly problematic.
- arthexis-0.1.26.dist-info/METADATA +272 -0
- arthexis-0.1.26.dist-info/RECORD +111 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
- config/__init__.py +5 -5
- config/active_app.py +15 -15
- config/asgi.py +29 -29
- config/auth_app.py +7 -7
- config/celery.py +32 -25
- config/context_processors.py +67 -68
- config/horologia_app.py +7 -7
- config/loadenv.py +11 -11
- config/logging.py +59 -48
- config/middleware.py +71 -25
- config/offline.py +49 -49
- config/settings.py +676 -492
- config/settings_helpers.py +109 -0
- config/urls.py +228 -159
- config/wsgi.py +17 -17
- core/admin.py +4052 -2066
- core/admin_history.py +50 -50
- core/admindocs.py +192 -151
- core/apps.py +350 -223
- core/auto_upgrade.py +72 -0
- core/backends.py +311 -124
- core/changelog.py +403 -0
- core/entity.py +149 -133
- core/environment.py +60 -43
- core/fields.py +168 -75
- core/form_fields.py +75 -0
- core/github_helper.py +188 -25
- core/github_issues.py +183 -172
- core/github_repos.py +72 -0
- core/lcd_screen.py +78 -78
- core/liveupdate.py +25 -25
- core/log_paths.py +114 -100
- core/mailer.py +89 -83
- core/middleware.py +91 -91
- core/models.py +5041 -2195
- core/notifications.py +105 -105
- core/public_wifi.py +267 -227
- core/reference_utils.py +107 -0
- core/release.py +940 -346
- core/rfid_import_export.py +113 -0
- core/sigil_builder.py +149 -131
- core/sigil_context.py +20 -20
- core/sigil_resolver.py +250 -284
- core/system.py +1425 -230
- core/tasks.py +538 -199
- core/temp_passwords.py +181 -0
- core/test_system_info.py +202 -43
- core/tests.py +2673 -1069
- core/tests_liveupdate.py +17 -17
- core/urls.py +11 -11
- core/user_data.py +681 -495
- core/views.py +2484 -789
- core/widgets.py +213 -51
- nodes/admin.py +2236 -445
- nodes/apps.py +98 -70
- nodes/backends.py +160 -53
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/lcd.py +165 -165
- nodes/models.py +2375 -870
- nodes/reports.py +411 -0
- nodes/rfid_sync.py +210 -0
- nodes/signals.py +18 -0
- nodes/tasks.py +141 -46
- nodes/tests.py +5045 -1489
- nodes/urls.py +29 -13
- nodes/utils.py +172 -73
- nodes/views.py +1768 -304
- ocpp/admin.py +1775 -481
- ocpp/apps.py +25 -25
- ocpp/consumers.py +1843 -630
- ocpp/evcs.py +844 -928
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +1417 -640
- ocpp/network.py +398 -0
- ocpp/reference_utils.py +42 -0
- ocpp/routing.py +11 -9
- ocpp/simulator.py +745 -368
- ocpp/status_display.py +26 -0
- ocpp/store.py +603 -403
- ocpp/tasks.py +479 -31
- ocpp/test_export_import.py +131 -130
- ocpp/test_rfid.py +1072 -540
- ocpp/tests.py +5494 -2296
- ocpp/transactions_io.py +197 -165
- ocpp/urls.py +50 -50
- ocpp/views.py +2024 -912
- pages/admin.py +1123 -396
- pages/apps.py +45 -10
- pages/checks.py +40 -40
- pages/context_processors.py +151 -85
- pages/defaults.py +13 -0
- pages/forms.py +221 -0
- pages/middleware.py +213 -153
- pages/models.py +720 -252
- pages/module_defaults.py +156 -0
- pages/site_config.py +137 -0
- pages/tasks.py +74 -0
- pages/tests.py +4009 -1389
- pages/urls.py +38 -20
- pages/utils.py +93 -12
- pages/views.py +1736 -762
- arthexis-0.1.9.dist-info/METADATA +0 -168
- arthexis-0.1.9.dist-info/RECORD +0 -92
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- nodes/actions.py +0 -70
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/system.py
CHANGED
@@ -1,230 +1,1425 @@
- [230 deleted lines: the previous version of core/system.py. The diff viewer
- truncated the removed side; only fragments of its import block survive
- (e.g. "from __future__ import annotations", "from django ...").]
+from __future__ import annotations
+
+from collections import deque
+from contextlib import closing
+from dataclasses import dataclass
+from datetime import datetime
+from functools import lru_cache
+from pathlib import Path
+import json
+import re
+import socket
+import subprocess
+import shutil
+import logging
+from typing import Callable, Iterable, Optional
+from urllib.parse import urlparse
+
+from django import forms
+from django.conf import settings
+from django.contrib import admin, messages
+from django.forms import modelformset_factory
+from django.template.response import TemplateResponse
+from django.http import HttpResponseRedirect
+from django.urls import path, reverse
+from django.utils import timezone
+from django.utils.formats import date_format
+from django.utils.html import format_html, format_html_join
+from django.utils.translation import gettext_lazy as _, ngettext
+
+from core.auto_upgrade import AUTO_UPGRADE_TASK_NAME, AUTO_UPGRADE_TASK_PATH
+from core import changelog as changelog_utils
+from core.release import (
+    _git_authentication_missing,
+    _git_remote_url,
+    _manager_git_credentials,
+    _remote_with_credentials,
+)
+from core.tasks import check_github_updates
+from core.models import Todo
+from utils import revision
+
+
+AUTO_UPGRADE_LOCK_NAME = "auto_upgrade.lck"
+AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
+AUTO_UPGRADE_LOG_NAME = "auto-upgrade.log"
+
+
+logger = logging.getLogger(__name__)
+
+
+def _github_repo_path(remote_url: str | None) -> str:
+    """Return the ``owner/repo`` path for a GitHub *remote_url* if possible."""
+
+    if not remote_url:
+        return ""
+
+    normalized = remote_url.strip()
+    if not normalized:
+        return ""
+
+    path = ""
+    if normalized.startswith("git@"):
+        host, _, remainder = normalized.partition(":")
+        if "github.com" not in host.lower():
+            return ""
+        path = remainder
+    else:
+        parsed = urlparse(normalized)
+        if "github.com" not in parsed.netloc.lower():
+            return ""
+        path = parsed.path
+
+    path = path.strip("/")
+    if path.endswith(".git"):
+        path = path[: -len(".git")]
+
+    if not path:
+        return ""
+
+    segments = [segment for segment in path.split("/") if segment]
+    if len(segments) < 2:
+        return ""
+
+    owner, repo = segments[-2], segments[-1]
+    return f"{owner}/{repo}"
+
+
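For context: _github_repo_path normalizes both SSH and HTTPS GitHub remotes to
an owner/repo path and returns an empty string for anything else. A minimal
sketch of the expected behavior (the URLs below are hypothetical, not taken
from this diff):

    assert _github_repo_path("git@github.com:owner/repo.git") == "owner/repo"
    assert _github_repo_path("https://github.com/owner/repo") == "owner/repo"
    assert _github_repo_path("https://gitlab.com/owner/repo.git") == ""  # non-GitHub host
    assert _github_repo_path(None) == ""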
+@lru_cache()
+def _github_commit_url_base() -> str:
+    """Return the GitHub commit URL template for the configured repository."""
+
+    try:
+        remote_url = _git_remote_url()
+    except FileNotFoundError:  # pragma: no cover - depends on environment setup
+        logger.debug("Skipping GitHub commit URL generation; git executable not found")
+        remote_url = None
+
+    repo_path = _github_repo_path(remote_url)
+    if not repo_path:
+        return ""
+    return f"https://github.com/{repo_path}/commit/{{sha}}"
+
+
+def _github_commit_url(sha: str) -> str:
+    """Return the GitHub commit URL for *sha* when available."""
+
+    base = _github_commit_url_base()
+    clean_sha = (sha or "").strip()
+    if not base or not clean_sha:
+        return ""
+    return base.replace("{sha}", clean_sha)
+
+
+def _auto_upgrade_mode_file(base_dir: Path) -> Path:
+    return base_dir / "locks" / AUTO_UPGRADE_LOCK_NAME
+
+
+def _auto_upgrade_skip_file(base_dir: Path) -> Path:
+    return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
+
+
+def _auto_upgrade_log_file(base_dir: Path) -> Path:
+    return base_dir / "logs" / AUTO_UPGRADE_LOG_NAME
+
+
+def _open_changelog_entries() -> list[dict[str, str]]:
+    """Return changelog entries that are not yet part of a tagged release."""
+
+    changelog_path = Path("CHANGELOG.rst")
+    try:
+        text = changelog_path.read_text(encoding="utf-8")
+    except FileNotFoundError:
+        return []
+    except OSError:
+        return []
+
+    collecting = False
+    entries: list[dict[str, str]] = []
+    for raw_line in text.splitlines():
+        line = raw_line.strip()
+        if not collecting:
+            if line == "Unreleased":
+                collecting = True
+            continue
+
+        if not line:
+            if entries:
+                break
+            continue
+
+        if set(line) == {"-"}:
+            # Underline immediately following the section heading.
+            continue
+
+        if not line.startswith("- "):
+            break
+
+        trimmed = line[2:].strip()
+        if not trimmed:
+            continue
+        parts = trimmed.split(" ", 1)
+        sha = parts[0]
+        message = parts[1] if len(parts) > 1 else ""
+        entries.append({"sha": sha, "message": message, "url": _github_commit_url(sha)})
+
+    return entries
+
+
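The parser above assumes a CHANGELOG.rst layout with an "Unreleased" heading,
an RST underline, and "- <sha> <message>" bullets. A hypothetical fragment in
the shape _open_changelog_entries expects (the SHAs and messages are invented):

    sample = (
        "Unreleased\n"
        "----------\n"
        "- 1a2b3c4 Fix widget rendering\n"
        "- 5d6e7f8 Update documentation\n"
    )
    # Each bullet becomes {"sha": "1a2b3c4", "message": "Fix widget rendering",
    # "url": _github_commit_url("1a2b3c4")}; collection stops at the first
    # blank line after the entries.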
+def _latest_release_changelog() -> dict[str, object]:
+    """Return the most recent tagged release entries for display."""
+
+    changelog_path = Path("CHANGELOG.rst")
+    try:
+        text = changelog_path.read_text(encoding="utf-8")
+    except (FileNotFoundError, OSError):
+        return {"title": "", "entries": []}
+
+    lines = text.splitlines()
+    state = "before"
+    release_title = ""
+    entries: list[dict[str, str]] = []
+
+    for raw_line in lines:
+        stripped = raw_line.strip()
+
+        if state == "before":
+            if stripped == "Unreleased":
+                state = "unreleased-heading"
+            continue
+
+        if state == "unreleased-heading":
+            if set(stripped) == {"-"}:
+                state = "unreleased-body"
+            else:
+                state = "unreleased-body"
+            continue
+
+        if state == "unreleased-body":
+            if not stripped:
+                state = "after-unreleased"
+            continue
+
+        if state == "after-unreleased":
+            if not stripped:
+                continue
+            release_title = stripped
+            state = "release-heading"
+            continue
+
+        if state == "release-heading":
+            if set(stripped) == {"-"}:
+                state = "release-body"
+            else:
+                state = "release-body"
+            continue
+
+        if state == "release-body":
+            if not stripped:
+                if entries:
+                    break
+                continue
+            if not stripped.startswith("- "):
+                break
+            trimmed = stripped[2:].strip()
+            if not trimmed:
+                continue
+            parts = trimmed.split(" ", 1)
+            sha = parts[0]
+            message = parts[1] if len(parts) > 1 else ""
+            entries.append({"sha": sha, "message": message, "url": _github_commit_url(sha)})
+
+    return {"title": release_title, "entries": entries}
+
+
+def _exclude_changelog_entries(shas: Iterable[str]) -> int:
+    """Remove entries matching ``shas`` from the changelog.
+
+    Returns the number of entries removed. Only entries within the
+    ``Unreleased`` section are considered.
+    """
+
+    normalized_shas = {sha.strip() for sha in shas if sha and sha.strip()}
+    if not normalized_shas:
+        return 0
+
+    changelog_path = Path("CHANGELOG.rst")
+    try:
+        text = changelog_path.read_text(encoding="utf-8")
+    except (FileNotFoundError, OSError):
+        return 0
+
+    lines = text.splitlines(keepends=True)
+    new_lines: list[str] = []
+    collecting = False
+    removed = 0
+
+    for raw_line in lines:
+        stripped = raw_line.strip()
+
+        if not collecting:
+            new_lines.append(raw_line)
+            if stripped == "Unreleased":
+                collecting = True
+            continue
+
+        if not stripped:
+            new_lines.append(raw_line)
+            continue
+
+        if set(stripped) == {"-"}:
+            new_lines.append(raw_line)
+            continue
+
+        if not stripped.startswith("- "):
+            new_lines.append(raw_line)
+            collecting = False
+            continue
+
+        trimmed = stripped[2:].strip()
+        if not trimmed:
+            new_lines.append(raw_line)
+            continue
+
+        sha = trimmed.split(" ", 1)[0]
+        if sha in normalized_shas:
+            removed += 1
+            normalized_shas.remove(sha)
+            continue
+
+        new_lines.append(raw_line)
+
+    if removed:
+        new_text = "".join(new_lines)
+        if not new_text.endswith("\n"):
+            new_text += "\n"
+        changelog_path.write_text(new_text, encoding="utf-8")
+
+    return removed
+
+
+def _regenerate_changelog() -> None:
+    """Rebuild the changelog file using recent git commits."""
+
+    changelog_path = Path("CHANGELOG.rst")
+    previous_text = (
+        changelog_path.read_text(encoding="utf-8") if changelog_path.exists() else None
+    )
+    range_spec = changelog_utils.determine_range_spec(previous_text=previous_text)
+    sections = changelog_utils.collect_sections(
+        range_spec=range_spec, previous_text=previous_text
+    )
+    content = changelog_utils.render_changelog(sections)
+    if not content.endswith("\n"):
+        content += "\n"
+    changelog_path.write_text(content, encoding="utf-8")
+
+
+def _format_git_command_output(
+    command: list[str], result: subprocess.CompletedProcess[str]
+) -> str:
+    """Return a readable summary of a git command execution."""
+
+    command_display = "$ " + " ".join(command)
+    message_parts = []
+    if result.stdout:
+        message_parts.append(result.stdout.strip())
+    if result.stderr:
+        message_parts.append(result.stderr.strip())
+    if result.returncode != 0:
+        message_parts.append(f"[exit status {result.returncode}]")
+    if message_parts:
+        return command_display + "\n" + "\n".join(part for part in message_parts if part)
+    return command_display
+
+
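To illustrate the summary format, a sketch using a stdlib CompletedProcess
with invented output (the error text is hypothetical):

    import subprocess

    fake = subprocess.CompletedProcess(
        args=["git", "push"],
        returncode=128,
        stdout="",
        stderr="fatal: could not read Username for 'https://github.com'",
    )
    print(_format_git_command_output(["git", "push"], fake))
    # $ git push
    # fatal: could not read Username for 'https://github.com'
    # [exit status 128]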
+def _git_status() -> str:
+    """Return the repository status after attempting to commit."""
+
+    status_result = subprocess.run(
+        ["git", "status", "--short", "--branch"],
+        capture_output=True,
+        text=True,
+        check=False,
+    )
+    stdout = status_result.stdout.strip()
+    stderr = status_result.stderr.strip()
+    if stdout and stderr:
+        return stdout + "\n" + stderr
+    return stdout or stderr
+
+
+def _commit_changelog() -> tuple[bool, str, str]:
+    """Stage, commit, and push the changelog file."""
+
+    def _retry_push_with_release_credentials(
+        command: list[str],
+        result: subprocess.CompletedProcess[str],
+    ) -> bool:
+        exc = subprocess.CalledProcessError(
+            result.returncode,
+            command,
+            output=result.stdout,
+            stderr=result.stderr,
+        )
+        if not _git_authentication_missing(exc):
+            return False
+
+        creds = _manager_git_credentials()
+        if not creds or not creds.has_auth():
+            return False
+
+        remote_url = _git_remote_url("origin")
+        if not remote_url:
+            return False
+
+        authed_url = _remote_with_credentials(remote_url, creds)
+        if not authed_url:
+            return False
+
+        retry_command = ["git", "push", authed_url]
+        retry_result = subprocess.run(
+            retry_command,
+            capture_output=True,
+            text=True,
+            check=False,
+        )
+        formatted_retry = _format_git_command_output(retry_command, retry_result)
+        if formatted_retry:
+            outputs.append(formatted_retry)
+        logger.info(
+            "Executed %s with exit code %s",
+            retry_command,
+            retry_result.returncode,
+        )
+        return retry_result.returncode == 0
+
+    git_commands: list[list[str]] = [
+        ["git", "add", "CHANGELOG.rst"],
+        [
+            "git",
+            "commit",
+            "-m",
+            "chore: update changelog",
+            "--",
+            "CHANGELOG.rst",
+        ],
+        ["git", "push"],
+    ]
+    outputs: list[str] = []
+    success = True
+
+    for command in git_commands:
+        result = subprocess.run(
+            command, capture_output=True, text=True, check=False
+        )
+        formatted = _format_git_command_output(command, result)
+        outputs.append(formatted)
+        logger.info("Executed %s with exit code %s", command, result.returncode)
+        if result.returncode != 0:
+            if command[:2] == ["git", "push"] and _retry_push_with_release_credentials(
+                command, result
+            ):
+                continue
+            success = False
+            break
+
+    command_output = "\n\n".join(output for output in outputs if output)
+    repo_status = _git_status()
+    return success, command_output, repo_status
+
+
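A subtlety worth noting in _commit_changelog: the nested retry helper appends
to outputs, a list that is assigned only after the helper is defined. That is
valid because Python closures resolve free variables at call time, and the
helper only runs inside the loop, by which point outputs exists. A standalone
sketch of the same pattern:

    def make_logger():
        def log(msg):
            sink.append(msg)  # `sink` is looked up when log() is called
        sink = []  # defined after log(), but before any call
        return log, sink

    log, sink = make_logger()
    log("hello")
    assert sink == ["hello"]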
+@dataclass(frozen=True)
+class SystemField:
+    """Metadata describing a single entry on the system admin page."""
+
+    label: str
+    sigil_key: str
+    value: object
+    field_type: str = "text"
+
+    @property
+    def sigil(self) -> str:
+        return f"SYS.{self.sigil_key}"
+
+
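SystemField is a frozen dataclass, so rows are immutable once built; the sigil
property simply prefixes the key with the SYS namespace. Illustrative use
(values hypothetical):

    field = SystemField(label="Revision", sigil_key="REVISION", value="abc1234")
    assert field.sigil == "SYS.REVISION"
    assert field.field_type == "text"  # default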
+_RUNSERVER_PORT_PATTERN = re.compile(r":(\d{2,5})(?:\D|$)")
+_RUNSERVER_PORT_FLAG_PATTERN = re.compile(r"--port(?:=|\s+)(\d{2,5})", re.IGNORECASE)
+
+
+def _format_timestamp(dt: datetime | None) -> str:
+    """Return ``dt`` formatted using the active ``DATETIME_FORMAT``."""
+
+    if dt is None:
+        return ""
+    try:
+        localized = timezone.localtime(dt)
+    except Exception:
+        localized = dt
+    return date_format(localized, "DATETIME_FORMAT")
+
+
+def _auto_upgrade_next_check() -> str:
+    """Return the human-readable timestamp for the next auto-upgrade check."""
+
+    try:  # pragma: no cover - optional dependency failures
+        from django_celery_beat.models import PeriodicTask
+    except Exception:
+        return ""
+
+    try:
+        task = (
+            PeriodicTask.objects.select_related(
+                "interval", "crontab", "solar", "clocked"
+            )
+            .only("enabled", "last_run_at", "start_time", "name")
+            .get(name=AUTO_UPGRADE_TASK_NAME)
+        )
+    except PeriodicTask.DoesNotExist:
+        return ""
+    except Exception:  # pragma: no cover - database unavailable
+        return ""
+
+    if not task.enabled:
+        return str(_("Disabled"))
+
+    schedule = task.schedule
+    if schedule is None:
+        return ""
+
+    now = schedule.maybe_make_aware(schedule.now())
+
+    start_time = task.start_time
+    if start_time is not None:
+        try:
+            candidate_start = schedule.maybe_make_aware(start_time)
+        except Exception:
+            candidate_start = (
+                timezone.make_aware(start_time)
+                if timezone.is_naive(start_time)
+                else start_time
+            )
+        if candidate_start and candidate_start > now:
+            return _format_timestamp(candidate_start)
+
+    last_run_at = task.last_run_at
+    if last_run_at is not None:
+        try:
+            reference = schedule.maybe_make_aware(last_run_at)
+        except Exception:
+            reference = (
+                timezone.make_aware(last_run_at)
+                if timezone.is_naive(last_run_at)
+                else last_run_at
+            )
+    else:
+        reference = now
+
+    try:
+        remaining = schedule.remaining_estimate(reference)
+    except Exception:
+        return ""
+
+    next_run = now + remaining
+    return _format_timestamp(next_run)
+
+
+def _read_auto_upgrade_mode(base_dir: Path) -> dict[str, object]:
+    """Return metadata describing the configured auto-upgrade mode."""
+
+    mode_file = _auto_upgrade_mode_file(base_dir)
+    info: dict[str, object] = {
+        "mode": "version",
+        "enabled": False,
+        "lock_exists": mode_file.exists(),
+        "read_error": False,
+    }
+
+    if not info["lock_exists"]:
+        return info
+
+    info["enabled"] = True
+
+    try:
+        raw_value = mode_file.read_text(encoding="utf-8").strip()
+    except OSError:
+        info["read_error"] = True
+        return info
+
+    mode = raw_value or "version"
+    info["mode"] = mode
+    info["enabled"] = True
+    return info
+
+
+def _load_auto_upgrade_skip_revisions(base_dir: Path) -> list[str]:
+    """Return a sorted list of revisions blocked from auto-upgrade."""
+
+    skip_file = _auto_upgrade_skip_file(base_dir)
+    try:
+        lines = skip_file.read_text(encoding="utf-8").splitlines()
+    except FileNotFoundError:
+        return []
+    except OSError:
+        return []
+
+    revisions = {line.strip() for line in lines if line.strip()}
+    return sorted(revisions)
+
+
+def _parse_log_timestamp(value: str) -> datetime | None:
+    """Return a ``datetime`` parsed from ``value`` if it appears ISO formatted."""
+
+    if not value:
+        return None
+
+    candidate = value.strip()
+    if not candidate:
+        return None
+
+    if candidate[-1] in {"Z", "z"}:
+        candidate = f"{candidate[:-1]}+00:00"
+
+    try:
+        return datetime.fromisoformat(candidate)
+    except ValueError:
+        return None
+
+
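The trailing-"Z" rewrite matters because datetime.fromisoformat only accepts a
"Z" suffix from Python 3.11 onward; rewriting it to "+00:00" keeps the parser
working on older interpreters. For example (timestamp invented):

    assert _parse_log_timestamp("2024-05-01T12:00:00Z") == datetime.fromisoformat(
        "2024-05-01T12:00:00+00:00"
    )
    assert _parse_log_timestamp("not-a-date") is None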
+def _load_auto_upgrade_log_entries(
+    base_dir: Path, *, limit: int = 25
+) -> dict[str, object]:
+    """Return the most recent auto-upgrade log entries."""
+
+    log_file = _auto_upgrade_log_file(base_dir)
+    result: dict[str, object] = {
+        "path": log_file,
+        "entries": [],
+        "error": "",
+    }
+
+    try:
+        with log_file.open("r", encoding="utf-8") as handle:
+            lines = deque((line.rstrip("\n") for line in handle), maxlen=limit)
+    except FileNotFoundError:
+        return result
+    except OSError:
+        result["error"] = str(
+            _("The auto-upgrade log could not be read."))
+        return result
+
+    entries: list[dict[str, str]] = []
+    for raw_line in lines:
+        line = raw_line.strip()
+        if not line:
+            continue
+        timestamp_str, _, message = line.partition(" ")
+        message = message.strip()
+        timestamp = _parse_log_timestamp(timestamp_str)
+        if not message:
+            message = timestamp_str
+        if timestamp is not None:
+            timestamp_display = _format_timestamp(timestamp)
+        else:
+            timestamp_display = timestamp_str
+        entries.append({
+            "timestamp": timestamp_display,
+            "message": message,
+        })
+
+    result["entries"] = entries
+    return result
+
+
+def _get_auto_upgrade_periodic_task():
+    """Return the configured auto-upgrade periodic task, if available."""
+
+    try:  # pragma: no cover - optional dependency failures
+        from django_celery_beat.models import PeriodicTask
+    except Exception:
+        return None, False, str(_("django-celery-beat is not installed or configured."))
+
+    try:
+        task = (
+            PeriodicTask.objects.select_related(
+                "interval", "crontab", "solar", "clocked"
+            )
+            .only(
+                "enabled",
+                "last_run_at",
+                "start_time",
+                "one_off",
+                "total_run_count",
+                "queue",
+                "expires",
+                "task",
+                "name",
+                "description",
+            )
+            .get(name=AUTO_UPGRADE_TASK_NAME)
+        )
+    except PeriodicTask.DoesNotExist:
+        return None, True, ""
+    except Exception:
+        return None, False, str(_("Auto-upgrade schedule could not be loaded."))
+
+    return task, True, ""
+
+
+def _load_auto_upgrade_schedule() -> dict[str, object]:
+    """Return normalized auto-upgrade scheduling metadata."""
+
+    task, available, error = _get_auto_upgrade_periodic_task()
+    info: dict[str, object] = {
+        "available": available,
+        "configured": bool(task),
+        "enabled": getattr(task, "enabled", False) if task else False,
+        "one_off": getattr(task, "one_off", False) if task else False,
+        "queue": getattr(task, "queue", "") or "",
+        "schedule": "",
+        "start_time": "",
+        "last_run_at": "",
+        "next_run": "",
+        "total_run_count": 0,
+        "description": getattr(task, "description", "") or "",
+        "expires": "",
+        "task": getattr(task, "task", "") or "",
+        "name": getattr(task, "name", AUTO_UPGRADE_TASK_NAME) or AUTO_UPGRADE_TASK_NAME,
+        "error": error,
+    }
+
+    if not task:
+        return info
+
+    info["start_time"] = _format_timestamp(getattr(task, "start_time", None))
+    info["last_run_at"] = _format_timestamp(getattr(task, "last_run_at", None))
+    info["expires"] = _format_timestamp(getattr(task, "expires", None))
+    try:
+        run_count = int(getattr(task, "total_run_count", 0) or 0)
+    except (TypeError, ValueError):
+        run_count = 0
+    info["total_run_count"] = run_count
+
+    try:
+        schedule_obj = task.schedule
+    except Exception:  # pragma: no cover - schedule property may raise
+        schedule_obj = None
+
+    if schedule_obj is not None:
+        try:
+            info["schedule"] = str(schedule_obj)
+        except Exception:  # pragma: no cover - schedule string conversion failed
+            info["schedule"] = ""
+
+    info["next_run"] = _auto_upgrade_next_check()
+    return info
+
+
+def _build_auto_upgrade_report(*, limit: int = 25) -> dict[str, object]:
+    """Assemble the composite auto-upgrade report for the admin view."""
+
+    base_dir = Path(settings.BASE_DIR)
+    mode_info = _read_auto_upgrade_mode(base_dir)
+    log_info = _load_auto_upgrade_log_entries(base_dir, limit=limit)
+    skip_revisions = _load_auto_upgrade_skip_revisions(base_dir)
+    schedule_info = _load_auto_upgrade_schedule()
+
+    mode_value = str(mode_info.get("mode", "version"))
+    is_latest = mode_value.lower() == "latest"
+
+    settings_info = {
+        "enabled": bool(mode_info.get("enabled", False)),
+        "mode": mode_value,
+        "is_latest": is_latest,
+        "lock_exists": bool(mode_info.get("lock_exists", False)),
+        "read_error": bool(mode_info.get("read_error", False)),
+        "mode_file": str(_auto_upgrade_mode_file(base_dir)),
+        "skip_revisions": skip_revisions,
+        "task_name": AUTO_UPGRADE_TASK_NAME,
+        "task_path": AUTO_UPGRADE_TASK_PATH,
+        "log_path": str(log_info.get("path")),
+    }
+
+    return {
+        "settings": settings_info,
+        "schedule": schedule_info,
+        "log_entries": log_info.get("entries", []),
+        "log_error": str(log_info.get("error", "")),
+    }
+
+
+def _resolve_auto_upgrade_namespace(key: str) -> str | None:
+    """Resolve sigils within the legacy ``AUTO-UPGRADE`` namespace."""
+
+    normalized = key.replace("-", "_").upper()
+    if normalized == "NEXT_CHECK":
+        return _auto_upgrade_next_check()
+    return None
+
+
+_SYSTEM_SIGIL_NAMESPACES: dict[str, Callable[[str], Optional[str]]] = {
+    "AUTO_UPGRADE": _resolve_auto_upgrade_namespace,
+}
+
+
+def resolve_system_namespace_value(key: str) -> str | None:
+    """Resolve dot-notation sigils mapped to dynamic ``SYS`` namespaces."""
+
+    if not key:
+        return None
+    normalized_key = key.replace("-", "_").upper()
+    if normalized_key == "NEXT_VER_CHECK":
+        return _auto_upgrade_next_check()
+    namespace, _, remainder = key.partition(".")
+    if not remainder:
+        return None
+    normalized = namespace.replace("-", "_").upper()
+    handler = _SYSTEM_SIGIL_NAMESPACES.get(normalized)
+    if not handler:
+        return None
+    return handler(remainder)
+
+
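How a dot-notation sigil routes through the namespace table (a sketch; the
actual return value depends on the local django-celery-beat schedule):

    resolve_system_namespace_value("AUTO-UPGRADE.NEXT-CHECK")
    # -> _resolve_auto_upgrade_namespace("NEXT-CHECK") -> _auto_upgrade_next_check()
    resolve_system_namespace_value("NEXT-VER-CHECK")  # legacy alias, same lookup
    resolve_system_namespace_value("UNKNOWN.KEY")     # -> None (no handler)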
+def _database_configurations() -> list[dict[str, str]]:
+    """Return a normalized list of configured database connections."""
+
+    databases: list[dict[str, str]] = []
+    for alias, config in settings.DATABASES.items():
+        engine = config.get("ENGINE", "")
+        name = config.get("NAME", "")
+        if engine is None:
+            engine = ""
+        if name is None:
+            name = ""
+        databases.append({
+            "alias": alias,
+            "engine": str(engine),
+            "name": str(name),
+        })
+    databases.sort(key=lambda entry: entry["alias"].lower())
+    return databases
+
+
+def _build_system_fields(info: dict[str, object]) -> list[SystemField]:
+    """Convert gathered system information into renderable rows."""
+
+    fields: list[SystemField] = []
+
+    def add_field(label: str, key: str, value: object, *, field_type: str = "text", visible: bool = True) -> None:
+        if not visible:
+            return
+        fields.append(SystemField(label=label, sigil_key=key, value=value, field_type=field_type))
+
+    add_field(_("Suite installed"), "INSTALLED", info.get("installed", False), field_type="boolean")
+    add_field(_("Revision"), "REVISION", info.get("revision", ""))
+
+    service_value = info.get("service") or _("not installed")
+    add_field(_("Service"), "SERVICE", service_value)
+
+    nginx_mode = info.get("mode", "")
+    port = info.get("port", "")
+    nginx_display = f"{nginx_mode} ({port})" if port else nginx_mode
+    add_field(_("Nginx mode"), "NGINX_MODE", nginx_display)
+
+    add_field(_("Node role"), "NODE_ROLE", info.get("role", ""))
+    add_field(
+        _("Display mode"),
+        "DISPLAY_MODE",
+        info.get("screen_mode", ""),
+        visible=bool(info.get("screen_mode")),
+    )
+
+    add_field(_("Features"), "FEATURES", info.get("features", []), field_type="features")
+    add_field(_("Running"), "RUNNING", info.get("running", False), field_type="boolean")
+    add_field(
+        _("Service status"),
+        "SERVICE_STATUS",
+        info.get("service_status", ""),
+        visible=bool(info.get("service")),
+    )
+
+    add_field(_("Hostname"), "HOSTNAME", info.get("hostname", ""))
+
+    ip_addresses: Iterable[str] = info.get("ip_addresses", [])  # type: ignore[assignment]
+    add_field(_("IP addresses"), "IP_ADDRESSES", " ".join(ip_addresses))
+
+    add_field(
+        _("Databases"),
+        "DATABASES",
+        info.get("databases", []),
+        field_type="databases",
+    )
+
+    add_field(
+        _("Next version check"),
+        "NEXT-VER-CHECK",
+        info.get("auto_upgrade_next_check", ""),
+    )
+
+    return fields
+
+
+def _export_field_value(field: SystemField) -> str:
+    """Serialize a ``SystemField`` value for sigil resolution."""
+
+    if field.field_type in {"features", "databases"}:
+        return json.dumps(field.value)
+    if field.field_type == "boolean":
+        return "True" if field.value else "False"
+    if field.value is None:
+        return ""
+    return str(field.value)
+
+
+def get_system_sigil_values() -> dict[str, str]:
+    """Expose system information in a format suitable for sigil lookups."""
+
+    info = _gather_info()
+    values: dict[str, str] = {}
+    for field in _build_system_fields(info):
+        exported = _export_field_value(field)
+        raw_key = (field.sigil_key or "").strip()
+        if not raw_key:
+            continue
+        variants = {
+            raw_key.upper(),
+            raw_key.replace("-", "_").upper(),
+        }
+        for variant in variants:
+            values[variant] = exported
+    return values
+
+
+def _parse_runserver_port(command_line: str) -> int | None:
+    """Extract the HTTP port from a runserver command line."""
+
+    for pattern in (_RUNSERVER_PORT_PATTERN, _RUNSERVER_PORT_FLAG_PATTERN):
+        match = pattern.search(command_line)
+        if match:
+            try:
+                return int(match.group(1))
+            except ValueError:
+                continue
+    return None
+
+
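The two module-level patterns cover both ways a port can appear on a runserver
command line (the command lines below are hypothetical):

    assert _parse_runserver_port("python manage.py runserver 0.0.0.0:8888") == 8888
    assert _parse_runserver_port("python manage.py runserver --port 8001") == 8001
    assert _parse_runserver_port("python manage.py runserver") is None  # caller falls back to 8000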
+def _detect_runserver_process() -> tuple[bool, int | None]:
+    """Return whether the dev server is running and the port if available."""
+
+    try:
+        result = subprocess.run(
+            ["pgrep", "-af", "manage.py runserver"],
+            capture_output=True,
+            text=True,
+            check=False,
+        )
+    except FileNotFoundError:
+        return False, None
+    except Exception:
+        return False, None
+
+    if result.returncode != 0:
+        return False, None
+
+    output = result.stdout.strip()
+    if not output:
+        return False, None
+
+    port = None
+    for line in output.splitlines():
+        port = _parse_runserver_port(line)
+        if port is not None:
+            break
+
+    if port is None:
+        port = 8000
+
+    return True, port
+
+
+def _probe_ports(candidates: list[int]) -> tuple[bool, int | None]:
+    """Attempt to connect to localhost on the provided ports."""
+
+    for port in candidates:
+        try:
+            with closing(socket.create_connection(("localhost", port), timeout=0.25)):
+                return True, port
+        except OSError:
+            continue
+    return False, None
+
+
+def _port_candidates(default_port: int) -> list[int]:
+    """Return a prioritized list of ports to probe for the HTTP service."""
+
+    candidates = [default_port]
+    for port in (8000, 8888):
+        if port not in candidates:
+            candidates.append(port)
+    return candidates
+
+
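_port_candidates puts the configured default first so that _probe_ports checks
the most likely listener before the common fallbacks:

    assert _port_candidates(8888) == [8888, 8000]
    assert _port_candidates(8000) == [8000, 8888]
    assert _port_candidates(8080) == [8080, 8000, 8888]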
+def _gather_info() -> dict:
+    """Collect basic system information similar to status.sh."""
+    base_dir = Path(settings.BASE_DIR)
+    lock_dir = base_dir / "locks"
+    info: dict[str, object] = {}
+
+    info["installed"] = (base_dir / ".venv").exists()
+    info["revision"] = revision.get_revision()
+
+    service_file = lock_dir / "service.lck"
+    info["service"] = service_file.read_text().strip() if service_file.exists() else ""
+
+    mode_file = lock_dir / "nginx_mode.lck"
+    if mode_file.exists():
+        try:
+            raw_mode = mode_file.read_text().strip()
+        except OSError:
+            raw_mode = ""
+    else:
+        raw_mode = ""
+    mode = raw_mode.lower() or "internal"
+    info["mode"] = mode
+    default_port = 8000 if mode == "public" else 8888
+    detected_port: int | None = None
+
+    screen_file = lock_dir / "screen_mode.lck"
+    info["screen_mode"] = (
+        screen_file.read_text().strip() if screen_file.exists() else ""
+    )
+
+    # Use settings.NODE_ROLE as the single source of truth for the node role.
+    info["role"] = getattr(settings, "NODE_ROLE", "Terminal")
+
+    features: list[dict[str, object]] = []
+    try:
+        from nodes.models import Node, NodeFeature
+    except Exception:
+        info["features"] = features
+    else:
+        feature_map: dict[str, dict[str, object]] = {}
+
+        def _add_feature(feature: NodeFeature, flag: str) -> None:
+            slug = getattr(feature, "slug", "") or ""
+            if not slug:
+                return
+            display = (getattr(feature, "display", "") or "").strip()
+            normalized = display or slug.replace("-", " ").title()
+            entry = feature_map.setdefault(
+                slug,
+                {
+                    "slug": slug,
+                    "display": normalized,
+                    "expected": False,
+                    "actual": False,
+                },
+            )
+            if display:
+                entry["display"] = display
+            entry[flag] = True
+
+        try:
+            expected_features = (
+                NodeFeature.objects.filter(roles__name=info["role"]).only("slug", "display").distinct()
+            )
+        except Exception:
+            expected_features = []
+        try:
+            for feature in expected_features:
+                _add_feature(feature, "expected")
+        except Exception:
+            pass
+
+        try:
+            local_node = Node.get_local()
+        except Exception:
+            local_node = None
+
+        actual_features = []
+        if local_node:
+            try:
+                actual_features = list(local_node.features.only("slug", "display"))
+            except Exception:
+                actual_features = []
+
+        try:
+            for feature in actual_features:
+                _add_feature(feature, "actual")
+        except Exception:
+            pass
+
+        features = sorted(
+            feature_map.values(),
+            key=lambda item: str(item.get("display", "")).lower(),
+        )
+        info["features"] = features
+
+    running = False
+    service_status = ""
+    service = info["service"]
+    if service and shutil.which("systemctl"):
+        try:
+            result = subprocess.run(
+                ["systemctl", "is-active", str(service)],
+                capture_output=True,
+                text=True,
+                check=False,
+            )
+            service_status = result.stdout.strip()
+            running = service_status == "active"
+        except Exception:
+            pass
+    else:
+        process_running, process_port = _detect_runserver_process()
+        if process_running:
+            running = True
+            detected_port = process_port
+
+    if not running or detected_port is None:
+        probe_running, probe_port = _probe_ports(_port_candidates(default_port))
+        if probe_running:
+            running = True
+            if detected_port is None:
+                detected_port = probe_port
+
+    info["running"] = running
+    info["port"] = detected_port if detected_port is not None else default_port
+    info["service_status"] = service_status
+
+    try:
+        hostname = socket.gethostname()
+        ip_list = socket.gethostbyname_ex(hostname)[2]
+    except Exception:
+        hostname = ""
+        ip_list = []
+    info["hostname"] = hostname
+    info["ip_addresses"] = ip_list
+
+    info["databases"] = _database_configurations()
+    info["auto_upgrade_next_check"] = _auto_upgrade_next_check()
+
+    return info
+
+
+def _system_view(request):
+    info = _gather_info()
+
+    context = admin.site.each_context(request)
+    context.update(
+        {
+            "title": _("System"),
+            "info": info,
+            "system_fields": _build_system_fields(info),
+        }
+    )
+    return TemplateResponse(request, "admin/system.html", context)
+
+
+def _system_changelog_report_view(request):
+    if request.method == "POST":
+        action = request.POST.get("action")
+        if action == "exclude":
+            selected_shas = request.POST.getlist("selected_shas")
+            removed = _exclude_changelog_entries(selected_shas)
+            if removed:
+                messages.success(
+                    request,
+                    ngettext(
+                        "Excluded %(count)d changelog entry.",
+                        "Excluded %(count)d changelog entries.",
+                        removed,
+                    )
+                    % {"count": removed},
+                )
+            else:
+                if selected_shas:
+                    messages.info(
+                        request,
+                        _(
+                            "The selected changelog entries were not found or have already been excluded."
+                        ),
+                    )
+                else:
+                    messages.info(
+                        request,
+                        _("Select at least one changelog entry to exclude."),
+                    )
+        elif action == "commit":
+            success, command_output, repo_status = _commit_changelog()
+            details: list[str] = []
+            if command_output:
+                details.append(
+                    format_html(
+                        "<div class=\"changelog-git-output\"><strong>{}</strong><pre>{}</pre></div>",
+                        _("Command log"),
+                        command_output,
+                    )
+                )
+            if repo_status:
+                details.append(
+                    format_html(
+                        "<div class=\"changelog-git-status\"><strong>{}</strong><pre>{}</pre></div>",
+                        _("Repository status"),
+                        repo_status,
+                    )
+                )
+            details_html = (
+                format_html_join("", "{}", ((detail,) for detail in details))
+                if details
+                else ""
+            )
+            if success:
+                base_message = _("Committed the changelog and pushed to the current branch.")
+                messages.success(request, format_html("{}{}", base_message, details_html))
+            else:
+                base_message = _("Unable to commit the changelog.")
+                messages.error(request, format_html("{}{}", base_message, details_html))
+        else:
+            try:
+                _regenerate_changelog()
+            except subprocess.CalledProcessError as exc:
+                logger.exception("Changelog regeneration failed")
+                messages.error(
+                    request,
+                    _("Unable to recalculate the changelog: %(error)s")
+                    % {"error": exc.stderr.strip() if exc.stderr else str(exc)},
+                )
+            except Exception as exc:  # pragma: no cover - unexpected failure
+                logger.exception("Unexpected error while regenerating changelog")
+                messages.error(
+                    request,
+                    _("Unable to recalculate the changelog: %(error)s")
+                    % {"error": str(exc)},
+                )
+            else:
+                messages.success(
+                    request,
+                    _("Successfully recalculated the changelog from recent commits."),
+                )
+        return HttpResponseRedirect(reverse("admin:system-changelog-report"))
+
+    context = admin.site.each_context(request)
+    context.update(
+        {
+            "title": _("Changelog Report"),
+            "open_changelog_entries": _open_changelog_entries(),
+            "latest_release_changelog": _latest_release_changelog(),
+        }
+    )
+    return TemplateResponse(request, "admin/system_changelog_report.html", context)
+
+
+def _system_upgrade_report_view(request):
+    context = admin.site.each_context(request)
+    context.update(
+        {
+            "title": _("Upgrade Report"),
+            "auto_upgrade_report": _build_auto_upgrade_report(),
+        }
+    )
+    return TemplateResponse(request, "admin/system_upgrade_report.html", context)
+
+
+class PendingTodoForm(forms.ModelForm):
+    mark_done = forms.BooleanField(required=False, label=_("Approve"))
+
+    class Meta:
+        model = Todo
+        fields = [
+            "request",
+            "request_details",
+            "url",
+            "generated_for_version",
+            "generated_for_revision",
+            "on_done_condition",
+        ]
+        widgets = {
+            "request_details": forms.Textarea(attrs={"rows": 3}),
+            "on_done_condition": forms.Textarea(attrs={"rows": 2}),
+        }
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        for name in [
+            "request",
+            "url",
+            "generated_for_version",
+            "generated_for_revision",
+        ]:
+            self.fields[name].widget.attrs.setdefault("class", "vTextField")
+        for name in ["request_details", "on_done_condition"]:
+            self.fields[name].widget.attrs.setdefault("class", "vLargeTextField")
+
+        mark_done_widget = self.fields["mark_done"].widget
+        existing_classes = mark_done_widget.attrs.get("class", "").split()
+        if "approve-checkbox" not in existing_classes:
+            existing_classes.append("approve-checkbox")
+        mark_done_widget.attrs["class"] = " ".join(
+            class_name for class_name in existing_classes if class_name
+        )
+
+
+PendingTodoFormSet = modelformset_factory(Todo, form=PendingTodoForm, extra=0)
+
+
+def _system_pending_todos_report_view(request):
+    queryset = (
+        Todo.objects.filter(is_deleted=False, done_on__isnull=True)
+        .order_by("request")
+    )
+    formset = PendingTodoFormSet(
+        request.POST or None,
+        queryset=queryset,
+        prefix="todos",
+    )
+
+    if request.method == "POST":
+        if formset.is_valid():
+            approved_count = 0
+            edited_count = 0
+            for form in formset.forms:
+                mark_done = form.cleaned_data.get("mark_done")
+                todo = form.save(commit=False)
+                has_changes = form.has_changed()
+                if mark_done and todo.done_on is None:
+                    todo.done_on = timezone.now()
+                    todo.populate_done_metadata(request.user)
+                    approved_count += 1
+                    has_changes = True
+                if has_changes:
+                    todo.save()
+                if form.has_changed():
+                    edited_count += 1
+                if has_changes and form.has_changed():
+                    form.save_m2m()
+
+            if approved_count or edited_count:
+                message_parts: list[str] = []
+                if edited_count:
+                    message_parts.append(
+                        ngettext(
+                            "%(count)d TODO updated.",
+                            "%(count)d TODOs updated.",
+                            edited_count,
+                        )
+                        % {"count": edited_count}
+                    )
+                if approved_count:
+                    message_parts.append(
+                        ngettext(
+                            "%(count)d TODO approved.",
+                            "%(count)d TODOs approved.",
+                            approved_count,
+                        )
+                        % {"count": approved_count}
+                    )
+                messages.success(request, " ".join(message_parts))
+            else:
+                messages.info(
+                    request,
+                    _("No changes were applied to the pending TODOs."),
+                )
+            return HttpResponseRedirect(reverse("admin:system-pending-todos-report"))
+        else:
+            messages.error(request, _("Please correct the errors below."))
+
+    rows = [
+        {
+            "form": form,
+            "todo": form.instance,
+        }
+        for form in formset.forms
+    ]
+
+    context = admin.site.each_context(request)
+    context.update(
+        {
+            "title": _("Pending TODOs Report"),
+            "formset": formset,
+            "rows": rows,
+        }
+    )
+    return TemplateResponse(
+        request,
+        "admin/system_pending_todos_report.html",
+        context,
+    )
+
+
+def _trigger_upgrade_check() -> bool:
+    """Return ``True`` when the upgrade check was queued asynchronously."""
+
+    try:
+        check_github_updates.delay()
+    except Exception:
+        logger.exception("Failed to enqueue upgrade check; running synchronously instead")
+        check_github_updates()
+        return False
+    return True
+
+
+def _system_trigger_upgrade_check_view(request):
+    if request.method != "POST":
+        return HttpResponseRedirect(reverse("admin:system-upgrade-report"))
+
+    try:
+        queued = _trigger_upgrade_check()
+    except Exception as exc:  # pragma: no cover - unexpected failure
+        logger.exception("Unable to trigger upgrade check")
+        messages.error(
+            request,
+            _("Unable to trigger an upgrade check: %(error)s")
+            % {"error": str(exc)},
+        )
+    else:
+        if queued:
+            messages.success(
+                request,
+                _("Upgrade check requested. The task will run shortly."),
+            )
+        else:
+            messages.success(
+                request,
+                _(
+                    "Upgrade check started locally. Review the auto-upgrade log for"
+                    " progress."
+                ),
+            )
+
+    return HttpResponseRedirect(reverse("admin:system-upgrade-report"))
+
+
+def patch_admin_system_view() -> None:
+    """Add custom admin view for system information."""
+    original_get_urls = admin.site.get_urls
+
+    def get_urls():
+        urls = original_get_urls()
+        custom = [
+            path("system/", admin.site.admin_view(_system_view), name="system"),
+            path(
+                "system/changelog-report/",
+                admin.site.admin_view(_system_changelog_report_view),
+                name="system-changelog-report",
+            ),
+            path(
+                "system/pending-todos-report/",
+                admin.site.admin_view(_system_pending_todos_report_view),
+                name="system-pending-todos-report",
+            ),
+            path(
+                "system/upgrade-report/",
+                admin.site.admin_view(_system_upgrade_report_view),
+                name="system-upgrade-report",
+            ),
+            path(
+                "system/upgrade-report/run-check/",
+                admin.site.admin_view(_system_trigger_upgrade_check_view),
+                name="system-upgrade-run-check",
+            ),
+        ]
+        return custom + urls
+
+    admin.site.get_urls = get_urls
+
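The diff does not show where patch_admin_system_view is invoked. A plausible
call site (an assumption, not confirmed by this diff; core/apps.py also
changed in this release and is the usual place for this kind of wiring) would
be an AppConfig.ready hook:

    from django.apps import AppConfig

    class CoreConfig(AppConfig):
        name = "core"

        def ready(self):
            # Hypothetical wiring; the actual hook is not part of this diff.
            from core.system import patch_admin_system_view
            patch_admin_system_view()  # prepends the custom system/ admin URLs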