arthexis 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis has been flagged as possibly problematic by the registry scanner.
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/METADATA +76 -23
- arthexis-0.1.11.dist-info/RECORD +99 -0
- config/context_processors.py +1 -0
- config/settings.py +245 -26
- config/urls.py +11 -4
- core/admin.py +585 -57
- core/apps.py +29 -1
- core/auto_upgrade.py +57 -0
- core/backends.py +115 -3
- core/environment.py +23 -5
- core/fields.py +93 -0
- core/mailer.py +3 -1
- core/models.py +482 -38
- core/reference_utils.py +108 -0
- core/sigil_builder.py +23 -5
- core/sigil_resolver.py +35 -4
- core/system.py +400 -140
- core/tasks.py +151 -8
- core/temp_passwords.py +181 -0
- core/test_system_info.py +97 -1
- core/tests.py +393 -15
- core/user_data.py +154 -16
- core/views.py +499 -20
- nodes/admin.py +149 -6
- nodes/backends.py +125 -18
- nodes/dns.py +203 -0
- nodes/models.py +498 -9
- nodes/tests.py +682 -3
- nodes/views.py +154 -7
- ocpp/admin.py +63 -3
- ocpp/consumers.py +255 -41
- ocpp/evcs.py +6 -3
- ocpp/models.py +52 -7
- ocpp/reference_utils.py +42 -0
- ocpp/simulator.py +62 -5
- ocpp/store.py +30 -0
- ocpp/test_rfid.py +169 -7
- ocpp/tests.py +414 -8
- ocpp/views.py +109 -76
- pages/admin.py +9 -1
- pages/context_processors.py +24 -4
- pages/defaults.py +14 -0
- pages/forms.py +131 -0
- pages/models.py +53 -14
- pages/tests.py +450 -14
- pages/urls.py +4 -0
- pages/views.py +419 -110
- arthexis-0.1.9.dist-info/RECORD +0 -92
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.11.dist-info}/top_level.txt +0 -0
core/views.py
CHANGED
@@ -3,19 +3,26 @@ import shutil
 from datetime import timedelta
 
 import requests
+from django.conf import settings
 from django.contrib.admin.views.decorators import staff_member_required
 from django.contrib.auth import authenticate, login
+from django.contrib import messages
+from django.contrib.sites.models import Site
 from django.http import Http404, JsonResponse
-from django.shortcuts import get_object_or_404, render,
-from django.views.decorators.csrf import csrf_exempt
-from django.views.decorators.http import require_POST
-from django.utils.translation import gettext as _
+from django.shortcuts import get_object_or_404, redirect, render, resolve_url
 from django.utils import timezone
+from django.utils.text import slugify
+from django.utils.translation import gettext as _
 from django.urls import NoReverseMatch, reverse
+from django.views.decorators.csrf import csrf_exempt
+from django.views.decorators.http import require_GET, require_POST
+from django.utils.http import url_has_allowed_host_and_scheme
 from pathlib import Path
+from urllib.parse import urlsplit, urlunsplit
+import errno
 import subprocess
-import json
 
+from utils import revision
 from utils.api import api_login_required
 
 from .models import Product, EnergyAccount, PackageRelease, Todo
@@ -42,6 +49,22 @@ def odoo_products(request):
     return JsonResponse(items, safe=False)
 
 
+@require_GET
+def version_info(request):
+    """Return the running application version and Git revision."""
+
+    version = ""
+    version_path = Path(settings.BASE_DIR) / "VERSION"
+    if version_path.exists():
+        version = version_path.read_text(encoding="utf-8").strip()
+    return JsonResponse(
+        {
+            "version": version,
+            "revision": revision.get_revision(),
+        }
+    )
+
+
 from . import release as release_utils
 
 
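The new version_info view above is a plain GET endpoint that reports the VERSION file contents plus the Git revision. A minimal sketch of exercising it with Django's test client; the URL name "version-info" is an assumption, since the matching route is added in config/urls.py, which is not shown in this hunk:

from django.test import Client
from django.urls import reverse

client = Client()
response = client.get(reverse("version-info"))  # hypothetical URL name; check config/urls.py
assert response.status_code == 200
print(response.json())  # e.g. {"version": "0.1.11", "revision": "<git sha>"}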
@@ -60,6 +83,188 @@ def _clean_repo() -> None:
     subprocess.run(["git", "clean", "-fd"], check=False)
 
 
+def _format_path(path: Path) -> str:
+    try:
+        return str(path.resolve().relative_to(Path.cwd()))
+    except ValueError:
+        return str(path)
+
+
+def _next_patch_version(version: str) -> str:
+    from packaging.version import InvalidVersion, Version
+
+    try:
+        parsed = Version(version)
+    except InvalidVersion:
+        parts = version.split(".")
+        for index in range(len(parts) - 1, -1, -1):
+            segment = parts[index]
+            if segment.isdigit():
+                parts[index] = str(int(segment) + 1)
+                return ".".join(parts)
+        return version
+    return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"
+
+
+def _write_todo_fixture(todo: Todo) -> Path:
+    safe_request = todo.request.replace(".", " ")
+    slug = slugify(safe_request).replace("-", "_")
+    if not slug:
+        slug = "todo"
+    path = TODO_FIXTURE_DIR / f"todos__{slug}.json"
+    path.parent.mkdir(parents=True, exist_ok=True)
+    data = [
+        {
+            "model": "core.todo",
+            "fields": {
+                "request": todo.request,
+                "url": todo.url,
+                "request_details": todo.request_details,
+            },
+        }
+    ]
+    path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
+    return path
+
+
+def _should_use_python_changelog(exc: OSError) -> bool:
+    winerror = getattr(exc, "winerror", None)
+    if winerror in {193}:
+        return True
+    return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}
+
+
+def _generate_changelog_with_python(log_path: Path) -> None:
+    _append_log(log_path, "Falling back to Python changelog generator")
+    describe = subprocess.run(
+        ["git", "describe", "--tags", "--abbrev=0"],
+        capture_output=True,
+        text=True,
+        check=False,
+    )
+    start_tag = describe.stdout.strip() if describe.returncode == 0 else ""
+    range_spec = f"{start_tag}..HEAD" if start_tag else "HEAD"
+    log_proc = subprocess.run(
+        ["git", "log", range_spec, "--no-merges", "--pretty=format:- %h %s"],
+        capture_output=True,
+        text=True,
+        check=True,
+    )
+    entries = [line for line in log_proc.stdout.splitlines() if line]
+    changelog_path = Path("CHANGELOG.rst")
+    previous_lines: list[str] = []
+    if changelog_path.exists():
+        previous_lines = changelog_path.read_text(encoding="utf-8").splitlines()
+        if len(previous_lines) > 6:
+            previous_lines = previous_lines[6:]
+        else:
+            previous_lines = []
+    lines = [
+        "Changelog",
+        "=========",
+        "",
+        "Unreleased",
+        "----------",
+        "",
+    ]
+    if entries:
+        lines.extend(entries)
+    if previous_lines:
+        lines.append("")
+        lines.extend(previous_lines)
+    content = "\n".join(lines)
+    if not content.endswith("\n"):
+        content += "\n"
+    changelog_path.write_text(content, encoding="utf-8")
+    _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")
+
+
+def _ensure_release_todo(release) -> tuple[Todo, Path]:
+    target_version = _next_patch_version(release.version)
+    request = f"Create release {release.package.name} {target_version}"
+    try:
+        url = reverse("admin:core_packagerelease_changelist")
+    except NoReverseMatch:
+        url = ""
+    todo, _ = Todo.all_objects.update_or_create(
+        request__iexact=request,
+        defaults={
+            "request": request,
+            "url": url,
+            "request_details": "",
+            "is_seed_data": True,
+            "is_deleted": False,
+            "is_user_data": False,
+            "done_on": None,
+            "on_done_condition": "",
+        },
+    )
+    fixture_path = _write_todo_fixture(todo)
+    return todo, fixture_path
+
+
+def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
+    """Ensure ``release`` matches the repository revision and version.
+
+    Returns a tuple ``(updated, previous_version)`` where ``updated`` is
+    ``True`` when any field changed and ``previous_version`` is the version
+    before synchronization.
+    """
+
+    from packaging.version import InvalidVersion, Version
+
+    previous_version = release.version
+    updated_fields: set[str] = set()
+
+    repo_version: Version | None = None
+    version_path = Path("VERSION")
+    if version_path.exists():
+        try:
+            repo_version = Version(version_path.read_text(encoding="utf-8").strip())
+        except InvalidVersion:
+            repo_version = None
+
+    try:
+        release_version = Version(release.version)
+    except InvalidVersion:
+        release_version = None
+
+    if repo_version is not None:
+        bumped_repo_version = Version(
+            f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}"
+        )
+        if release_version is None or release_version < bumped_repo_version:
+            release.version = str(bumped_repo_version)
+            release_version = bumped_repo_version
+            updated_fields.add("version")
+
+    current_revision = revision.get_revision()
+    if current_revision and current_revision != release.revision:
+        release.revision = current_revision
+        updated_fields.add("revision")
+
+    if updated_fields:
+        release.save(update_fields=list(updated_fields))
+        PackageRelease.dump_fixture()
+
+    package_updated = False
+    if release.package_id and not release.package.is_active:
+        release.package.is_active = True
+        release.package.save(update_fields=["is_active"])
+        package_updated = True
+
+    version_updated = False
+    if release.version:
+        current = ""
+        if version_path.exists():
+            current = version_path.read_text(encoding="utf-8").strip()
+        if current != release.version:
+            version_path.write_text(f"{release.version}\n", encoding="utf-8")
+            version_updated = True
+
+    return bool(updated_fields or version_updated or package_updated), previous_version
+
+
 def _changelog_notes(version: str) -> str:
     path = Path("CHANGELOG.rst")
     if not path.exists():
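Among the helpers added above, _next_patch_version drives the follow-up TODO created after each release: it bumps the micro component when packaging can parse the version and otherwise falls back to bumping the right-most numeric segment. A small sanity-check sketch (assumed values, not taken from the package's tests; importing core.views requires a configured Django settings module):

from core.views import _next_patch_version

assert _next_patch_version("0.1.10") == "0.1.11"  # parsed by packaging.Version, micro bumped
assert _next_patch_version("1.2.x") == "1.3.x"    # InvalidVersion fallback bumps the last numeric part
assert _next_patch_version("abc") == "abc"        # nothing numeric to bump, returned unchanged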
@@ -85,6 +290,46 @@ class ApprovalRequired(Exception):
     """Raised when release manager approval is required before continuing."""
 
 
+def _format_condition_failure(todo: Todo, result) -> str:
+    """Return a localized error message for a failed TODO condition."""
+
+    if result.error and result.resolved:
+        detail = _("%(condition)s (error: %(error)s)") % {
+            "condition": result.resolved,
+            "error": result.error,
+        }
+    elif result.error:
+        detail = _("Error: %(error)s") % {"error": result.error}
+    elif result.resolved:
+        detail = result.resolved
+    else:
+        detail = _("Condition evaluated to False")
+    return _("Condition failed for %(todo)s: %(detail)s") % {
+        "todo": todo.request,
+        "detail": detail,
+    }
+
+
+def _get_return_url(request) -> str:
+    """Return a safe URL to redirect back to after completing a TODO."""
+
+    candidates = [request.GET.get("next"), request.POST.get("next")]
+    referer = request.META.get("HTTP_REFERER")
+    if referer:
+        candidates.append(referer)
+
+    for candidate in candidates:
+        if not candidate:
+            continue
+        if url_has_allowed_host_and_scheme(
+            candidate,
+            allowed_hosts={request.get_host()},
+            require_https=request.is_secure(),
+        ):
+            return candidate
+    return resolve_url("admin:index")
+
+
 def _step_check_todos(release, ctx, log_path: Path) -> None:
     pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
     if pending_qs.exists():
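_get_return_url filters the next parameter and the Referer header through Django's url_has_allowed_host_and_scheme before redirecting, falling back to the admin index. An illustration of how that guard behaves (standard Django utility; the hosts and paths are example values):

from django.utils.http import url_has_allowed_host_and_scheme

url_has_allowed_host_and_scheme("/admin/core/todo/", allowed_hosts={"example.com"})        # True: relative path
url_has_allowed_host_and_scheme("https://evil.test/", allowed_hosts={"example.com"})       # False: foreign host
url_has_allowed_host_and_scheme("http://example.com/x", allowed_hosts={"example.com"},
                                require_https=True)                                        # False: scheme downgrade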
@@ -107,7 +352,7 @@ def _step_check_todos(release, ctx, log_path: Path) -> None:
         check=False,
     )
     ctx.pop("todos", None)
-    ctx
+    ctx["todos_ack"] = True
 
 
 def _step_check_version(release, ctx, log_path: Path) -> None:
@@ -158,6 +403,7 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
        )
        subprocess.run(["git", "add", *fixture_files], check=True)
        subprocess.run(["git", "commit", "-m", "chore: update fixtures"], check=True)
+        _append_log(log_path, "Fixture changes committed")
 
     version_path = Path("VERSION")
     if version_path.exists():
@@ -178,25 +424,59 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
             if "already on PyPI" in str(exc):
                 raise
             _append_log(log_path, f"PyPI check failed: {exc}")
+        else:
+            _append_log(
+                log_path,
+                f"Version {release.version} not published on PyPI",
+            )
     else:
         _append_log(log_path, "Network unavailable, skipping PyPI check")
 
 
 def _step_handle_migrations(release, ctx, log_path: Path) -> None:
     _append_log(log_path, "Freeze, squash and approve migrations")
+    _append_log(log_path, "Migration review acknowledged (manual step)")
 
 
 def _step_changelog_docs(release, ctx, log_path: Path) -> None:
     _append_log(log_path, "Compose CHANGELOG and documentation")
+    _append_log(log_path, "CHANGELOG and documentation review recorded")
 
 
 def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
     _append_log(log_path, "Execute pre-release actions")
+    try:
+        subprocess.run(["scripts/generate-changelog.sh"], check=True)
+    except OSError as exc:
+        if _should_use_python_changelog(exc):
+            _append_log(
+                log_path,
+                f"scripts/generate-changelog.sh failed: {exc}",
+            )
+            _generate_changelog_with_python(log_path)
+        else:  # pragma: no cover - unexpected OSError
+            raise
+    else:
+        _append_log(
+            log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
+        )
+    subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
+    _append_log(log_path, "Staged CHANGELOG.rst for commit")
     version_path = Path("VERSION")
     version_path.write_text(f"{release.version}\n", encoding="utf-8")
+    _append_log(log_path, f"Updated VERSION file to {release.version}")
     subprocess.run(["git", "add", "VERSION"], check=True)
+    _append_log(log_path, "Staged VERSION for commit")
     diff = subprocess.run(
-        [
+        [
+            "git",
+            "diff",
+            "--cached",
+            "--quiet",
+            "--",
+            "CHANGELOG.rst",
+            "VERSION",
+        ],
         check=False,
     )
     if diff.returncode != 0:
@@ -204,13 +484,40 @@ def _step_pre_release_actions(release, ctx, log_path: Path) -> None:
             ["git", "commit", "-m", f"pre-release commit {release.version}"],
             check=True,
         )
+        _append_log(log_path, f"Committed VERSION update for {release.version}")
     else:
-        _append_log(
+        _append_log(
+            log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
+        )
+        subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
+        _append_log(log_path, "Unstaged CHANGELOG.rst")
         subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
+        _append_log(log_path, "Unstaged VERSION file")
+    todo, fixture_path = _ensure_release_todo(release)
+    fixture_display = _format_path(fixture_path)
+    _append_log(log_path, f"Added TODO: {todo.request}")
+    _append_log(log_path, f"Wrote TODO fixture {fixture_display}")
+    subprocess.run(["git", "add", str(fixture_path)], check=True)
+    _append_log(log_path, f"Staged TODO fixture {fixture_display}")
+    fixture_diff = subprocess.run(
+        ["git", "diff", "--cached", "--quiet", "--", str(fixture_path)],
+        check=False,
+    )
+    if fixture_diff.returncode != 0:
+        commit_message = f"chore: add release TODO for {release.package.name}"
+        subprocess.run(["git", "commit", "-m", commit_message], check=True)
+        _append_log(log_path, f"Committed TODO fixture {fixture_display}")
+    else:
+        _append_log(
+            log_path,
+            f"No changes detected for TODO fixture {fixture_display}; skipping commit",
+        )
+    _append_log(log_path, "Pre-release actions complete")
 
 
 def _step_run_tests(release, ctx, log_path: Path) -> None:
     _append_log(log_path, "Complete test suite with --all flag")
+    _append_log(log_path, "Test suite completion acknowledged")
 
 
 def _step_promote_build(release, ctx, log_path: Path) -> None:
@@ -220,15 +527,22 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
     try:
         try:
             subprocess.run(["git", "fetch", "origin", "main"], check=True)
+            _append_log(log_path, "Fetched latest changes from origin/main")
             subprocess.run(["git", "rebase", "origin/main"], check=True)
+            _append_log(log_path, "Rebased current branch onto origin/main")
         except subprocess.CalledProcessError as exc:
             subprocess.run(["git", "rebase", "--abort"], check=False)
+            _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")
             raise Exception("Rebase onto main failed") from exc
         release_utils.promote(
             package=release.to_package(),
             version=release.version,
             creds=release.to_credentials(),
         )
+        _append_log(
+            log_path,
+            f"Generated release artifacts for v{release.version}",
+        )
         from glob import glob
 
         paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
@@ -239,6 +553,7 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
         )
         if diff.stdout.strip():
            subprocess.run(["git", "add", *paths], check=True)
+            _append_log(log_path, "Staged release metadata updates")
            subprocess.run(
                [
                    "git",
@@ -248,8 +563,14 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
                ],
                check=True,
            )
+            _append_log(
+                log_path,
+                f"Committed release metadata for v{release.version}",
+            )
         subprocess.run(["git", "push"], check=True)
+        _append_log(log_path, "Pushed release changes to origin")
         PackageRelease.dump_fixture()
+        _append_log(log_path, "Updated release fixtures")
     except Exception:
         _clean_repo()
         raise
@@ -307,15 +628,20 @@ def _step_publish(release, ctx, log_path: Path) -> None:
     release.pypi_url = (
         f"https://pypi.org/project/{release.package.name}/{release.version}/"
     )
-    release.
+    release.release_on = timezone.now()
+    release.save(update_fields=["pypi_url", "release_on"])
     PackageRelease.dump_fixture()
+    _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
     _append_log(log_path, "Upload complete")
 
 
+FIXTURE_REVIEW_STEP_NAME = "Freeze, squash and approve migrations"
+
+
 PUBLISH_STEPS = [
     ("Check version number availability", _step_check_version),
     ("Confirm release TODO completion", _step_check_todos),
-    (
+    (FIXTURE_REVIEW_STEP_NAME, _step_handle_migrations),
     ("Compose CHANGELOG and documentation", _step_changelog_docs),
     ("Execute pre-release actions", _step_pre_release_actions),
     ("Build release artifacts", _step_promote_build),
@@ -514,12 +840,28 @@ def release_progress(request, pk: int, action: str):
     release = get_object_or_404(PackageRelease, pk=pk)
     if action != "publish":
         raise Http404("Unknown action")
-    if not release.is_current:
-        raise Http404("Release is not current")
     session_key = f"release_publish_{pk}"
     lock_path = Path("locks") / f"release_publish_{pk}.json"
     restart_path = Path("locks") / f"release_publish_{pk}.restarts"
 
+    if not release.is_current:
+        if release.is_published:
+            raise Http404("Release is not current")
+        updated, previous_version = _sync_release_with_revision(release)
+        if updated:
+            request.session.pop(session_key, None)
+            if lock_path.exists():
+                lock_path.unlink()
+            if restart_path.exists():
+                restart_path.unlink()
+            log_dir = Path("logs")
+            for log_file in log_dir.glob(
+                f"{release.package.name}-{previous_version}*.log"
+            ):
+                log_file.unlink()
+        if not release.is_current:
+            raise Http404("Release is not current")
+
     if request.GET.get("restart"):
         count = 0
         if restart_path.exists():
@@ -531,7 +873,8 @@ def release_progress(request, pk: int, action: str):
         restart_path.write_text(str(count + 1), encoding="utf-8")
         _clean_repo()
         release.pypi_url = ""
-        release.
+        release.release_on = None
+        release.save(update_fields=["pypi_url", "release_on"])
         request.session.pop(session_key, None)
         if lock_path.exists():
             lock_path.unlink()
@@ -555,11 +898,11 @@ def release_progress(request, pk: int, action: str):
     if credentials_ready and ctx.get("approval_credentials_missing"):
         ctx.pop("approval_credentials_missing", None)
 
+    ack_todos_requested = bool(request.GET.get("ack_todos"))
+
     if request.GET.get("start"):
         ctx["started"] = True
         ctx["paused"] = False
-        if request.GET.get("ack_todos"):
-            ctx["todos_ack"] = True
         if (
             ctx.get("awaiting_approval")
             and not ctx.get("approval_credentials_missing")
@@ -580,9 +923,34 @@ def release_progress(request, pk: int, action: str):
     step_count = ctx.get("step", 0)
     step_param = request.GET.get("step")
 
-
-
-
+    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
+    pending_items = list(pending_qs)
+    if ack_todos_requested:
+        if pending_items:
+            failures = []
+            for todo in pending_items:
+                result = todo.check_on_done_condition()
+                if not result.passed:
+                    failures.append((todo, result))
+            if failures:
+                ctx.pop("todos_ack", None)
+                for todo, result in failures:
+                    messages.error(request, _format_condition_failure(todo, result))
+            else:
+                ctx["todos_ack"] = True
+        else:
+            ctx["todos_ack"] = True
+
+    if pending_items and not ctx.get("todos_ack"):
+        ctx["todos"] = [
+            {
+                "id": todo.pk,
+                "request": todo.request,
+                "url": todo.url,
+                "request_details": todo.request_details,
+            }
+            for todo in pending_items
+        ]
     else:
         ctx.pop("todos", None)
 
@@ -608,6 +976,14 @@ def release_progress(request, pk: int, action: str):
             log_path.unlink()
 
     steps = PUBLISH_STEPS
+    fixtures_step_index = next(
+        (
+            index
+            for index, (name, _) in enumerate(steps)
+            if name == FIXTURE_REVIEW_STEP_NAME
+        ),
+        None,
+    )
     error = ctx.get("error")
 
     if (
@@ -742,6 +1118,14 @@ def release_progress(request, pk: int, action: str):
             "admin:core_user_change", args=[request.user.pk]
         )
 
+    fixtures_summary = ctx.get("fixtures")
+    if (
+        fixtures_summary
+        and fixtures_step_index is not None
+        and step_count > fixtures_step_index
+    ):
+        fixtures_summary = None
+
     context = {
         "release": release,
         "action": "publish",
@@ -753,7 +1137,7 @@ def release_progress(request, pk: int, action: str):
         "log_content": log_content,
         "log_path": str(log_path),
         "cert_log": ctx.get("cert_log"),
-        "fixtures":
+        "fixtures": fixtures_summary,
         "todos": ctx.get("todos"),
         "restart_count": restart_count,
         "started": ctx.get("started", False),
@@ -780,10 +1164,105 @@ def release_progress(request, pk: int, action: str):
     return render(request, "core/release_progress.html", context)
 
 
+def _todo_iframe_url(request, todo: Todo) -> str:
+    """Return a safe iframe URL for ``todo`` scoped to the current host."""
+
+    fallback = reverse("admin:core_todo_change", args=[todo.pk])
+    raw_url = (todo.url or "").strip()
+    if not raw_url:
+        return fallback
+
+    focus_path = reverse("todo-focus", args=[todo.pk])
+    focus_norm = focus_path.strip("/").lower()
+
+    def _is_focus_target(target: str) -> bool:
+        if not target:
+            return False
+        parsed_target = urlsplit(target)
+        path = parsed_target.path
+        if not path and not parsed_target.scheme and not parsed_target.netloc:
+            path = target.split("?", 1)[0].split("#", 1)[0]
+        normalized = path.strip("/").lower()
+        return normalized == focus_norm if normalized else False
+
+    if _is_focus_target(raw_url):
+        return fallback
+
+    parsed = urlsplit(raw_url)
+    if not parsed.scheme and not parsed.netloc:
+        return fallback if _is_focus_target(parsed.path) else raw_url
+
+    if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
+        return fallback
+
+    request_host = request.get_host().strip().lower()
+    host_without_port = request_host.split(":", 1)[0]
+    allowed_hosts = {
+        request_host,
+        host_without_port,
+        "localhost",
+        "127.0.0.1",
+        "0.0.0.0",
+        "::1",
+    }
+
+    site_domain = ""
+    try:
+        site_domain = Site.objects.get_current().domain.strip().lower()
+    except Site.DoesNotExist:
+        site_domain = ""
+    if site_domain:
+        allowed_hosts.add(site_domain)
+        allowed_hosts.add(site_domain.split(":", 1)[0])
+
+    for host in getattr(settings, "ALLOWED_HOSTS", []):
+        if not isinstance(host, str):
+            continue
+        normalized = host.strip().lower()
+        if not normalized or normalized.startswith("*"):
+            continue
+        allowed_hosts.add(normalized)
+        allowed_hosts.add(normalized.split(":", 1)[0])
+
+    hostname = (parsed.hostname or "").strip().lower()
+    netloc = parsed.netloc.strip().lower()
+    if hostname in allowed_hosts or netloc in allowed_hosts:
+        path = parsed.path or "/"
+        if not path.startswith("/"):
+            path = f"/{path}"
+        relative_url = urlunsplit(("", "", path, parsed.query, parsed.fragment))
+        if _is_focus_target(relative_url):
+            return fallback
+        return relative_url or fallback
+
+    return fallback
+
+
+@staff_member_required
+def todo_focus(request, pk: int):
+    todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
+    if todo.done_on:
+        return redirect(_get_return_url(request))
+
+    iframe_url = _todo_iframe_url(request, todo)
+    context = {
+        "todo": todo,
+        "iframe_url": iframe_url,
+        "next_url": _get_return_url(request),
+        "done_url": reverse("todo-done", args=[todo.pk]),
+    }
+    return render(request, "core/todo_focus.html", context)
+
+
 @staff_member_required
 @require_POST
 def todo_done(request, pk: int):
     todo = get_object_or_404(Todo, pk=pk, is_deleted=False, done_on__isnull=True)
+    redirect_to = _get_return_url(request)
+    result = todo.check_on_done_condition()
+    if not result.passed:
+        messages.error(request, _format_condition_failure(todo, result))
+        return redirect(redirect_to)
     todo.done_on = timezone.now()
     todo.save(update_fields=["done_on"])
-    return redirect(
+    return redirect(redirect_to)