arthexis 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of arthexis has been flagged as possibly problematic.
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/METADATA +2 -2
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/RECORD +50 -44
- config/asgi.py +15 -1
- config/celery.py +8 -1
- config/settings.py +49 -78
- config/settings_helpers.py +109 -0
- core/admin.py +293 -78
- core/apps.py +21 -0
- core/auto_upgrade.py +2 -2
- core/form_fields.py +75 -0
- core/models.py +203 -47
- core/reference_utils.py +1 -1
- core/release.py +42 -20
- core/system.py +6 -3
- core/tasks.py +92 -40
- core/tests.py +75 -1
- core/views.py +178 -29
- core/widgets.py +43 -0
- nodes/admin.py +583 -10
- nodes/apps.py +15 -0
- nodes/feature_checks.py +133 -0
- nodes/models.py +287 -49
- nodes/reports.py +411 -0
- nodes/tests.py +990 -42
- nodes/urls.py +1 -0
- nodes/utils.py +32 -0
- nodes/views.py +173 -5
- ocpp/admin.py +424 -17
- ocpp/consumers.py +630 -15
- ocpp/evcs.py +7 -94
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +236 -4
- ocpp/routing.py +4 -2
- ocpp/simulator.py +346 -26
- ocpp/status_display.py +26 -0
- ocpp/store.py +110 -2
- ocpp/tests.py +1425 -33
- ocpp/transactions_io.py +27 -3
- ocpp/views.py +344 -38
- pages/admin.py +138 -3
- pages/context_processors.py +15 -1
- pages/defaults.py +1 -2
- pages/forms.py +67 -0
- pages/models.py +136 -1
- pages/tests.py +379 -4
- pages/urls.py +1 -0
- pages/views.py +64 -7
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/WHEEL +0 -0
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.11.dist-info → arthexis-0.1.13.dist-info}/top_level.txt +0 -0
core/tasks.py
CHANGED
@@ -17,7 +17,7 @@ from nodes.models import NetMessage
 
 
 AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
-
+AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
 
 
 logger = logging.getLogger(__name__)
@@ -66,6 +66,46 @@ def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
         logger.warning("Failed to append auto-upgrade log entry: %s", message)
 
 
+def _skip_lock_path(base_dir: Path) -> Path:
+    return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
+
+
+def _load_skipped_revisions(base_dir: Path) -> set[str]:
+    skip_file = _skip_lock_path(base_dir)
+    try:
+        return {
+            line.strip()
+            for line in skip_file.read_text().splitlines()
+            if line.strip()
+        }
+    except FileNotFoundError:
+        return set()
+    except OSError:
+        logger.warning("Failed to read auto-upgrade skip lockfile")
+        return set()
+
+
+def _add_skipped_revision(base_dir: Path, revision: str) -> None:
+    if not revision:
+        return
+
+    skip_file = _skip_lock_path(base_dir)
+    try:
+        skip_file.parent.mkdir(parents=True, exist_ok=True)
+        existing = _load_skipped_revisions(base_dir)
+        if revision in existing:
+            return
+        with skip_file.open("a", encoding="utf-8") as fh:
+            fh.write(f"{revision}\n")
+        _append_auto_upgrade_log(
+            base_dir, f"Recorded blocked revision {revision} for auto-upgrade"
+        )
+    except OSError:
+        logger.warning(
+            "Failed to update auto-upgrade skip lockfile with revision %s", revision
+        )
+
+
 def _resolve_service_url(base_dir: Path) -> str:
     """Return the local URL used to probe the Django suite."""
 
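Note: the skip lockfile these helpers manage is just a plain-text file under `locks/` with one git revision per line. A minimal, self-contained sketch of the same read/append pattern (using a temporary directory instead of the suite's real base dir):

```python
import tempfile
from pathlib import Path

base_dir = Path(tempfile.mkdtemp())
skip_file = base_dir / "locks" / "auto_upgrade_skip_revisions.lck"

# First write creates locks/ on demand, mirroring _add_skipped_revision.
skip_file.parent.mkdir(parents=True, exist_ok=True)
with skip_file.open("a", encoding="utf-8") as fh:
    fh.write("0123abcd\n")

# Reading back yields the set that check_github_updates consults.
revisions = {
    line.strip() for line in skip_file.read_text().splitlines() if line.strip()
}
assert "0123abcd" in revisions
```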
@@ -110,6 +150,23 @@ def check_github_updates() -> None:
     except Exception:
         startup = None
 
+    remote_revision = (
+        subprocess.check_output(
+            ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
+        )
+        .decode()
+        .strip()
+    )
+
+    skipped_revisions = _load_skipped_revisions(base_dir)
+    if remote_revision in skipped_revisions:
+        _append_auto_upgrade_log(
+            base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
+        )
+        if startup:
+            startup()
+        return
+
     upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
 
     upgrade_was_applied = False
@@ -120,19 +177,7 @@ def check_github_updates() -> None:
         .decode()
         .strip()
     )
-    remote = (
-        subprocess.check_output(
-            [
-                "git",
-                "rev-parse",
-                f"origin/{branch}",
-            ],
-            cwd=base_dir,
-        )
-        .decode()
-        .strip()
-    )
-    if local == remote:
+    if local == remote_revision:
         if startup:
             startup()
         return
@@ -254,12 +299,29 @@ def _schedule_health_check(next_attempt: int) -> None:
     )
 
 
+def _handle_failed_health_check(base_dir: Path, detail: str) -> None:
+    revision = ""
+    try:
+        revision = (
+            subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=base_dir)
+            .decode()
+            .strip()
+        )
+    except Exception:  # pragma: no cover - best effort capture
+        logger.warning("Failed to determine revision during auto-upgrade revert")
+
+    _add_skipped_revision(base_dir, revision)
+    _append_auto_upgrade_log(base_dir, "Health check failed; reverting upgrade")
+    subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
+
+
 @shared_task
 def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
     """Verify the upgraded suite responds successfully.
 
-
-
+    After the post-upgrade delay the site is probed once; any response other
+    than HTTP 200 triggers an automatic revert and records the failing
+    revision so future upgrade attempts skip it.
     """
 
     base_dir = Path(__file__).resolve().parent.parent
@@ -270,33 +332,29 @@ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
     )
 
     status: int | None = None
+    detail = "succeeded"
     try:
         with urllib.request.urlopen(request, timeout=10) as response:
             status = getattr(response, "status", response.getcode())
     except urllib.error.HTTPError as exc:
         status = exc.code
+        detail = f"returned HTTP {exc.code}"
         logger.warning(
             "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
         )
     except urllib.error.URLError as exc:
+        detail = f"failed with {exc}"
         logger.warning(
             "Auto-upgrade health check attempt %s failed: %s", attempt, exc
         )
     except Exception as exc:  # pragma: no cover - unexpected network error
+        detail = f"failed with {exc}"
         logger.exception(
             "Unexpected error probing suite during auto-upgrade attempt %s", attempt
         )
-        detail = f"failed with {exc}"
         _record_health_check_result(base_dir, attempt, status, detail)
-
-
-            base_dir,
-            "Health check raised unexpected error; reverting upgrade",
-        )
-        subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
-        else:
-            _schedule_health_check(attempt + 1)
-        return None
+        _handle_failed_health_check(base_dir, detail)
+        return False
 
     if status == 200:
         _record_health_check_result(base_dir, attempt, status, "succeeded")
@@ -307,21 +365,15 @@ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
         )
         return True
 
-
-
-
-
-
-        )
-        _append_auto_upgrade_log(
-            base_dir,
-            "Health check failed three times; reverting upgrade",
-        )
-        subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
-        return False
+    if detail == "succeeded":
+        if status is not None:
+            detail = f"returned HTTP {status}"
+        else:
+            detail = "failed with unknown status"
 
-
-
+    _record_health_check_result(base_dir, attempt, status, detail)
+    _handle_failed_health_check(base_dir, detail)
+    return False
 
 
 @shared_task
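Net effect in core/tasks.py: the health check no longer retries three times. It probes once after AUTO_UPGRADE_HEALTH_DELAY_SECONDS, classifies the outcome in a `detail` string, and on anything other than HTTP 200 records the current HEAD in the skip lockfile and reverts. A rough sketch of the probe-and-classify step in isolation (the URL below is a placeholder, not the suite's real probe target):

```python
import urllib.error
import urllib.request


def probe_once(url: str) -> tuple[int | None, str]:
    """Return (status, detail) roughly the way verify_auto_upgrade_health does."""
    try:
        with urllib.request.urlopen(url, timeout=10) as response:
            status = getattr(response, "status", response.getcode())
    except urllib.error.HTTPError as exc:
        return exc.code, f"returned HTTP {exc.code}"
    except urllib.error.URLError as exc:
        return None, f"failed with {exc}"
    return status, "succeeded" if status == 200 else f"returned HTTP {status}"


# probe_once("http://127.0.0.1:8888/") -> (None, "failed with ...") when nothing listens.
```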
core/tests.py
CHANGED
@@ -770,6 +770,50 @@ class ReleaseProcessTests(TestCase):
         self.assertFalse(proc.stdout.strip())
         self.assertEqual(version_path.read_text(encoding="utf-8"), original)
 
+    @mock.patch("core.views.requests.get")
+    @mock.patch("core.views.release_utils.network_available", return_value=True)
+    @mock.patch("core.views.release_utils._git_clean", return_value=True)
+    def test_step_check_ignores_yanked_release(
+        self, git_clean, network_available, requests_get
+    ):
+        response = mock.Mock()
+        response.ok = True
+        response.json.return_value = {
+            "releases": {
+                "0.1.12": [
+                    {"filename": "pkg.whl", "yanked": True},
+                    {"filename": "pkg.tar.gz", "yanked": True},
+                ]
+            }
+        }
+        requests_get.return_value = response
+        self.release.version = "0.1.12"
+        _step_check_version(self.release, {}, Path("rel.log"))
+        requests_get.assert_called_once()
+
+    @mock.patch("core.views.requests.get")
+    @mock.patch("core.views.release_utils.network_available", return_value=True)
+    @mock.patch("core.views.release_utils._git_clean", return_value=True)
+    def test_step_check_blocks_available_release(
+        self, git_clean, network_available, requests_get
+    ):
+        response = mock.Mock()
+        response.ok = True
+        response.json.return_value = {
+            "releases": {
+                "0.1.12": [
+                    {"filename": "pkg.whl", "yanked": False},
+                    {"filename": "pkg.tar.gz"},
+                ]
+            }
+        }
+        requests_get.return_value = response
+        self.release.version = "0.1.12"
+        with self.assertRaises(Exception) as exc:
+            _step_check_version(self.release, {}, Path("rel.log"))
+        self.assertIn("already on PyPI", str(exc.exception))
+        requests_get.assert_called_once()
+
     @mock.patch("core.models.PackageRelease.dump_fixture")
     def test_save_does_not_dump_fixture(self, dump):
         self.release.pypi_url = "https://example.com"
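One subtlety worth noting in these new tests: stacked `@mock.patch` decorators inject their mocks bottom-up, which is why `_git_clean` (the innermost patch) arrives first in the test signature. A self-contained illustration of that ordering:

```python
from unittest import mock


@mock.patch("os.getcwd", return_value="/patched")   # outermost -> last argument
@mock.patch("os.path.exists", return_value=True)    # innermost -> first argument
def demo(exists_mock, getcwd_mock):
    return exists_mock.return_value, getcwd_mock.return_value


assert demo() == (True, "/patched")
```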
@@ -1002,7 +1046,9 @@ class ReleaseProgressFixtureVisibilityTests(TestCase):
             package=package, version=current_version
         )
         self.session_key = f"release_publish_{self.release.pk}"
-        self.log_name =
+        self.log_name = core_views._release_log_name(
+            self.release.package.name, self.release.version
+        )
         self.lock_path = Path("locks") / f"{self.session_key}.json"
         self.restart_path = Path("locks") / f"{self.session_key}.restarts"
         self.log_path = Path("logs") / self.log_name
@@ -1284,6 +1330,14 @@ class TodoDoneTests(TestCase):
         self.assertIsNotNone(todo.done_on)
         self.assertFalse(todo.is_deleted)
 
+    def test_mark_done_missing_task_refreshes(self):
+        todo = Todo.objects.create(request="Task", is_seed_data=True)
+        todo.delete()
+        resp = self.client.post(reverse("todo-done", args=[todo.pk]))
+        self.assertRedirects(resp, reverse("admin:index"))
+        messages = [m.message for m in get_messages(resp.wsgi_request)]
+        self.assertFalse(messages)
+
     def test_mark_done_condition_failure_shows_message(self):
         todo = Todo.objects.create(
             request="Task",
@@ -1369,6 +1423,13 @@ class TodoFocusViewTests(TestCase):
         change_url = reverse("admin:core_todo_change", args=[todo.pk])
         self.assertContains(resp, f'src="{change_url}"')
 
+    def test_focus_view_includes_open_target_button(self):
+        todo = Todo.objects.create(request="Task", url="/docs/")
+        resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
+        self.assertContains(resp, 'class="todo-button todo-button-open"')
+        self.assertContains(resp, 'target="_blank"')
+        self.assertContains(resp, 'href="/docs/"')
+
     def test_focus_view_sanitizes_loopback_absolute_url(self):
         todo = Todo.objects.create(
             request="Task",
@@ -1402,6 +1463,19 @@
         change_url = reverse("admin:core_todo_change", args=[todo.pk])
         self.assertContains(resp, f'src="{change_url}"')
 
+    def test_focus_view_parses_auth_directives(self):
+        todo = Todo.objects.create(
+            request="Task",
+            url="/docs/?section=chart&_todo_auth=logout&_todo_auth=user:demo&_todo_auth=perm:core.view_user&_todo_auth=extra",
+        )
+        resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
+        self.assertContains(resp, 'src="/docs/?section=chart"')
+        self.assertContains(resp, 'href="/docs/?section=chart"')
+        self.assertContains(resp, "logged out")
+        self.assertContains(resp, "Sign in using: demo")
+        self.assertContains(resp, "Required permissions: core.view_user")
+        self.assertContains(resp, "Additional authentication notes: extra")
+
     def test_focus_view_redirects_if_todo_completed(self):
         todo = Todo.objects.create(request="Task")
         todo.done_on = timezone.now()
core/views.py
CHANGED
@@ -1,4 +1,5 @@
 import json
+import logging
 import shutil
 from datetime import timedelta
 
@@ -8,7 +9,7 @@ from django.contrib.admin.views.decorators import staff_member_required
 from django.contrib.auth import authenticate, login
 from django.contrib import messages
 from django.contrib.sites.models import Site
-from django.http import Http404, JsonResponse
+from django.http import Http404, JsonResponse, HttpResponse
 from django.shortcuts import get_object_or_404, redirect, render, resolve_url
 from django.utils import timezone
 from django.utils.text import slugify
@@ -18,13 +19,18 @@ from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import require_GET, require_POST
 from django.utils.http import url_has_allowed_host_and_scheme
 from pathlib import Path
-from urllib.parse import urlsplit, urlunsplit
+from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
 import errno
 import subprocess
 
+from django.template.loader import get_template
+from django.test import signals
+
 from utils import revision
 from utils.api import api_login_required
 
+logger = logging.getLogger(__name__)
+
 from .models import Product, EnergyAccount, PackageRelease, Todo
 from .models import RFID
 
@@ -40,10 +46,17 @@ def odoo_products(request):
         products = profile.execute(
             "product.product",
             "search_read",
-            [],
+            [[]],
             {"fields": ["name"], "limit": 50},
         )
     except Exception:
+        logger.exception(
+            "Failed to fetch Odoo products via API for user %s (profile_id=%s, host=%s, database=%s)",
+            getattr(request.user, "pk", None),
+            getattr(profile, "pk", None),
+            getattr(profile, "host", None),
+            getattr(profile, "database", None),
+        )
         return JsonResponse({"detail": "Unable to fetch products"}, status=502)
     items = [{"id": p.get("id"), "name": p.get("name", "")} for p in products]
     return JsonResponse(items, safe=False)
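The `[]` → `[[]]` fix in odoo_products is easy to miss: Odoo's `search_read` receives its positional arguments as a list whose first element is the domain, so an empty domain (match everything) must be wrapped once more. Assuming `profile.execute` forwards to the standard `execute_kw` RPC entry point, the equivalent raw call looks roughly like this (endpoint and credentials are placeholders):

```python
import xmlrpc.client

url, db, uid, password = "https://odoo.example.com", "db", 2, "secret"  # placeholders
models = xmlrpc.client.ServerProxy(f"{url}/xmlrpc/2/object")

# [[]] passes one positional argument: an empty domain that matches every record.
products = models.execute_kw(
    db, uid, password,
    "product.product", "search_read",
    [[]],
    {"fields": ["name"], "limit": 50},
)
```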
@@ -77,6 +90,10 @@ def _append_log(path: Path, message: str) -> None:
         fh.write(message + "\n")
 
 
+def _release_log_name(package_name: str, version: str) -> str:
+    return f"pr.{package_name}.v{version}.log"
+
+
 def _clean_repo() -> None:
     """Return the git repository to a clean state."""
     subprocess.run(["git", "reset", "--hard"], check=False)
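This helper pins down the release-log naming convention used by the later hunks in this file: package `arthexis` at version `0.1.13` maps to `pr.arthexis.v0.1.13.log`. For example:

```python
def _release_log_name(package_name: str, version: str) -> str:
    return f"pr.{package_name}.v{version}.log"

assert _release_log_name("arthexis", "0.1.13") == "pr.arthexis.v0.1.13.log"
```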
@@ -357,7 +374,7 @@ def _step_check_todos(release, ctx, log_path: Path) -> None:
 
 def _step_check_version(release, ctx, log_path: Path) -> None:
     from . import release as release_utils
-    from packaging.version import Version
+    from packaging.version import InvalidVersion, Version
 
     if not release_utils._git_clean():
         proc = subprocess.run(
@@ -417,8 +434,33 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
     if release_utils.network_available():
         try:
             resp = requests.get(f"https://pypi.org/pypi/{release.package.name}/json")
-            if resp.ok
-
+            if resp.ok:
+                data = resp.json()
+                releases = data.get("releases", {})
+                try:
+                    target_version = Version(release.version)
+                except InvalidVersion:
+                    target_version = None
+
+                for candidate, files in releases.items():
+                    same_version = candidate == release.version
+                    if target_version is not None and not same_version:
+                        try:
+                            same_version = Version(candidate) == target_version
+                        except InvalidVersion:
+                            same_version = False
+                    if not same_version:
+                        continue
+
+                    has_available_files = any(
+                        isinstance(file_data, dict)
+                        and not file_data.get("yanked", False)
+                        for file_data in files or []
+                    )
+                    if has_available_files:
+                        raise Exception(
+                            f"Version {release.version} already on PyPI"
+                        )
         except Exception as exc:
             # network errors should be logged but not crash
             if "already on PyPI" in str(exc):
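For reference, `https://pypi.org/pypi/<package>/json` returns a `releases` mapping from version string to a list of file dicts, each of which may carry a `yanked` flag. The new check only treats a version as occupied when at least one file is still downloadable, i.e. not yanked. A hand-written payload makes the filter concrete:

```python
releases = {
    "0.1.12": [
        {"filename": "pkg.whl", "yanked": True},
        {"filename": "pkg.tar.gz"},  # no flag: counts as not yanked
    ]
}

files = releases.get("0.1.12", [])
has_available_files = any(
    isinstance(file_data, dict) and not file_data.get("yanked", False)
    for file_data in files
)
assert has_available_files  # the sdist still counts, so 0.1.12 is taken
```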
@@ -574,9 +616,14 @@ def _step_promote_build(release, ctx, log_path: Path) -> None:
     except Exception:
         _clean_repo()
         raise
-
-    new_log = log_path.with_name(
-        log_path
+    target_name = _release_log_name(release.package.name, release.version)
+    new_log = log_path.with_name(target_name)
+    if log_path != new_log:
+        if new_log.exists():
+            new_log.unlink()
+        log_path.rename(new_log)
+    else:
+        new_log = log_path
     ctx["log"] = new_log.name
     _append_log(new_log, "Build complete")
 
@@ -855,9 +902,8 @@ def release_progress(request, pk: int, action: str):
         if restart_path.exists():
             restart_path.unlink()
         log_dir = Path("logs")
-
-
-        ):
+        pattern = f"pr.{release.package.name}.v{previous_version}*.log"
+        for log_file in log_dir.glob(pattern):
             log_file.unlink()
         if not release.is_current:
             raise Http404("Release is not current")
@@ -879,7 +925,8 @@
         if lock_path.exists():
             lock_path.unlink()
         log_dir = Path("logs")
-
+        pattern = f"pr.{release.package.name}.v{release.version}*.log"
+        for f in log_dir.glob(pattern):
             f.unlink()
         return redirect(request.path)
     ctx = request.session.get(session_key)
@@ -954,8 +1001,7 @@
         else:
             ctx.pop("todos", None)
 
-
-        log_name = f"{identifier}.log"
+        log_name = _release_log_name(release.package.name, release.version)
         if ctx.get("log") != log_name:
             ctx = {
                 "step": 0,
@@ -1161,16 +1207,95 @@
     else:
         lock_path.parent.mkdir(parents=True, exist_ok=True)
         lock_path.write_text(json.dumps(ctx), encoding="utf-8")
-
+    template = get_template("core/release_progress.html")
+    content = template.render(context, request)
+    signals.template_rendered.send(
+        sender=template.__class__,
+        template=template,
+        context=context,
+        using=getattr(getattr(template, "engine", None), "name", None),
+    )
+    response = HttpResponse(content)
+    response.context = context
+    response.templates = [template]
+    return response
+
+
+def _dedupe_preserve_order(values):
+    seen = set()
+    result = []
+    for value in values:
+        if value in seen:
+            continue
+        seen.add(value)
+        result.append(value)
+    return result
+
+
+def _parse_todo_auth_directives(query: str):
+    directives = {
+        "require_logout": False,
+        "users": [],
+        "permissions": [],
+        "notes": [],
+    }
+    if not query:
+        return "", directives
+
+    remaining = []
+    for key, value in parse_qsl(query, keep_blank_values=True):
+        if key != "_todo_auth":
+            remaining.append((key, value))
+            continue
+        token = (value or "").strip()
+        if not token:
+            continue
+        kind, _, payload = token.partition(":")
+        kind = kind.strip().lower()
+        payload = payload.strip()
+        if kind in {"logout", "anonymous", "anon"}:
+            directives["require_logout"] = True
+        elif kind in {"user", "username"} and payload:
+            directives["users"].append(payload)
+        elif kind in {"perm", "permission"} and payload:
+            directives["permissions"].append(payload)
+        else:
+            directives["notes"].append(token)
+
+    sanitized_query = urlencode(remaining, doseq=True)
+    return sanitized_query, directives
 
 
-def _todo_iframe_url(request, todo: Todo)
-    """Return a safe iframe URL for ``todo
+def _todo_iframe_url(request, todo: Todo):
+    """Return a safe iframe URL and auth context for ``todo``."""
 
     fallback = reverse("admin:core_todo_change", args=[todo.pk])
     raw_url = (todo.url or "").strip()
+
+    auth_context = {
+        "require_logout": False,
+        "users": [],
+        "permissions": [],
+        "notes": [],
+    }
+
+    def _final_context(target_url: str):
+        return {
+            "target_url": target_url or fallback,
+            "require_logout": auth_context["require_logout"],
+            "users": _dedupe_preserve_order(auth_context["users"]),
+            "permissions": _dedupe_preserve_order(auth_context["permissions"]),
+            "notes": _dedupe_preserve_order(auth_context["notes"]),
+            "has_requirements": bool(
+                auth_context["require_logout"]
+                or auth_context["users"]
+                or auth_context["permissions"]
+                or auth_context["notes"]
+            ),
+        }
+
     if not raw_url:
-        return fallback
+        return fallback, _final_context(fallback)
 
     focus_path = reverse("todo-focus", args=[todo.pk])
     focus_norm = focus_path.strip("/").lower()
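The manual render in release_progress replaces a plain render() call (the removed line is truncated in this view of the diff). Sending `template_rendered` by hand appears to be for test-client compatibility: Django's test client normally collects `response.context` and `response.templates` through that signal, so a hand-built `HttpResponse` would otherwise break `assertTemplateUsed`-style assertions. A generic sketch of the same pattern (`template_name` and `context` are placeholders):

```python
from django.http import HttpResponse
from django.template.loader import get_template
from django.test import signals


def render_visible_to_test_client(request, template_name, context):
    # Render manually, then fire the signal the test client listens for.
    template = get_template(template_name)
    content = template.render(context, request)
    signals.template_rendered.send(
        sender=template.__class__, template=template, context=context
    )
    response = HttpResponse(content)
    response.context = context        # mirrored onto the response by hand
    response.templates = [template]
    return response
```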
@@ -1186,14 +1311,31 @@ def _todo_iframe_url(request, todo: Todo) -> str:
         return normalized == focus_norm if normalized else False
 
     if _is_focus_target(raw_url):
-        return fallback
+        return fallback, _final_context(fallback)
 
     parsed = urlsplit(raw_url)
+
+    def _merge_directives(parsed_result):
+        sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
+        if directives["require_logout"]:
+            auth_context["require_logout"] = True
+        auth_context["users"].extend(directives["users"])
+        auth_context["permissions"].extend(directives["permissions"])
+        auth_context["notes"].extend(directives["notes"])
+        return parsed_result._replace(query=sanitized_query)
+
     if not parsed.scheme and not parsed.netloc:
-
+        sanitized = _merge_directives(parsed)
+        path = sanitized.path or "/"
+        if not path.startswith("/"):
+            path = f"/{path}"
+        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
+        if _is_focus_target(relative_url):
+            return fallback, _final_context(fallback)
+        return relative_url or fallback, _final_context(relative_url)
 
     if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
-        return fallback
+        return fallback, _final_context(fallback)
 
     request_host = request.get_host().strip().lower()
     host_without_port = request_host.split(":", 1)[0]
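Concretely, the `_todo_auth` directives ride along in a TODO's query string and are stripped before the URL reaches the iframe. A small demonstration of the sanitize/collect split using only `urllib.parse`, mirroring `_parse_todo_auth_directives`:

```python
from urllib.parse import parse_qsl, urlencode, urlsplit

url = "/docs/?section=chart&_todo_auth=logout&_todo_auth=user:demo&_todo_auth=perm:core.view_user"
parts = urlsplit(url)

tokens, remaining = [], []
for key, value in parse_qsl(parts.query, keep_blank_values=True):
    (tokens if key == "_todo_auth" else remaining).append((key, value))

assert urlencode(remaining, doseq=True) == "section=chart"  # what the iframe sees
assert [v for _, v in tokens] == ["logout", "user:demo", "perm:core.view_user"]
# "logout" -> require an anonymous session, "user:demo" -> sign in as demo,
# "perm:..." -> a required permission; anything unrecognized becomes a note.
```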
@@ -1227,15 +1369,16 @@ def _todo_iframe_url(request, todo: Todo) -> str:
     hostname = (parsed.hostname or "").strip().lower()
     netloc = parsed.netloc.strip().lower()
     if hostname in allowed_hosts or netloc in allowed_hosts:
-
+        sanitized = _merge_directives(parsed)
+        path = sanitized.path or "/"
         if not path.startswith("/"):
             path = f"/{path}"
-        relative_url = urlunsplit(("", "", path,
+        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
         if _is_focus_target(relative_url):
-            return fallback
-        return relative_url or fallback
+            return fallback, _final_context(fallback)
+        return relative_url or fallback, _final_context(relative_url)
 
-    return fallback
+    return fallback, _final_context(fallback)
 
 
 @staff_member_required
@@ -1244,10 +1387,13 @@ def todo_focus(request, pk: int):
     if todo.done_on:
         return redirect(_get_return_url(request))
 
-    iframe_url = _todo_iframe_url(request, todo)
+    iframe_url, focus_auth = _todo_iframe_url(request, todo)
+    focus_target_url = focus_auth.get("target_url", iframe_url) if focus_auth else iframe_url
     context = {
         "todo": todo,
         "iframe_url": iframe_url,
+        "focus_target_url": focus_target_url,
+        "focus_auth": focus_auth,
         "next_url": _get_return_url(request),
         "done_url": reverse("todo-done", args=[todo.pk]),
     }
@@ -1257,8 +1403,11 @@
 @staff_member_required
 @require_POST
 def todo_done(request, pk: int):
-    todo = get_object_or_404(Todo, pk=pk, is_deleted=False, done_on__isnull=True)
     redirect_to = _get_return_url(request)
+    try:
+        todo = Todo.objects.get(pk=pk, is_deleted=False, done_on__isnull=True)
+    except Todo.DoesNotExist:
+        return redirect(redirect_to)
     result = todo.check_on_done_condition()
     if not result.passed:
         messages.error(request, _format_condition_failure(todo, result))