arthexis 0.1.11__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- {arthexis-0.1.11.dist-info → arthexis-0.1.12.dist-info}/METADATA +2 -2
- {arthexis-0.1.11.dist-info → arthexis-0.1.12.dist-info}/RECORD +38 -35
- config/settings.py +7 -2
- core/admin.py +246 -68
- core/apps.py +21 -0
- core/models.py +41 -8
- core/reference_utils.py +1 -1
- core/release.py +4 -0
- core/system.py +6 -3
- core/tasks.py +92 -40
- core/tests.py +64 -0
- core/views.py +131 -17
- nodes/admin.py +316 -6
- nodes/feature_checks.py +133 -0
- nodes/models.py +83 -26
- nodes/reports.py +411 -0
- nodes/tests.py +365 -36
- nodes/utils.py +32 -0
- ocpp/admin.py +278 -15
- ocpp/consumers.py +506 -8
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +234 -4
- ocpp/simulator.py +321 -22
- ocpp/store.py +110 -2
- ocpp/tests.py +789 -6
- ocpp/transactions_io.py +17 -3
- ocpp/views.py +225 -19
- pages/admin.py +135 -3
- pages/context_processors.py +15 -1
- pages/defaults.py +1 -2
- pages/forms.py +38 -0
- pages/models.py +136 -1
- pages/tests.py +262 -4
- pages/urls.py +1 -0
- pages/views.py +52 -3
- {arthexis-0.1.11.dist-info → arthexis-0.1.12.dist-info}/WHEEL +0 -0
- {arthexis-0.1.11.dist-info → arthexis-0.1.12.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.11.dist-info → arthexis-0.1.12.dist-info}/top_level.txt +0 -0
core/models.py
CHANGED
@@ -765,15 +765,28 @@ class EmailInbox(Profile):
             typ, data = conn.search(None, "ALL")
         else:
             criteria = []
-
-
-
-
-
-
+            charset = None
+
+            def _append(term: str, value: str):
+                nonlocal charset
+                if not value:
+                    return
+                try:
+                    value.encode("ascii")
+                    encoded_value = value
+                except UnicodeEncodeError:
+                    charset = charset or "UTF-8"
+                    encoded_value = value.encode("utf-8")
+                criteria.extend([term, encoded_value])
+
+            _append("SUBJECT", subject)
+            _append("FROM", from_address)
+            _append("TEXT", body)
+
             if not criteria:
-
-
+                typ, data = conn.search(None, "ALL")
+            else:
+                typ, data = conn.search(charset, *criteria)
         ids = data[0].split()[-fetch_limit:]
         messages = []
         for mid in ids:
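The search helper above works around RFC 3501's default of US-ASCII search criteria: terms stay plain strings while they are ASCII-safe, and the first non-ASCII term switches the whole search to CHARSET UTF-8 with byte-encoded values. A minimal standalone sketch of the same pattern, assuming conn is an already-authenticated imaplib connection:

    import imaplib

    def search_subject(conn: imaplib.IMAP4, subject: str) -> list[bytes]:
        # ASCII subjects use the default charset; anything else is sent as
        # UTF-8 bytes together with an explicit CHARSET argument.
        try:
            subject.encode("ascii")
            charset, value = None, subject
        except UnicodeEncodeError:
            charset, value = "UTF-8", subject.encode("utf-8")
        typ, data = conn.search(charset, "SUBJECT", value)
        return data[0].split()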
@@ -2637,3 +2650,23 @@ class Todo(Entity):
         if isinstance(field, ConditionTextField):
             return field.evaluate(self)
         return ConditionCheckResult(True, "")
+
+
+class TOTPDeviceSettings(models.Model):
+    """Per-device configuration options for authenticator enrollments."""
+
+    device = models.OneToOneField(
+        "otp_totp.TOTPDevice",
+        on_delete=models.CASCADE,
+        related_name="custom_settings",
+    )
+    issuer = models.CharField(
+        max_length=64,
+        blank=True,
+        default="",
+        help_text=_("Label shown in authenticator apps. Leave blank to use Arthexis."),
+    )
+
+    class Meta:
+        verbose_name = _("Authenticator device settings")
+        verbose_name_plural = _("Authenticator device settings")
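The new TOTPDeviceSettings model only stores a per-device issuer override; this diff does not show how the value is consumed. A hypothetical enrollment sketch using pyotp (an assumption, purely to illustrate where an issuer label lands in the otpauth:// URI):

    import pyotp

    def provisioning_uri(secret: str, account: str, issuer_override: str = "") -> str:
        # Blank issuer falls back to "Arthexis", matching the field's help_text.
        issuer = issuer_override or "Arthexis"
        return pyotp.TOTP(secret).provisioning_uri(name=account, issuer_name=issuer)

    # provisioning_uri(pyotp.random_base32(), "admin@example.com", "My Lab")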
core/reference_utils.py
CHANGED
core/release.py
CHANGED
@@ -344,3 +344,7 @@ def publish(
     proc = subprocess.run(cmd, capture_output=True, text=True)
     if proc.returncode != 0:
         raise ReleaseError(proc.stdout + proc.stderr)
+
+    tag_name = f"v{version}"
+    _run(["git", "tag", tag_name])
+    _run(["git", "push", "origin", tag_name])
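publish() now tags and pushes the released version after a successful upload. The _run helper it calls lives in the module but is not shown in this diff; a plausible shape, assuming it mirrors the subprocess.run/ReleaseError pattern visible above:

    import subprocess

    class ReleaseError(RuntimeError):
        """Stand-in for the module's real exception type."""

    # Assumed shape of _run; the real helper is not part of this diff.
    def _run(cmd: list[str]) -> None:
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if proc.returncode != 0:
            raise ReleaseError(proc.stdout + proc.stderr)

    # After a clean publish of 0.1.12, the new lines amount to:
    # _run(["git", "tag", "v0.1.12"]); _run(["git", "push", "origin", "v0.1.12"])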
core/system.py
CHANGED
@@ -119,7 +119,7 @@ def _auto_upgrade_next_check() -> str:
 
 
 def _resolve_auto_upgrade_namespace(key: str) -> str | None:
-    """Resolve sigils within the ``AUTO-UPGRADE`` namespace."""
+    """Resolve sigils within the legacy ``AUTO-UPGRADE`` namespace."""
 
     normalized = key.replace("-", "_").upper()
     if normalized == "NEXT_CHECK":
@@ -137,6 +137,9 @@ def resolve_system_namespace_value(key: str) -> str | None:
 
     if not key:
         return None
+    normalized_key = key.replace("-", "_").upper()
+    if normalized_key == "NEXT_VER_CHECK":
+        return _auto_upgrade_next_check()
     namespace, _, remainder = key.partition(".")
     if not remainder:
         return None
@@ -218,8 +221,8 @@ def _build_system_fields(info: dict[str, object]) -> list[SystemField]:
     )
 
     add_field(
-        _("Next
-        "
+        _("Next version check"),
+        "NEXT-VER-CHECK",
         info.get("auto_upgrade_next_check", ""),
     )
 
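Both the legacy AUTO-UPGRADE resolver and the new top-level key share one normalization rule, so sigil keys are case- and separator-insensitive. A quick illustration:

    # All of these spellings reach the new NEXT_VER_CHECK branch:
    for key in ("NEXT-VER-CHECK", "next-ver-check", "Next_Ver_Check"):
        assert key.replace("-", "_").upper() == "NEXT_VER_CHECK"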
core/tasks.py
CHANGED
@@ -17,7 +17,7 @@ from nodes.models import NetMessage
 
 
 AUTO_UPGRADE_HEALTH_DELAY_SECONDS = 30
-
+AUTO_UPGRADE_SKIP_LOCK_NAME = "auto_upgrade_skip_revisions.lck"
 
 
 logger = logging.getLogger(__name__)
@@ -66,6 +66,46 @@ def _append_auto_upgrade_log(base_dir: Path, message: str) -> None:
         logger.warning("Failed to append auto-upgrade log entry: %s", message)
 
 
+def _skip_lock_path(base_dir: Path) -> Path:
+    return base_dir / "locks" / AUTO_UPGRADE_SKIP_LOCK_NAME
+
+
+def _load_skipped_revisions(base_dir: Path) -> set[str]:
+    skip_file = _skip_lock_path(base_dir)
+    try:
+        return {
+            line.strip()
+            for line in skip_file.read_text().splitlines()
+            if line.strip()
+        }
+    except FileNotFoundError:
+        return set()
+    except OSError:
+        logger.warning("Failed to read auto-upgrade skip lockfile")
+        return set()
+
+
+def _add_skipped_revision(base_dir: Path, revision: str) -> None:
+    if not revision:
+        return
+
+    skip_file = _skip_lock_path(base_dir)
+    try:
+        skip_file.parent.mkdir(parents=True, exist_ok=True)
+        existing = _load_skipped_revisions(base_dir)
+        if revision in existing:
+            return
+        with skip_file.open("a", encoding="utf-8") as fh:
+            fh.write(f"{revision}\n")
+        _append_auto_upgrade_log(
+            base_dir, f"Recorded blocked revision {revision} for auto-upgrade"
+        )
+    except OSError:
+        logger.warning(
+            "Failed to update auto-upgrade skip lockfile with revision %s", revision
+        )
+
+
 def _resolve_service_url(base_dir: Path) -> str:
     """Return the local URL used to probe the Django suite."""
 
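The skip list is a plain newline-delimited lockfile of commit hashes under locks/. A round-trip sketch of what the helpers read and write (temporary directory and hash are illustrative):

    import tempfile
    from pathlib import Path

    base_dir = Path(tempfile.mkdtemp())
    skip_file = base_dir / "locks" / "auto_upgrade_skip_revisions.lck"

    # _add_skipped_revision: create the directory on demand, append once.
    skip_file.parent.mkdir(parents=True, exist_ok=True)
    with skip_file.open("a", encoding="utf-8") as fh:
        fh.write("deadbeef\n")

    # _load_skipped_revisions: read back the set of non-empty lines.
    skipped = {
        line.strip() for line in skip_file.read_text().splitlines() if line.strip()
    }
    assert "deadbeef" in skipped  # check_github_updates() would skip this revision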
@@ -110,6 +150,23 @@ def check_github_updates() -> None:
     except Exception:
         startup = None
 
+    remote_revision = (
+        subprocess.check_output(
+            ["git", "rev-parse", f"origin/{branch}"], cwd=base_dir
+        )
+        .decode()
+        .strip()
+    )
+
+    skipped_revisions = _load_skipped_revisions(base_dir)
+    if remote_revision in skipped_revisions:
+        _append_auto_upgrade_log(
+            base_dir, f"Skipping auto-upgrade for blocked revision {remote_revision}"
+        )
+        if startup:
+            startup()
+        return
+
     upgrade_stamp = timezone.now().strftime("@ %Y%m%d %H:%M")
 
     upgrade_was_applied = False
@@ -120,19 +177,7 @@ def check_github_updates() -> None:
         .decode()
         .strip()
     )
-
-        subprocess.check_output(
-            [
-                "git",
-                "rev-parse",
-                f"origin/{branch}",
-            ],
-            cwd=base_dir,
-        )
-        .decode()
-        .strip()
-    )
-    if local == remote:
+    if local == remote_revision:
         if startup:
             startup()
         return
@@ -254,12 +299,29 @@ def _schedule_health_check(next_attempt: int) -> None:
     )
 
 
+def _handle_failed_health_check(base_dir: Path, detail: str) -> None:
+    revision = ""
+    try:
+        revision = (
+            subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=base_dir)
+            .decode()
+            .strip()
+        )
+    except Exception:  # pragma: no cover - best effort capture
+        logger.warning("Failed to determine revision during auto-upgrade revert")
+
+    _add_skipped_revision(base_dir, revision)
+    _append_auto_upgrade_log(base_dir, "Health check failed; reverting upgrade")
+    subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
+
+
 @shared_task
 def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
     """Verify the upgraded suite responds successfully.
 
-
-
+    After the post-upgrade delay the site is probed once; any response other
+    than HTTP 200 triggers an automatic revert and records the failing
+    revision so future upgrade attempts skip it.
     """
 
     base_dir = Path(__file__).resolve().parent.parent
@@ -270,33 +332,29 @@ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
     )
 
     status: int | None = None
+    detail = "succeeded"
     try:
         with urllib.request.urlopen(request, timeout=10) as response:
             status = getattr(response, "status", response.getcode())
     except urllib.error.HTTPError as exc:
         status = exc.code
+        detail = f"returned HTTP {exc.code}"
         logger.warning(
             "Auto-upgrade health check attempt %s returned HTTP %s", attempt, exc.code
         )
     except urllib.error.URLError as exc:
+        detail = f"failed with {exc}"
         logger.warning(
             "Auto-upgrade health check attempt %s failed: %s", attempt, exc
         )
     except Exception as exc:  # pragma: no cover - unexpected network error
+        detail = f"failed with {exc}"
         logger.exception(
             "Unexpected error probing suite during auto-upgrade attempt %s", attempt
         )
-        detail = f"failed with {exc}"
         _record_health_check_result(base_dir, attempt, status, detail)
-
-
-            base_dir,
-            "Health check raised unexpected error; reverting upgrade",
-        )
-        subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
-    else:
-        _schedule_health_check(attempt + 1)
-    return None
+        _handle_failed_health_check(base_dir, detail)
+        return False
 
     if status == 200:
         _record_health_check_result(base_dir, attempt, status, "succeeded")
@@ -307,21 +365,15 @@ def verify_auto_upgrade_health(attempt: int = 1) -> bool | None:
         )
         return True
 
-
-
-
-
-
-    )
-    _append_auto_upgrade_log(
-        base_dir,
-        "Health check failed three times; reverting upgrade",
-    )
-    subprocess.run(["./upgrade.sh", "--revert"], cwd=base_dir, check=True)
-    return False
+    if detail == "succeeded":
+        if status is not None:
+            detail = f"returned HTTP {status}"
+        else:
+            detail = "failed with unknown status"
 
-
-
+    _record_health_check_result(base_dir, attempt, status, detail)
+    _handle_failed_health_check(base_dir, detail)
+    return False
 
 
 @shared_task
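verify_auto_upgrade_health now probes the suite once per scheduled attempt and treats anything other than a clean HTTP 200 as a failure that reverts the upgrade and blocks the revision. A standalone sketch of that classification step, with an illustrative local URL:

    import urllib.error
    import urllib.request

    def probe(url: str = "http://127.0.0.1:8000/") -> tuple[bool, str]:
        # Mirrors the task's status/detail split: only HTTP 200 is healthy.
        try:
            with urllib.request.urlopen(url, timeout=10) as response:
                status = getattr(response, "status", response.getcode())
        except urllib.error.HTTPError as exc:
            return False, f"returned HTTP {exc.code}"
        except urllib.error.URLError as exc:
            return False, f"failed with {exc}"
        if status == 200:
            return True, "succeeded"
        return False, f"returned HTTP {status}"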
core/tests.py
CHANGED
@@ -770,6 +770,50 @@ class ReleaseProcessTests(TestCase):
         self.assertFalse(proc.stdout.strip())
         self.assertEqual(version_path.read_text(encoding="utf-8"), original)
 
+    @mock.patch("core.views.requests.get")
+    @mock.patch("core.views.release_utils.network_available", return_value=True)
+    @mock.patch("core.views.release_utils._git_clean", return_value=True)
+    def test_step_check_ignores_yanked_release(
+        self, git_clean, network_available, requests_get
+    ):
+        response = mock.Mock()
+        response.ok = True
+        response.json.return_value = {
+            "releases": {
+                "0.1.12": [
+                    {"filename": "pkg.whl", "yanked": True},
+                    {"filename": "pkg.tar.gz", "yanked": True},
+                ]
+            }
+        }
+        requests_get.return_value = response
+        self.release.version = "0.1.12"
+        _step_check_version(self.release, {}, Path("rel.log"))
+        requests_get.assert_called_once()
+
+    @mock.patch("core.views.requests.get")
+    @mock.patch("core.views.release_utils.network_available", return_value=True)
+    @mock.patch("core.views.release_utils._git_clean", return_value=True)
+    def test_step_check_blocks_available_release(
+        self, git_clean, network_available, requests_get
+    ):
+        response = mock.Mock()
+        response.ok = True
+        response.json.return_value = {
+            "releases": {
+                "0.1.12": [
+                    {"filename": "pkg.whl", "yanked": False},
+                    {"filename": "pkg.tar.gz"},
+                ]
+            }
+        }
+        requests_get.return_value = response
+        self.release.version = "0.1.12"
+        with self.assertRaises(Exception) as exc:
+            _step_check_version(self.release, {}, Path("rel.log"))
+        self.assertIn("already on PyPI", str(exc.exception))
+        requests_get.assert_called_once()
+
     @mock.patch("core.models.PackageRelease.dump_fixture")
     def test_save_does_not_dump_fixture(self, dump):
         self.release.pypi_url = "https://example.com"
@@ -1369,6 +1413,13 @@ class TodoFocusViewTests(TestCase):
         change_url = reverse("admin:core_todo_change", args=[todo.pk])
         self.assertContains(resp, f'src="{change_url}"')
 
+    def test_focus_view_includes_open_target_button(self):
+        todo = Todo.objects.create(request="Task", url="/docs/")
+        resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
+        self.assertContains(resp, 'class="todo-button todo-button-open"')
+        self.assertContains(resp, 'target="_blank"')
+        self.assertContains(resp, 'href="/docs/"')
+
     def test_focus_view_sanitizes_loopback_absolute_url(self):
         todo = Todo.objects.create(
             request="Task",
@@ -1402,6 +1453,19 @@ class TodoFocusViewTests(TestCase):
         change_url = reverse("admin:core_todo_change", args=[todo.pk])
         self.assertContains(resp, f'src="{change_url}"')
 
+    def test_focus_view_parses_auth_directives(self):
+        todo = Todo.objects.create(
+            request="Task",
+            url="/docs/?section=chart&_todo_auth=logout&_todo_auth=user:demo&_todo_auth=perm:core.view_user&_todo_auth=extra",
+        )
+        resp = self.client.get(reverse("todo-focus", args=[todo.pk]))
+        self.assertContains(resp, 'src="/docs/?section=chart"')
+        self.assertContains(resp, 'href="/docs/?section=chart"')
+        self.assertContains(resp, "logged out")
+        self.assertContains(resp, "Sign in using: demo")
+        self.assertContains(resp, "Required permissions: core.view_user")
+        self.assertContains(resp, "Additional authentication notes: extra")
+
     def test_focus_view_redirects_if_todo_completed(self):
         todo = Todo.objects.create(request="Task")
         todo.done_on = timezone.now()
core/views.py
CHANGED
@@ -18,7 +18,7 @@ from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import require_GET, require_POST
 from django.utils.http import url_has_allowed_host_and_scheme
 from pathlib import Path
-from urllib.parse import urlsplit, urlunsplit
+from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
 import errno
 import subprocess
 
@@ -40,7 +40,7 @@ def odoo_products(request):
         products = profile.execute(
             "product.product",
             "search_read",
-            [],
+            [[]],
             {"fields": ["name"], "limit": 50},
         )
     except Exception:
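The [[]] fix wraps an empty search domain in the positional-argument list that Odoo's external API expects; the previous [] passed no domain argument at all. A sketch of the same call over the stock XML-RPC transport (server URL, database, and credentials are illustrative, and profile.execute is assumed to forward to execute_kw):

    import xmlrpc.client

    url, db, uid, password = "https://odoo.example.com", "demo", 2, "secret"
    models = xmlrpc.client.ServerProxy(f"{url}/xmlrpc/2/object")

    # The third argument is the positional-args list; its first element is the
    # search domain, so an empty domain is [[]] rather than [].
    products = models.execute_kw(
        db, uid, password,
        "product.product", "search_read",
        [[]],
        {"fields": ["name"], "limit": 50},
    )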
@@ -357,7 +357,7 @@ def _step_check_todos(release, ctx, log_path: Path) -> None:
 
 def _step_check_version(release, ctx, log_path: Path) -> None:
     from . import release as release_utils
-    from packaging.version import Version
+    from packaging.version import InvalidVersion, Version
 
     if not release_utils._git_clean():
         proc = subprocess.run(
@@ -417,8 +417,33 @@ def _step_check_version(release, ctx, log_path: Path) -> None:
     if release_utils.network_available():
         try:
             resp = requests.get(f"https://pypi.org/pypi/{release.package.name}/json")
-            if resp.ok
-
+            if resp.ok:
+                data = resp.json()
+                releases = data.get("releases", {})
+                try:
+                    target_version = Version(release.version)
+                except InvalidVersion:
+                    target_version = None
+
+                for candidate, files in releases.items():
+                    same_version = candidate == release.version
+                    if target_version is not None and not same_version:
+                        try:
+                            same_version = Version(candidate) == target_version
+                        except InvalidVersion:
+                            same_version = False
+                    if not same_version:
+                        continue
+
+                    has_available_files = any(
+                        isinstance(file_data, dict)
+                        and not file_data.get("yanked", False)
+                        for file_data in files or []
+                    )
+                    if has_available_files:
+                        raise Exception(
+                            f"Version {release.version} already on PyPI"
+                        )
         except Exception as exc:
             # network errors should be logged but not crash
             if "already on PyPI" in str(exc):
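PyPI's JSON API returns a releases mapping from version string to a list of file entries, each carrying a yanked flag, which is why a fully yanked release no longer blocks re-publishing. The same check reduced to a standalone helper (the package name in the comment is illustrative):

    import requests

    def version_available_on_pypi(package: str, version: str) -> bool:
        """Return True if version has at least one non-yanked file on PyPI."""
        resp = requests.get(f"https://pypi.org/pypi/{package}/json", timeout=10)
        if not resp.ok:
            return False
        files = resp.json().get("releases", {}).get(version, [])
        return any(not f.get("yanked", False) for f in files)

    # A fully yanked 0.1.12 would let the release step proceed:
    # version_available_on_pypi("arthexis", "0.1.12")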
@@ -1164,13 +1189,81 @@ def release_progress(request, pk: int, action: str):
     return render(request, "core/release_progress.html", context)
 
 
-def
-
+def _dedupe_preserve_order(values):
+    seen = set()
+    result = []
+    for value in values:
+        if value in seen:
+            continue
+        seen.add(value)
+        result.append(value)
+    return result
+
+
+def _parse_todo_auth_directives(query: str):
+    directives = {
+        "require_logout": False,
+        "users": [],
+        "permissions": [],
+        "notes": [],
+    }
+    if not query:
+        return "", directives
+
+    remaining = []
+    for key, value in parse_qsl(query, keep_blank_values=True):
+        if key != "_todo_auth":
+            remaining.append((key, value))
+            continue
+        token = (value or "").strip()
+        if not token:
+            continue
+        kind, _, payload = token.partition(":")
+        kind = kind.strip().lower()
+        payload = payload.strip()
+        if kind in {"logout", "anonymous", "anon"}:
+            directives["require_logout"] = True
+        elif kind in {"user", "username"} and payload:
+            directives["users"].append(payload)
+        elif kind in {"perm", "permission"} and payload:
+            directives["permissions"].append(payload)
+        else:
+            directives["notes"].append(token)
+
+    sanitized_query = urlencode(remaining, doseq=True)
+    return sanitized_query, directives
+
+
+def _todo_iframe_url(request, todo: Todo):
+    """Return a safe iframe URL and auth context for ``todo``."""
 
     fallback = reverse("admin:core_todo_change", args=[todo.pk])
     raw_url = (todo.url or "").strip()
+
+    auth_context = {
+        "require_logout": False,
+        "users": [],
+        "permissions": [],
+        "notes": [],
+    }
+
+    def _final_context(target_url: str):
+        return {
+            "target_url": target_url or fallback,
+            "require_logout": auth_context["require_logout"],
+            "users": _dedupe_preserve_order(auth_context["users"]),
+            "permissions": _dedupe_preserve_order(auth_context["permissions"]),
+            "notes": _dedupe_preserve_order(auth_context["notes"]),
+            "has_requirements": bool(
+                auth_context["require_logout"]
+                or auth_context["users"]
+                or auth_context["permissions"]
+                or auth_context["notes"]
+            ),
+        }
+
     if not raw_url:
-        return fallback
+        return fallback, _final_context(fallback)
 
     focus_path = reverse("todo-focus", args=[todo.pk])
     focus_norm = focus_path.strip("/").lower()
@@ -1186,14 +1279,31 @@ def _todo_iframe_url(request, todo: Todo) -> str:
         return normalized == focus_norm if normalized else False
 
     if _is_focus_target(raw_url):
-        return fallback
+        return fallback, _final_context(fallback)
 
     parsed = urlsplit(raw_url)
+
+    def _merge_directives(parsed_result):
+        sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
+        if directives["require_logout"]:
+            auth_context["require_logout"] = True
+        auth_context["users"].extend(directives["users"])
+        auth_context["permissions"].extend(directives["permissions"])
+        auth_context["notes"].extend(directives["notes"])
+        return parsed_result._replace(query=sanitized_query)
+
     if not parsed.scheme and not parsed.netloc:
-
+        sanitized = _merge_directives(parsed)
+        path = sanitized.path or "/"
+        if not path.startswith("/"):
+            path = f"/{path}"
+        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
+        if _is_focus_target(relative_url):
+            return fallback, _final_context(fallback)
+        return relative_url or fallback, _final_context(relative_url)
 
     if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
-        return fallback
+        return fallback, _final_context(fallback)
 
     request_host = request.get_host().strip().lower()
     host_without_port = request_host.split(":", 1)[0]
@@ -1227,15 +1337,16 @@ def _todo_iframe_url(request, todo: Todo) -> str:
     hostname = (parsed.hostname or "").strip().lower()
     netloc = parsed.netloc.strip().lower()
     if hostname in allowed_hosts or netloc in allowed_hosts:
-
+        sanitized = _merge_directives(parsed)
+        path = sanitized.path or "/"
         if not path.startswith("/"):
             path = f"/{path}"
-        relative_url = urlunsplit(("", "", path,
+        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
         if _is_focus_target(relative_url):
-            return fallback
-        return relative_url or fallback
+            return fallback, _final_context(fallback)
+        return relative_url or fallback, _final_context(relative_url)
 
-    return fallback
+    return fallback, _final_context(fallback)
 
 
 @staff_member_required
@@ -1244,10 +1355,13 @@ def todo_focus(request, pk: int):
     if todo.done_on:
         return redirect(_get_return_url(request))
 
-    iframe_url = _todo_iframe_url(request, todo)
+    iframe_url, focus_auth = _todo_iframe_url(request, todo)
+    focus_target_url = focus_auth.get("target_url", iframe_url) if focus_auth else iframe_url
     context = {
         "todo": todo,
         "iframe_url": iframe_url,
+        "focus_target_url": focus_target_url,
+        "focus_auth": focus_auth,
        "next_url": _get_return_url(request),
         "done_url": reverse("todo-done", args=[todo.pk]),
     }