arthexis-0.1.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- arthexis-0.1.3.dist-info/METADATA +126 -0
- arthexis-0.1.3.dist-info/RECORD +73 -0
- arthexis-0.1.3.dist-info/WHEEL +5 -0
- arthexis-0.1.3.dist-info/licenses/LICENSE +21 -0
- arthexis-0.1.3.dist-info/top_level.txt +5 -0
- config/__init__.py +6 -0
- config/active_app.py +15 -0
- config/asgi.py +29 -0
- config/auth_app.py +8 -0
- config/celery.py +19 -0
- config/context_processors.py +68 -0
- config/loadenv.py +11 -0
- config/logging.py +43 -0
- config/middleware.py +25 -0
- config/offline.py +47 -0
- config/settings.py +374 -0
- config/urls.py +91 -0
- config/wsgi.py +17 -0
- core/__init__.py +0 -0
- core/admin.py +830 -0
- core/apps.py +67 -0
- core/backends.py +82 -0
- core/entity.py +97 -0
- core/environment.py +43 -0
- core/fields.py +70 -0
- core/lcd_screen.py +77 -0
- core/middleware.py +34 -0
- core/models.py +1277 -0
- core/notifications.py +95 -0
- core/release.py +451 -0
- core/system.py +111 -0
- core/tasks.py +100 -0
- core/tests.py +483 -0
- core/urls.py +11 -0
- core/user_data.py +333 -0
- core/views.py +431 -0
- nodes/__init__.py +0 -0
- nodes/actions.py +72 -0
- nodes/admin.py +347 -0
- nodes/apps.py +76 -0
- nodes/lcd.py +151 -0
- nodes/models.py +577 -0
- nodes/tasks.py +50 -0
- nodes/tests.py +1072 -0
- nodes/urls.py +13 -0
- nodes/utils.py +62 -0
- nodes/views.py +262 -0
- ocpp/__init__.py +0 -0
- ocpp/admin.py +392 -0
- ocpp/apps.py +24 -0
- ocpp/consumers.py +267 -0
- ocpp/evcs.py +911 -0
- ocpp/models.py +300 -0
- ocpp/routing.py +9 -0
- ocpp/simulator.py +357 -0
- ocpp/store.py +175 -0
- ocpp/tasks.py +27 -0
- ocpp/test_export_import.py +129 -0
- ocpp/test_rfid.py +345 -0
- ocpp/tests.py +1229 -0
- ocpp/transactions_io.py +119 -0
- ocpp/urls.py +17 -0
- ocpp/views.py +359 -0
- pages/__init__.py +0 -0
- pages/admin.py +231 -0
- pages/apps.py +10 -0
- pages/checks.py +41 -0
- pages/context_processors.py +72 -0
- pages/models.py +224 -0
- pages/tests.py +628 -0
- pages/urls.py +17 -0
- pages/utils.py +13 -0
- pages/views.py +191 -0
core/views.py
ADDED
@@ -0,0 +1,431 @@

import json
import shutil
from datetime import date, timedelta

import requests
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth import authenticate, login
from django.http import Http404, JsonResponse
from django.shortcuts import get_object_or_404, render
from django.views.decorators.csrf import csrf_exempt
from pathlib import Path
import subprocess

from utils.api import api_login_required

from .models import Product, Subscription, EnergyAccount, PackageRelease
from .models import RFID
from . import release as release_utils


def _append_log(path: Path, message: str) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("a", encoding="utf-8") as fh:
        fh.write(message + "\n")


def _changelog_notes(version: str) -> str:
    path = Path("CHANGELOG.rst")
    if not path.exists():
        return ""
    lines = path.read_text(encoding="utf-8").splitlines()
    prefix = f"{version} "
    for i, line in enumerate(lines):
        if line.startswith(prefix):
            j = i + 2
            items = []
            while j < len(lines) and lines[j].startswith("- "):
                items.append(lines[j])
                j += 1
            return "\n".join(items)
    return ""


def _step_check_pypi(release, ctx, log_path: Path) -> None:
    from . import release as release_utils

    _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
    if release_utils.network_available():
        try:
            resp = requests.get(
                f"https://pypi.org/pypi/{release.package.name}/json"
            )
            if resp.ok and release.version in resp.json().get("releases", {}):
                raise Exception(
                    f"Version {release.version} already on PyPI"
                )
        except Exception as exc:
            # network errors should be logged but not crash
            if "already on PyPI" in str(exc):
                raise
            _append_log(log_path, f"PyPI check failed: {exc}")
    else:
        _append_log(log_path, "Network unavailable, skipping PyPI check")


def _step_promote_build(release, ctx, log_path: Path) -> None:
    from . import release as release_utils
    release.pypi_url = f"https://pypi.org/project/{release.package.name}/{release.version}/"
    release.save(update_fields=["pypi_url"])
    PackageRelease.dump_fixture()
    _append_log(log_path, "Generating build files")
    commit_hash, branch, _current = release_utils.promote(
        package=release.to_package(),
        version=release.version,
        creds=release.to_credentials(),
    )
    release.revision = commit_hash
    release.save(update_fields=["revision"])
    ctx["branch"] = branch
    release_name = f"{release.package.name}-{release.version}-{commit_hash[:7]}"
    new_log = log_path.with_name(f"{release_name}.log")
    log_path.rename(new_log)
    ctx["log"] = new_log.name
    _append_log(new_log, "Build complete")


def _step_push_branch(release, ctx, log_path: Path) -> None:
    branch = ctx.get("branch")
    _append_log(log_path, f"Pushing branch {branch}")
    subprocess.run(["git", "push", "-u", "origin", branch], check=True)
    pr_url = None
    gh_path = shutil.which("gh")
    if gh_path:
        try:
            title = f"Release candidate for {release.version}"
            body = _changelog_notes(release.version)
            proc = subprocess.run(
                [
                    gh_path,
                    "pr",
                    "create",
                    "--title",
                    title,
                    "--body",
                    body,
                    "--base",
                    "main",
                    "--head",
                    branch,
                ],
                check=True,
                capture_output=True,
                text=True,
            )
            pr_url = proc.stdout.strip()
            ctx["pr_url"] = pr_url
            release.pr_url = pr_url
            release.save(update_fields=["pr_url"])
            _append_log(log_path, f"PR created: {pr_url}")
            cert_log = Path("logs") / "certifications.log"
            _append_log(cert_log, f"{release.version} {branch} {pr_url}")
            ctx["cert_log"] = str(cert_log)
        except Exception as exc:  # pragma: no cover - best effort
            _append_log(log_path, f"PR creation failed: {exc}")
    else:
        token = release.get_github_token()
        if token:
            try:  # pragma: no cover - best effort
                remote = subprocess.run(
                    ["git", "config", "--get", "remote.origin.url"],
                    check=True,
                    capture_output=True,
                    text=True,
                ).stdout.strip()
                repo = remote.rsplit(":", 1)[-1].split("github.com/")[-1].removesuffix(".git")
                title = f"Release candidate for {release.version}"
                body = _changelog_notes(release.version)
                resp = requests.post(
                    f"https://api.github.com/repos/{repo}/pulls",
                    json={
                        "title": title,
                        "head": branch,
                        "base": "main",
                        "body": body,
                    },
                    headers={"Authorization": f"token {token}"},
                    timeout=10,
                )
                resp.raise_for_status()
                pr_url = resp.json().get("html_url")
                if pr_url:
                    ctx["pr_url"] = pr_url
                    release.pr_url = pr_url
                    release.save(update_fields=["pr_url"])
                    _append_log(log_path, f"PR created: {pr_url}")
                    cert_log = Path("logs") / "certifications.log"
                    _append_log(cert_log, f"{release.version} {branch} {pr_url}")
                    ctx["cert_log"] = str(cert_log)
                else:
                    _append_log(log_path, "PR creation failed: no URL returned")
            except Exception as exc:
                _append_log(log_path, f"PR creation failed: {exc}")
        else:
            _append_log(
                log_path,
                "PR creation skipped: gh not installed and no GitHub token available",
            )
    subprocess.run(["git", "checkout", "main"], check=True)
    _append_log(log_path, "Branch pushed")


def _step_merge_publish(release, ctx, log_path: Path) -> None:
    from . import release as release_utils
    import time

    gh_path = shutil.which("gh")
    pr_url = ctx.get("pr_url") or release.pr_url
    if gh_path and pr_url:
        _append_log(log_path, "Waiting for PR checks")
        for _ in range(60):
            try:
                proc = subprocess.run(
                    [gh_path, "pr", "view", pr_url, "--json", "mergeable"],
                    capture_output=True,
                    text=True,
                    check=True,
                )
                state = json.loads(proc.stdout or "{}").get("mergeable")
                if state == "MERGEABLE":
                    break
            except Exception:
                pass
            time.sleep(1)
        _append_log(log_path, "Merging PR")
        try:
            subprocess.run(
                [gh_path, "pr", "merge", pr_url, "--merge", "--delete-branch"],
                check=True,
            )
            subprocess.run(["git", "pull", "--ff-only", "origin", "main"], check=True)
        except Exception as exc:
            _append_log(log_path, f"PR merge failed: {exc}")

    _append_log(log_path, "Uploading distribution")
    release_utils.publish(
        package=release.to_package(),
        version=release.version,
        creds=release.to_credentials(),
    )
    _append_log(log_path, "Upload complete")


PUBLISH_STEPS = [
    ("Check version availability", _step_check_pypi),
    ("Generate build", _step_promote_build),
    ("Push branch", _step_push_branch),
    ("Merge and publish", _step_merge_publish),
]


@csrf_exempt
def rfid_login(request):
    """Authenticate a user using an RFID."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    try:
        data = json.loads(request.body.decode())
    except json.JSONDecodeError:
        data = request.POST

    rfid = data.get("rfid")
    if not rfid:
        return JsonResponse({"detail": "rfid required"}, status=400)

    user = authenticate(request, rfid=rfid)
    if user is None:
        return JsonResponse({"detail": "invalid RFID"}, status=401)

    login(request, user)
    return JsonResponse({"id": user.id, "username": user.username})


@api_login_required
def product_list(request):
    """Return a JSON list of products."""

    products = list(
        Product.objects.values("id", "name", "description", "renewal_period")
    )
    return JsonResponse({"products": products})


@csrf_exempt
@api_login_required
def add_subscription(request):
    """Create a subscription for an energy account from POSTed JSON."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    try:
        data = json.loads(request.body.decode())
    except json.JSONDecodeError:
        data = request.POST

    account_id = data.get("account_id")
    product_id = data.get("product_id")

    if not account_id or not product_id:
        return JsonResponse(
            {"detail": "account_id and product_id required"}, status=400
        )

    try:
        product = Product.objects.get(id=product_id)
    except Product.DoesNotExist:
        return JsonResponse({"detail": "invalid product"}, status=404)

    sub = Subscription.objects.create(
        account_id=account_id,
        product=product,
        next_renewal=date.today() + timedelta(days=product.renewal_period),
    )
    return JsonResponse({"id": sub.id})


@api_login_required
def subscription_list(request):
    """Return subscriptions for the given account_id."""

    account_id = request.GET.get("account_id")
    if not account_id:
        return JsonResponse({"detail": "account_id required"}, status=400)

    subs = list(
        Subscription.objects.filter(account_id=account_id)
        .select_related("product")
        .values(
            "id",
            "product__name",
            "next_renewal",
        )
    )
    return JsonResponse({"subscriptions": subs})


@csrf_exempt
@api_login_required
def rfid_batch(request):
    """Export or import RFID tags in batch."""

    if request.method == "GET":
        color = request.GET.get("color", RFID.BLACK).upper()
        released = request.GET.get("released")
        if released is not None:
            released = released.lower()
        qs = RFID.objects.all()
        if color != "ALL":
            qs = qs.filter(color=color)
        if released in ("true", "false"):
            qs = qs.filter(released=(released == "true"))
        tags = [
            {
                "rfid": t.rfid,
                "energy_accounts": list(t.energy_accounts.values_list("id", flat=True)),
                "allowed": t.allowed,
                "color": t.color,
                "released": t.released,
            }
            for t in qs.order_by("rfid")
        ]
        return JsonResponse({"rfids": tags})

    if request.method == "POST":
        try:
            data = json.loads(request.body.decode())
        except json.JSONDecodeError:
            return JsonResponse({"detail": "invalid JSON"}, status=400)

        tags = data.get("rfids") if isinstance(data, dict) else data
        if not isinstance(tags, list):
            return JsonResponse({"detail": "rfids list required"}, status=400)

        count = 0
        for row in tags:
            rfid = (row.get("rfid") or "").strip()
            if not rfid:
                continue
            allowed = row.get("allowed", True)
            energy_accounts = row.get("energy_accounts") or []
            color = (
                (row.get("color") or RFID.BLACK).strip().upper() or RFID.BLACK
            )
            released = row.get("released", False)
            if isinstance(released, str):
                released = released.lower() == "true"

            tag, _ = RFID.objects.update_or_create(
                rfid=rfid.upper(),
                defaults={
                    "allowed": allowed,
                    "color": color,
                    "released": released,
                },
            )
            if energy_accounts:
                tag.energy_accounts.set(EnergyAccount.objects.filter(id__in=energy_accounts))
            else:
                tag.energy_accounts.clear()
            count += 1

        return JsonResponse({"imported": count})

    return JsonResponse({"detail": "GET or POST required"}, status=400)


@staff_member_required
def release_progress(request, pk: int, action: str):
    release = get_object_or_404(PackageRelease, pk=pk)
    if action != "publish":
        raise Http404("Unknown action")
    session_key = f"release_publish_{pk}"
    ctx = request.session.get(session_key, {"step": 0})
    step_count = ctx.get("step", 0)
    step_param = request.GET.get("step")

    identifier = f"{release.package.name}-{release.version}"
    if release.revision:
        identifier = f"{identifier}-{release.revision[:7]}"
    log_name = ctx.get("log") or f"{identifier}.log"
    log_path = Path("logs") / log_name
    ctx.setdefault("log", log_name)

    steps = PUBLISH_STEPS
    error = ctx.get("error")

    if step_param is not None and not error and step_count < len(steps):
        to_run = int(step_param)
        if to_run == step_count:
            name, func = steps[to_run]
            try:
                func(release, ctx, log_path)
                step_count += 1
                ctx["step"] = step_count
                request.session[session_key] = ctx
            except Exception as exc:  # pragma: no cover - best effort logging
                _append_log(log_path, f"{name} failed: {exc}")
                ctx["error"] = str(exc)
                request.session[session_key] = ctx

    done = step_count >= len(steps) and not ctx.get("error")

    log_content = log_path.read_text(encoding="utf-8") if log_path.exists() else ""
    next_step = step_count if not done and not ctx.get("error") else None
    context = {
        "release": release,
        "action": "publish",
        "steps": [s[0] for s in steps],
        "current_step": step_count,
        "next_step": next_step,
        "done": done,
        "error": ctx.get("error"),
        "log_content": log_content,
        "log_path": str(log_path),
        "pr_url": ctx.get("pr_url"),
        "cert_log": ctx.get("cert_log"),
    }
    request.session[session_key] = ctx
    return render(request, "core/release_progress.html", context)
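The module pairs a step-driven release pipeline with a small JSON API: release_progress is meant to be called repeatedly with a ?step=N query parameter, running one PUBLISH_STEPS entry per request and re-rendering the progress template, while the remaining views are plain Django views that speak JSON. A minimal client sketch for the RFID endpoints follows; it is illustrative only: the base address and /core/... URL paths are assumptions (core/urls.py is listed above but not expanded here), and it assumes that api_login_required honours the session cookie set by rfid_login.

# Hypothetical client sketch; the URL paths are assumptions, not taken from core/urls.py.
import requests

BASE = "http://localhost:8000"
session = requests.Session()

# Authenticate with an RFID; the session cookie is kept for the login-protected endpoints.
resp = session.post(f"{BASE}/core/rfid/login/", json={"rfid": "04A1B2C3"})
resp.raise_for_status()

# GET branch of rfid_batch: export released black tags.
tags = session.get(
    f"{BASE}/core/rfid/batch/", params={"color": "black", "released": "true"}
).json()["rfids"]

# POST branch of rfid_batch: re-import the same rows, which upserts by RFID value.
result = session.post(f"{BASE}/core/rfid/batch/", json={"rfids": tags}).json()
print(result)  # e.g. {"imported": 3}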
nodes/__init__.py
ADDED
File without changes
nodes/actions.py
ADDED
@@ -0,0 +1,72 @@

from __future__ import annotations

from typing import Dict, Iterable, Optional, Type

from .models import Node


class NodeAction:
    """Base class for actions that operate on a :class:`~nodes.models.Node`."""

    #: Human friendly name for this action
    display_name: str = ""
    #: Short slug used in URLs
    slug: str = ""
    #: Description of the action
    description: str = ""
    #: Whether this action supports running on remote nodes
    supports_remote: bool = False

    # registry of available actions
    registry: Dict[str, Type["NodeAction"]] = {}

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        if cls.slug:
            key = cls.slug
        else:
            key = cls.__name__.lower()
            cls.slug = key
        NodeAction.registry[key] = cls

    @classmethod
    def get_actions(cls) -> Iterable[Type["NodeAction"]]:
        """Return all registered node actions."""
        return cls.registry.values()

    @classmethod
    def run(cls, node: Optional[Node] = None, **kwargs):
        """Execute this action on ``node``.

        If ``node`` is ``None`` the local node is used. If the target node is
        not the local host and ``supports_remote`` is ``False``, a
        ``NotImplementedError`` is raised.
        """

        if node is None:
            node = Node.get_local()
        if node is None:
            raise ValueError("No local node configured")
        if not node.is_local and not cls.supports_remote:
            raise NotImplementedError("Remote node actions are not yet implemented")
        instance = cls()
        return instance.execute(node, **kwargs)

    def execute(self, node: Node, **kwargs):  # pragma: no cover - interface
        """Perform the action on ``node``."""
        raise NotImplementedError


class CaptureScreenshotAction(NodeAction):
    display_name = "Take Site Screenshot"
    slug = "capture-screenshot"

    def execute(self, node: Node, **kwargs):  # pragma: no cover - uses selenium
        from .utils import capture_screenshot, save_screenshot

        url = f"http://{node.address}:{node.port}"
        path = capture_screenshot(url)
        save_screenshot(path, node=node, method="NODE_ACTION")
        return path
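Because NodeAction.__init_subclass__ registers every concrete subclass in NodeAction.registry under its slug, adding a new action only requires defining a subclass; CaptureScreenshotAction is the only action defined in this module. The sketch below is a hypothetical illustration of the same pattern: RebootAction, its slug, and the reboot command are assumptions made for the example and are not part of the package.

# Hypothetical example of the registration pattern; RebootAction is not part of this package.
import subprocess

from nodes.actions import NodeAction
from nodes.models import Node


class RebootAction(NodeAction):
    display_name = "Reboot Node"
    slug = "reboot"
    description = "Request a reboot of the host running this node."

    def execute(self, node: Node, **kwargs):
        # supports_remote stays False, so NodeAction.run() rejects non-local nodes.
        subprocess.run(["sudo", "reboot"], check=False)
        return "reboot requested"


# Defining the class is enough to register it; it can then be looked up by slug.
action_cls = NodeAction.registry["reboot"]
action_cls.run()  # falls back to Node.get_local() when no node is given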