arthexis 0.1.6__py3-none-any.whl → 0.1.7__py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Potentially problematic release.
This version of arthexis might be problematic.
- {arthexis-0.1.6.dist-info → arthexis-0.1.7.dist-info}/METADATA +8 -8
- {arthexis-0.1.6.dist-info → arthexis-0.1.7.dist-info}/RECORD +32 -28
- config/celery.py +7 -0
- config/horologia_app.py +7 -0
- config/logging.py +8 -3
- config/settings.py +2 -2
- config/workgroup_app.py +7 -0
- core/admin.py +55 -11
- core/apps.py +2 -1
- core/checks.py +29 -0
- core/entity.py +29 -7
- core/models.py +49 -9
- core/release.py +29 -141
- core/system.py +2 -2
- core/test_system_info.py +21 -0
- core/tests.py +200 -1
- core/views.py +153 -134
- nodes/admin.py +211 -0
- nodes/apps.py +1 -1
- nodes/models.py +103 -7
- nodes/tests.py +27 -0
- ocpp/apps.py +1 -1
- ocpp/models.py +1 -1
- ocpp/simulator.py +4 -0
- ocpp/tests.py +5 -1
- pages/admin.py +8 -3
- pages/apps.py +1 -1
- pages/tests.py +23 -4
- pages/views.py +22 -3
- {arthexis-0.1.6.dist-info → arthexis-0.1.7.dist-info}/WHEEL +0 -0
- {arthexis-0.1.6.dist-info → arthexis-0.1.7.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.6.dist-info → arthexis-0.1.7.dist-info}/top_level.txt +0 -0
core/views.py
CHANGED
@@ -6,7 +6,7 @@ import requests
 from django.contrib.admin.views.decorators import staff_member_required
 from django.contrib.auth import authenticate, login
 from django.http import Http404, JsonResponse
-from django.shortcuts import get_object_or_404, render
+from django.shortcuts import get_object_or_404, render, redirect
 from django.views.decorators.csrf import csrf_exempt
 from pathlib import Path
 import subprocess
@@ -24,6 +24,12 @@ def _append_log(path: Path, message: str) -> None:
         fh.write(message + "\n")
 
 
+def _clean_repo() -> None:
+    """Return the git repository to a clean state."""
+    subprocess.run(["git", "reset", "--hard"], check=False)
+    subprocess.run(["git", "clean", "-fd"], check=False)
+
+
 def _changelog_notes(version: str) -> str:
     path = Path("CHANGELOG.rst")
     if not path.exists():
@@ -45,6 +51,51 @@ def _step_check_pypi(release, ctx, log_path: Path) -> None:
     from . import release as release_utils
     from packaging.version import Version
 
+    if not release_utils._git_clean():
+        proc = subprocess.run(
+            ["git", "status", "--porcelain"],
+            capture_output=True,
+            text=True,
+        )
+        files = [line[3:] for line in proc.stdout.splitlines()]
+        fixture_files = [
+            f
+            for f in files
+            if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
+        ]
+        if not files or len(fixture_files) != len(files):
+            raise Exception("Git repository is not clean")
+
+        summary = []
+        for f in fixture_files:
+            path = Path(f)
+            try:
+                data = json.loads(path.read_text(encoding="utf-8"))
+            except Exception:
+                count = 0
+                models: list[str] = []
+            else:
+                if isinstance(data, list):
+                    count = len(data)
+                    models = sorted(
+                        {obj.get("model", "") for obj in data if isinstance(obj, dict)}
+                    )
+                elif isinstance(data, dict):
+                    count = 1
+                    models = [data.get("model", "")]
+                else:  # pragma: no cover - unexpected structure
+                    count = 0
+                    models = []
+            summary.append({"path": f, "count": count, "models": models})
+
+        ctx["fixtures"] = summary
+        _append_log(
+            log_path,
+            "Committing fixture changes: " + ", ".join(fixture_files),
+        )
+        subprocess.run(["git", "add", *fixture_files], check=True)
+        subprocess.run(["git", "commit", "-m", "chore: update fixtures"], check=True)
+
     version_path = Path("VERSION")
     if version_path.exists():
         current = version_path.read_text(encoding="utf-8").strip()
@@ -52,7 +103,6 @@ def _step_check_pypi(release, ctx, log_path: Path) -> None:
             raise Exception(
                 f"Version {release.version} is older than existing {current}"
             )
-    version_path.write_text(release.version + "\n", encoding="utf-8")
 
     _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
     if release_utils.network_available():
@@ -75,141 +125,58 @@ def _step_check_pypi(release, ctx, log_path: Path) -> None:
 
 def _step_promote_build(release, ctx, log_path: Path) -> None:
     from . import release as release_utils
-    release.pypi_url = f"https://pypi.org/project/{release.package.name}/{release.version}/"
-    release.save(update_fields=["pypi_url"])
-    PackageRelease.dump_fixture()
     _append_log(log_path, "Generating build files")
-
-        package=release.to_package(),
-        version=release.version,
-        creds=release.to_credentials(),
-    )
-    release.revision = commit_hash
-    release.save(update_fields=["revision"])
-    ctx["branch"] = branch
-    release_name = f"{release.package.name}-{release.version}-{commit_hash[:7]}"
-    new_log = log_path.with_name(f"{release_name}.log")
-    log_path.rename(new_log)
-    ctx["log"] = new_log.name
-    _append_log(new_log, "Build complete")
-
-
-def _step_push_branch(release, ctx, log_path: Path) -> None:
-    branch = ctx.get("branch")
-    _append_log(log_path, f"Pushing branch {branch}")
-    subprocess.run(["git", "push", "-u", "origin", branch], check=True)
-    pr_url = None
-    gh_path = shutil.which("gh")
-    if gh_path:
+    try:
         try:
-
-
-
+            subprocess.run(["git", "fetch", "origin", "main"], check=True)
+            subprocess.run(["git", "rebase", "origin/main"], check=True)
+        except subprocess.CalledProcessError as exc:
+            subprocess.run(["git", "rebase", "--abort"], check=False)
+            raise Exception("Rebase onto main failed") from exc
+        release_utils.promote(
+            package=release.to_package(),
+            version=release.version,
+            creds=release.to_credentials(),
+        )
+        diff = subprocess.run(
+            [
+                "git",
+                "status",
+                "--porcelain",
+                "VERSION",
+                "core/fixtures/releases.json",
+            ],
+            capture_output=True,
+            text=True,
+        )
+        if diff.stdout.strip():
+            subprocess.run(
+                ["git", "add", "VERSION", "core/fixtures/releases.json"],
+                check=True,
+            )
+            subprocess.run(
                 [
-
-                    "
-                    "
-                    "
-                    title,
-                    "--body",
-                    body,
-                    "--base",
-                    "main",
-                    "--head",
-                    branch,
+                    "git",
+                    "commit",
+                    "-m",
+                    f"chore: update release metadata for v{release.version}",
                 ],
                 check=True,
-                capture_output=True,
-                text=True,
-            )
-            pr_url = proc.stdout.strip()
-            ctx["pr_url"] = pr_url
-            release.pr_url = pr_url
-            release.save(update_fields=["pr_url"])
-            _append_log(log_path, f"PR created: {pr_url}")
-            cert_log = Path("logs") / "certifications.log"
-            _append_log(cert_log, f"{release.version} {branch} {pr_url}")
-            ctx["cert_log"] = str(cert_log)
-        except Exception as exc:  # pragma: no cover - best effort
-            _append_log(log_path, f"PR creation failed: {exc}")
-    else:
-        token = release.get_github_token()
-        if token:
-            try:  # pragma: no cover - best effort
-                remote = subprocess.run(
-                    ["git", "config", "--get", "remote.origin.url"],
-                    check=True,
-                    capture_output=True,
-                    text=True,
-                ).stdout.strip()
-                repo = remote.rsplit(":", 1)[-1].split("github.com/")[-1].removesuffix(".git")
-                title = f"Release candidate for {release.version}"
-                body = _changelog_notes(release.version)
-                resp = requests.post(
-                    f"https://api.github.com/repos/{repo}/pulls",
-                    json={
-                        "title": title,
-                        "head": branch,
-                        "base": "main",
-                        "body": body,
-                    },
-                    headers={"Authorization": f"token {token}"},
-                    timeout=10,
-                )
-                resp.raise_for_status()
-                pr_url = resp.json().get("html_url")
-                if pr_url:
-                    ctx["pr_url"] = pr_url
-                    release.pr_url = pr_url
-                    release.save(update_fields=["pr_url"])
-                    _append_log(log_path, f"PR created: {pr_url}")
-                    cert_log = Path("logs") / "certifications.log"
-                    _append_log(cert_log, f"{release.version} {branch} {pr_url}")
-                    ctx["cert_log"] = str(cert_log)
-                else:
-                    _append_log(log_path, "PR creation failed: no URL returned")
-            except Exception as exc:
-                _append_log(log_path, f"PR creation failed: {exc}")
-        else:
-            _append_log(
-                log_path,
-                "PR creation skipped: gh not installed and no GitHub token available",
             )
-
-
+        subprocess.run(["git", "push"], check=True)
+        PackageRelease.dump_fixture()
+    except Exception:
+        _clean_repo()
+        raise
+    release_name = f"{release.package.name}-{release.version}"
+    new_log = log_path.with_name(f"{release_name}.log")
+    log_path.rename(new_log)
+    ctx["log"] = new_log.name
+    _append_log(new_log, "Build complete")
 
 
-def 
+def _step_publish(release, ctx, log_path: Path) -> None:
     from . import release as release_utils
-    import time
-
-    gh_path = shutil.which("gh")
-    pr_url = ctx.get("pr_url") or release.pr_url
-    if gh_path and pr_url:
-        _append_log(log_path, "Waiting for PR checks")
-        for _ in range(60):
-            try:
-                proc = subprocess.run(
-                    [gh_path, "pr", "view", pr_url, "--json", "mergeable"],
-                    capture_output=True,
-                    text=True,
-                    check=True,
-                )
-                state = json.loads(proc.stdout or "{}").get("mergeable")
-                if state == "MERGEABLE":
-                    break
-            except Exception:
-                pass
-            time.sleep(1)
-        _append_log(log_path, "Merging PR")
-        try:
-            subprocess.run(
-                [gh_path, "pr", "merge", pr_url, "--merge", "--delete-branch"],
-                check=True,
-            )
-            subprocess.run(["git", "pull", "--ff-only", "origin", "main"], check=True)
-        except Exception as exc:
-            _append_log(log_path, f"PR merge failed: {exc}")
 
     _append_log(log_path, "Uploading distribution")
     release_utils.publish(
@@ -217,14 +184,16 @@ def _step_merge_publish(release, ctx, log_path: Path) -> None:
         version=release.version,
         creds=release.to_credentials(),
     )
+    release.pypi_url = f"https://pypi.org/project/{release.package.name}/{release.version}/"
+    release.save(update_fields=["pypi_url"])
+    PackageRelease.dump_fixture()
    _append_log(log_path, "Upload complete")
 
 
 PUBLISH_STEPS = [
     ("Check version availability", _step_check_pypi),
     ("Generate build", _step_promote_build),
-    ("
-    ("Merge and publish", _step_merge_publish),
+    ("Publish", _step_publish),
 ]
 
 
@@ -392,13 +361,48 @@ def release_progress(request, pk: int, action: str):
     if action != "publish":
         raise Http404("Unknown action")
     session_key = f"release_publish_{pk}"
-
+    lock_path = Path("locks") / f"release_publish_{pk}.json"
+    restart_path = Path("locks") / f"release_publish_{pk}.restarts"
+
+    if request.GET.get("restart"):
+        count = 0
+        if restart_path.exists():
+            try:
+                count = int(restart_path.read_text(encoding="utf-8"))
+            except Exception:
+                count = 0
+        restart_path.parent.mkdir(parents=True, exist_ok=True)
+        restart_path.write_text(str(count + 1), encoding="utf-8")
+        _clean_repo()
+        release.pypi_url = ""
+        release.save(update_fields=["pypi_url"])
+        request.session.pop(session_key, None)
+        if lock_path.exists():
+            lock_path.unlink()
+        log_dir = Path("logs")
+        for f in log_dir.glob(f"{release.package.name}-{release.version}*.log"):
+            f.unlink()
+        return redirect(request.path)
+    ctx = request.session.get(session_key)
+    if ctx is None and lock_path.exists():
+        try:
+            ctx = json.loads(lock_path.read_text(encoding="utf-8"))
+        except Exception:
+            ctx = {"step": 0}
+    if ctx is None:
+        ctx = {"step": 0}
+        if restart_path.exists():
+            restart_path.unlink()
+    restart_count = 0
+    if restart_path.exists():
+        try:
+            restart_count = int(restart_path.read_text(encoding="utf-8"))
+        except Exception:
+            restart_count = 0
     step_count = ctx.get("step", 0)
     step_param = request.GET.get("step")
 
     identifier = f"{release.package.name}-{release.version}"
-    if release.revision:
-        identifier = f"{identifier}-{release.revision[:7]}"
     log_name = f"{identifier}.log"
     if ctx.get("log") != log_name:
         ctx = {"step": 0, "log": log_name}
@@ -406,6 +410,10 @@ def release_progress(request, pk: int, action: str):
     log_path = Path("logs") / log_name
     ctx.setdefault("log", log_name)
 
+    if step_count == 0 and (step_param is None or step_param == "0"):
+        if log_path.exists():
+            log_path.unlink()
+
     steps = PUBLISH_STEPS
     error = ctx.get("error")
 
@@ -418,10 +426,14 @@ def release_progress(request, pk: int, action: str):
                 step_count += 1
                 ctx["step"] = step_count
                 request.session[session_key] = ctx
+                lock_path.parent.mkdir(parents=True, exist_ok=True)
+                lock_path.write_text(json.dumps(ctx), encoding="utf-8")
             except Exception as exc:  # pragma: no cover - best effort logging
                 _append_log(log_path, f"{name} failed: {exc}")
                 ctx["error"] = str(exc)
                 request.session[session_key] = ctx
+                lock_path.parent.mkdir(parents=True, exist_ok=True)
+                lock_path.write_text(json.dumps(ctx), encoding="utf-8")
 
     done = step_count >= len(steps) and not ctx.get("error")
 
@@ -437,8 +449,15 @@ def release_progress(request, pk: int, action: str):
         "error": ctx.get("error"),
         "log_content": log_content,
         "log_path": str(log_path),
-        "pr_url": ctx.get("pr_url"),
         "cert_log": ctx.get("cert_log"),
+        "fixtures": ctx.get("fixtures"),
+        "restart_count": restart_count,
     }
     request.session[session_key] = ctx
+    if done or ctx.get("error"):
+        if lock_path.exists():
+            lock_path.unlink()
+    else:
+        lock_path.parent.mkdir(parents=True, exist_ok=True)
+        lock_path.write_text(json.dumps(ctx), encoding="utf-8")
     return render(request, "core/release_progress.html", context)
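The rewritten release_progress view keeps its step counter both in the Django session and in a JSON lock file under locks/, so a restarted process or a second browser tab can resume or restart a publish run. The sketch below illustrates that persistence pattern in isolation; the ProgressLock class, its method names, and the example lock name are assumptions made for this illustration, not part of the arthexis API.

# Minimal sketch of the lock-file persistence pattern used by release_progress.
# ProgressLock and its field names are illustrative, not identifiers from arthexis.
import json
from pathlib import Path


class ProgressLock:
    """Persist a small progress dict to a JSON file under locks/."""

    def __init__(self, name: str, directory: Path = Path("locks")) -> None:
        self.path = directory / f"{name}.json"

    def load(self, default: dict | None = None) -> dict:
        # Fall back to a fresh context when the file is missing or corrupt.
        if self.path.exists():
            try:
                return json.loads(self.path.read_text(encoding="utf-8"))
            except Exception:
                pass
        return dict(default or {"step": 0})

    def save(self, ctx: dict) -> None:
        # Mirror the in-session context to disk after every step.
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(json.dumps(ctx), encoding="utf-8")

    def clear(self) -> None:
        # Remove the lock once the run finishes or errors out.
        if self.path.exists():
            self.path.unlink()


if __name__ == "__main__":
    lock = ProgressLock("release_publish_42")
    ctx = lock.load()
    ctx["step"] = ctx.get("step", 0) + 1
    lock.save(ctx)      # survives a process restart
    print(lock.load())  # {'step': 1}
    lock.clear()

In the actual view the same dictionary also carries the log file name, any error message, and the fixture summary gathered during the PyPI check, as the diff above shows.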
nodes/admin.py
CHANGED
@@ -9,10 +9,16 @@ from django.db import models
 from django.conf import settings
 from pathlib import Path
 from django.http import HttpResponse
+from django.template.response import TemplateResponse
+from django.core.management import call_command
 import base64
 import pyperclip
 from pyperclip import PyperclipException
 import uuid
+import subprocess
+import io
+import threading
+import re
 from .utils import capture_screenshot, save_screenshot
 from .actions import NodeAction
 
@@ -23,11 +29,18 @@ from .models import (
     ContentSample,
     NodeTask,
     NetMessage,
+    Operation,
+    Interrupt,
+    Logbook,
     User,
 )
 from core.admin import UserAdmin as CoreUserAdmin
 
 
+RUN_CONTEXTS: dict[int, dict] = {}
+SIGIL_RE = re.compile(r"\[[A-Za-z0-9_]+\.[A-Za-z0-9_]+\]")
+
+
 class NodeAdminForm(forms.ModelForm):
     class Meta:
         model = Node
@@ -356,4 +369,202 @@ class NodeTaskAdmin(admin.ModelAdmin):
     execute.short_description = "Run task on nodes"
 
 
+@admin.register(Operation)
+class OperationAdmin(admin.ModelAdmin):
+    list_display = ("name",)
+    formfield_overrides = {models.TextField: {"widget": CodeEditorWidget}}
+    change_form_template = "admin/nodes/operation/change_form.html"
+
+    def get_urls(self):
+        urls = super().get_urls()
+        custom = [
+            path(
+                "<path:object_id>/run/",
+                self.admin_site.admin_view(self.run_view),
+                name="nodes_operation_run",
+            )
+        ]
+        return custom + urls
+
+    def run_view(self, request, object_id):
+        operation = self.get_object(request, object_id)
+        if not operation:
+            self.message_user(request, "Unknown operation", messages.ERROR)
+            return redirect("..")
+        context = RUN_CONTEXTS.setdefault(operation.pk, {"inputs": {}})
+        template_text = operation.resolve_sigils("template")
+
+        # Interrupt handling
+        interrupt_id = request.GET.get("interrupt")
+        if interrupt_id:
+            try:
+                interrupt = operation.outgoing_interrupts.get(pk=interrupt_id)
+            except Interrupt.DoesNotExist:
+                self.message_user(request, "Unknown interrupt", messages.ERROR)
+                return redirect(request.path)
+            proc = context.get("process")
+            if proc and proc.poll() is None:
+                proc.terminate()
+                out, err = proc.communicate()
+                log = context.get("log")
+                if log:
+                    log.output = out
+                    log.error = err
+                    log.interrupted = True
+                    log.interrupt = interrupt
+                    log.save()
+            RUN_CONTEXTS.pop(operation.pk, None)
+            return redirect(
+                reverse("admin:nodes_operation_run", args=[interrupt.to_operation.pk])
+            )
+
+        # Check running processes
+        proc = context.get("process")
+        thread = context.get("thread")
+        if proc and proc.poll() is not None:
+            out, err = proc.communicate()
+            log = context.pop("log")
+            log.output = out
+            log.error = err
+            log.save()
+            RUN_CONTEXTS.pop(operation.pk, None)
+            self.message_user(request, "Operation executed", messages.SUCCESS)
+            return redirect("..")
+        if thread and not thread.is_alive():
+            out = context.pop("out")
+            err = context.pop("err")
+            log = context.pop("log")
+            log.output = out.getvalue()
+            log.error = err.getvalue()
+            log.save()
+            RUN_CONTEXTS.pop(operation.pk, None)
+            self.message_user(request, "Operation executed", messages.SUCCESS)
+            return redirect("..")
+
+        interrupts = [
+            (i, i.resolve_sigils("preview"))
+            for i in operation.outgoing_interrupts.all().order_by("-priority")
+        ]
+        logs = Logbook.objects.filter(operation=operation).order_by("created")
+
+        # Waiting for user-provided sigils
+        waiting = context.get("waiting_inputs")
+        if waiting:
+            if request.method == "POST":
+                for token in waiting:
+                    name = token[1:-1].replace(".", "__")
+                    context["inputs"][token] = request.POST.get(name, "")
+                command = context.pop("pending_command")
+                for token, value in context["inputs"].items():
+                    command = command.replace(token, value)
+                context["waiting_inputs"] = None
+                self._start_operation(context, operation, command, request.user)
+                return redirect(request.path)
+            form_fields = [(t, t[1:-1].replace(".", "__")) for t in waiting]
+            tpl_context = {
+                **self.admin_site.each_context(request),
+                "operation": operation,
+                "interrupts": interrupts,
+                "logs": logs,
+                "waiting_inputs": form_fields,
+                "template": template_text,
+            }
+            return TemplateResponse(
+                request, "admin/nodes/operation/run.html", tpl_context
+            )
+
+        # Waiting for user continuation
+        if context.get("waiting_continue"):
+            if request.method == "POST":
+                context["waiting_continue"] = False
+                RUN_CONTEXTS.pop(operation.pk, None)
+                self.message_user(request, "Operation executed", messages.SUCCESS)
+                return redirect("..")
+            tpl_context = {
+                **self.admin_site.each_context(request),
+                "operation": operation,
+                "interrupts": interrupts,
+                "logs": logs,
+                "waiting_continue": True,
+                "template": template_text,
+            }
+            return TemplateResponse(
+                request, "admin/nodes/operation/run.html", tpl_context
+            )
+
+        # If a process or thread is running, show running state
+        if context.get("process") or context.get("thread"):
+            tpl_context = {
+                **self.admin_site.each_context(request),
+                "operation": operation,
+                "interrupts": interrupts,
+                "logs": logs,
+                "running": True,
+                "template": template_text,
+            }
+            return TemplateResponse(
+                request, "admin/nodes/operation/run.html", tpl_context
+            )
+
+        if request.method == "POST":
+            command = operation.resolve_sigils("command")
+            for token, value in context["inputs"].items():
+                command = command.replace(token, value)
+            unresolved = SIGIL_RE.findall(command)
+            if unresolved:
+                context["waiting_inputs"] = unresolved
+                context["pending_command"] = command
+                return redirect(request.path)
+            if command.strip() == "...":
+                log = Logbook.objects.create(
+                    operation=operation,
+                    user=request.user,
+                    input_text=command,
+                    output="Waiting for user continuation",
+                )
+                context["log"] = log
+                context["waiting_continue"] = True
+                return redirect(request.path)
+            self._start_operation(context, operation, command, request.user)
+            return redirect(request.path)
+
+        tpl_context = {
+            **self.admin_site.each_context(request),
+            "operation": operation,
+            "interrupts": interrupts,
+            "logs": logs,
+            "template": template_text,
+        }
+        return TemplateResponse(request, "admin/nodes/operation/run.html", tpl_context)
+
+    def _start_operation(self, ctx, operation, command, user):
+        log = Logbook.objects.create(operation=operation, user=user, input_text=command)
+        if operation.is_django:
+            out = io.StringIO()
+            err = io.StringIO()
+
+            def target():
+                try:
+                    call_command(*command.split(), stdout=out, stderr=err)
+                except Exception as exc:  # pragma: no cover - unexpected errors
+                    err.write(str(exc))
+
+            thread = threading.Thread(target=target)
+            thread.start()
+            ctx.update({"thread": thread, "out": out, "err": err, "log": log})
+        else:
+            proc = subprocess.Popen(
+                command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
+            )
+            ctx.update({"process": proc, "log": log})
+
+    def changeform_view(self, request, object_id=None, form_url="", extra_context=None):
+        extra_context = extra_context or {}
+        if object_id:
+            extra_context["run_url"] = reverse(
+                "admin:nodes_operation_run", args=[object_id]
+            )
+        return super().changeform_view(request, object_id, form_url, extra_context)
+
+
 admin.site.register(User, CoreUserAdmin)
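The new OperationAdmin.run_view substitutes [model.field]-style sigil tokens into an operation's command and, when any tokens remain unresolved, stores the pending command and renders a form asking the user for values. A minimal standalone sketch of that substitution step follows; resolve_command and the sample [Node.hostname] token are illustrative assumptions, not identifiers from the package.

# Sketch of the sigil-substitution step used by OperationAdmin.run_view.
# resolve_command and the example token are illustrative, not arthexis API.
import re

# Same token syntax as nodes/admin.py: [Something.field]
SIGIL_RE = re.compile(r"\[[A-Za-z0-9_]+\.[A-Za-z0-9_]+\]")


def resolve_command(command: str, inputs: dict[str, str]) -> tuple[str, list[str]]:
    """Replace known sigils and report any that still need user input."""
    for token, value in inputs.items():
        command = command.replace(token, value)
    unresolved = SIGIL_RE.findall(command)
    return command, unresolved


if __name__ == "__main__":
    cmd = "ping -c 1 [Node.hostname]"
    partial, missing = resolve_command(cmd, {})
    print(missing)  # ['[Node.hostname]'] -> the admin would render an input form
    final, missing = resolve_command(cmd, {"[Node.hostname]": "localhost"})
    print(final)    # 'ping -c 1 localhost' -> the admin starts the operation

Commands that resolve cleanly are then started either through Django's call_command in a background thread or through subprocess.Popen, with the output captured into a Logbook entry, as the diff above shows.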
nodes/apps.py
CHANGED
@@ -68,7 +68,7 @@ def _trigger_startup_notification(**_: object) -> None:
 class NodesConfig(AppConfig):
     default_auto_field = "django.db.models.BigAutoField"
     name = "nodes"
-    verbose_name = "
+    verbose_name = "2. Infrastructure"
 
     def ready(self):  # pragma: no cover - exercised on app start
         request_started.connect(