arthexis 0.1.16__py3-none-any.whl → 0.1.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/METADATA +95 -41
- arthexis-0.1.28.dist-info/RECORD +112 -0
- config/asgi.py +1 -15
- config/middleware.py +47 -1
- config/settings.py +21 -30
- config/settings_helpers.py +176 -1
- config/urls.py +69 -1
- core/admin.py +805 -473
- core/apps.py +6 -8
- core/auto_upgrade.py +19 -4
- core/backends.py +13 -3
- core/celery_utils.py +73 -0
- core/changelog.py +66 -5
- core/environment.py +4 -5
- core/models.py +1825 -218
- core/notifications.py +1 -1
- core/reference_utils.py +10 -11
- core/release.py +55 -7
- core/sigil_builder.py +2 -2
- core/sigil_resolver.py +1 -66
- core/system.py +285 -4
- core/tasks.py +439 -138
- core/test_system_info.py +43 -5
- core/tests.py +516 -18
- core/user_data.py +94 -21
- core/views.py +348 -186
- nodes/admin.py +904 -67
- nodes/apps.py +12 -1
- nodes/feature_checks.py +30 -0
- nodes/models.py +800 -127
- nodes/rfid_sync.py +1 -1
- nodes/tasks.py +98 -3
- nodes/tests.py +1381 -152
- nodes/urls.py +15 -1
- nodes/utils.py +51 -3
- nodes/views.py +1382 -152
- ocpp/admin.py +1970 -152
- ocpp/consumers.py +839 -34
- ocpp/models.py +968 -17
- ocpp/network.py +398 -0
- ocpp/store.py +411 -43
- ocpp/tasks.py +261 -3
- ocpp/test_export_import.py +1 -0
- ocpp/test_rfid.py +194 -6
- ocpp/tests.py +1918 -87
- ocpp/transactions_io.py +9 -1
- ocpp/urls.py +8 -3
- ocpp/views.py +700 -53
- pages/admin.py +262 -30
- pages/apps.py +35 -0
- pages/context_processors.py +28 -21
- pages/defaults.py +1 -1
- pages/forms.py +31 -8
- pages/middleware.py +6 -2
- pages/models.py +86 -2
- pages/module_defaults.py +5 -5
- pages/site_config.py +137 -0
- pages/tests.py +1050 -126
- pages/urls.py +14 -2
- pages/utils.py +70 -0
- pages/views.py +622 -56
- arthexis-0.1.16.dist-info/RECORD +0 -111
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/WHEEL +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.16.dist-info → arthexis-0.1.28.dist-info}/top_level.txt +0 -0
core/apps.py
CHANGED
@@ -38,6 +38,7 @@ class CoreConfig(AppConfig):
             patch_admin_sigil_builder_view,
             generate_model_sigils,
         )
+        from .celery_utils import normalize_periodic_task_name
         from .admin_history import patch_admin_history
 
         from django_otp.plugins.otp_totp.models import TOTPDevice as OTP_TOTPDevice
@@ -222,8 +223,11 @@ class CoreConfig(AppConfig):
         schedule, _ = IntervalSchedule.objects.get_or_create(
             every=1, period=IntervalSchedule.HOURS
         )
-        PeriodicTask.objects.update_or_create(
-            name="poll_email_collectors",
+        task_name = normalize_periodic_task_name(
+            PeriodicTask.objects, "poll_email_collectors"
+        )
+        PeriodicTask.objects.update_or_create(
+            name=task_name,
             defaults={
                 "interval": schedule,
                 "task": "core.tasks.poll_email_collectors",
@@ -348,9 +352,3 @@ class CoreConfig(AppConfig):
             weak=False,
         )
 
-        try:
-            from .mcp.auto_start import schedule_auto_start
-
-            schedule_auto_start(check_profiles_immediately=False)
-        except Exception:  # pragma: no cover - defensive
-            logger.exception("Failed to schedule MCP auto-start")
core/auto_upgrade.py
CHANGED
@@ -10,6 +10,14 @@ from django.conf import settings
 AUTO_UPGRADE_TASK_NAME = "auto-upgrade-check"
 AUTO_UPGRADE_TASK_PATH = "core.tasks.check_github_updates"
 
+DEFAULT_AUTO_UPGRADE_MODE = "version"
+AUTO_UPGRADE_INTERVAL_MINUTES = {
+    "latest": 5,
+    "stable": 60,
+    DEFAULT_AUTO_UPGRADE_MODE: 720,
+}
+AUTO_UPGRADE_FALLBACK_INTERVAL = AUTO_UPGRADE_INTERVAL_MINUTES["stable"]
+
 
 def ensure_auto_upgrade_periodic_task(
     sender=None, *, base_dir: Path | None = None, **kwargs
@@ -30,8 +38,6 @@ def ensure_auto_upgrade_periodic_task(
 
     lock_dir = base_dir / "locks"
     mode_file = lock_dir / "auto_upgrade.lck"
-    if not mode_file.exists():
-        return
 
     try:  # pragma: no cover - optional dependency failures
         from django_celery_beat.models import IntervalSchedule, PeriodicTask
@@ -39,8 +45,17 @@ def ensure_auto_upgrade_periodic_task(
     except Exception:
         return
 
-
-
+    if not mode_file.exists():
+        try:
+            PeriodicTask.objects.filter(name=AUTO_UPGRADE_TASK_NAME).delete()
+        except (OperationalError, ProgrammingError):  # pragma: no cover - DB not ready
+            return
+        return
+
+    _mode = mode_file.read_text().strip().lower() or DEFAULT_AUTO_UPGRADE_MODE
+    interval_minutes = AUTO_UPGRADE_INTERVAL_MINUTES.get(
+        _mode, AUTO_UPGRADE_FALLBACK_INTERVAL
+    )
 
     try:
         schedule, _ = IntervalSchedule.objects.get_or_create(
core/backends.py
CHANGED
@@ -81,15 +81,22 @@ class RFIDBackend:
         if not rfid_value:
             return None
 
-        tag = RFID.
-        if not tag
+        tag = RFID.matching_queryset(rfid_value).filter(allowed=True).first()
+        if not tag:
             return None
 
+        update_fields: list[str] = []
+        if tag.adopt_rfid(rfid_value):
+            update_fields.append("rfid")
+        if update_fields:
+            tag.save(update_fields=update_fields)
+
         command = (tag.external_command or "").strip()
         if command:
             env = os.environ.copy()
             env["RFID_VALUE"] = rfid_value
             env["RFID_LABEL_ID"] = str(tag.pk)
+            env["RFID_ENDIANNESS"] = getattr(tag, "endianness", RFID.BIG_ENDIAN)
             try:
                 completed = subprocess.run(
                     command,
@@ -117,6 +124,7 @@ class RFIDBackend:
             env = os.environ.copy()
             env["RFID_VALUE"] = rfid_value
             env["RFID_LABEL_ID"] = str(tag.pk)
+            env["RFID_ENDIANNESS"] = getattr(tag, "endianness", RFID.BIG_ENDIAN)
             with contextlib.suppress(Exception):
                 subprocess.Popen(
                     post_command,
@@ -209,7 +217,9 @@ class LocalhostAdminBackend(ModelBackend):
         try:
             ipaddress.ip_address(host)
         except ValueError:
-            if
+            if host.lower() == "localhost":
+                host = "127.0.0.1"
+            elif not self._is_test_environment(request):
                 return None
         forwarded = request.META.get("HTTP_X_FORWARDED_FOR")
         if forwarded:
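
Both the synchronous command and the post-command hook now receive the tag's endianness through the RFID_ENDIANNESS environment variable, next to the existing RFID_VALUE and RFID_LABEL_ID. As a rough illustration of the consuming side, here is a hypothetical external hook script; only the three variable names come from the diff, everything else (including the default endianness string) is assumed:

import os
import sys


def main() -> int:
    rfid_value = os.environ.get("RFID_VALUE", "")
    label_id = os.environ.get("RFID_LABEL_ID", "")
    endianness = os.environ.get("RFID_ENDIANNESS", "BIG")  # default string is an assumption
    if not rfid_value:
        return 1
    # A real hook might open a relay, log the scan, or call an external API here.
    print(f"scanned tag {rfid_value} (label {label_id}, endianness {endianness})")
    return 0


if __name__ == "__main__":
    sys.exit(main())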
core/celery_utils.py
ADDED
@@ -0,0 +1,73 @@
+"""Utilities for working with Celery periodic task names."""
+
+from __future__ import annotations
+
+import re
+from typing import Set
+
+from django.db import transaction
+from django.db.utils import IntegrityError
+
+
+def slugify_task_name(name: str) -> str:
+    """Return a slugified task name using dashes.
+
+    Celery stores periodic task names in the database and historically these
+    values included underscores or dotted module paths. The scheduler UI reads
+    these values directly, so we collapse consecutive underscores or dots into a
+    single dash to keep them human readable while remaining unique.
+    """
+
+    slug = re.sub(r"[._]+", "-", name)
+    # Collapse any accidental duplicate separators that may result from the
+    # replacement so ``foo__bar`` and ``foo..bar`` both become ``foo-bar``.
+    slug = re.sub(r"-{2,}", "-", slug)
+    return slug
+
+
+def periodic_task_name_variants(name: str) -> Set[str]:
+    """Return legacy and slugified variants for a periodic task name."""
+
+    slug = slugify_task_name(name)
+    if slug == name:
+        return {name}
+    return {name, slug}
+
+
+def normalize_periodic_task_name(manager, name: str) -> str:
+    """Ensure the stored periodic task name matches the slugified form.
+
+    The helper renames any rows that still use the legacy value so that follow-up
+    ``update_or_create`` calls keep working without leaving duplicate tasks in
+    the scheduler. When a conflicting slug already exists we keep the slugged
+    version and remove the legacy entry.
+    """
+
+    slug = slugify_task_name(name)
+    if slug == name:
+        return slug
+
+    for task in manager.filter(name=name):
+        conflict = manager.filter(name=slug).exclude(pk=task.pk).first()
+        if conflict:
+            # Preserve foreign key references when possible before removing the
+            # legacy row.
+            related_attr = getattr(task, "client_report_schedule", None)
+            if related_attr and getattr(conflict, "client_report_schedule", None) is None:
+                related_attr.periodic_task = conflict
+                related_attr.save(update_fields=["periodic_task"])
+            task.delete()
+            continue
+
+        task.name = slug
+        try:
+            with transaction.atomic():
+                task.save(update_fields=["name"])
+        except IntegrityError:
+            # Another process may have created the slug in between the select and
+            # the update. Fall back to deleting the legacy row to avoid duplicate
+            # scheduler entries.
+            task.refresh_from_db()
+            if task.name != slug:
+                task.delete()
+    return slug
core/changelog.py
CHANGED
@@ -154,9 +154,53 @@ def _parse_sections(text: str) -> List[ChangelogSection]:
     return sections
 
 
+def _latest_release_version(previous_text: str) -> Optional[str]:
+    for section in _parse_sections(previous_text):
+        if section.version:
+            return section.version
+    return None
+
+
+def _find_release_commit(version: str) -> Optional[str]:
+    normalized = version.lstrip("v")
+    search_terms = [
+        f"Release v{normalized}",
+        f"Release {normalized}",
+        f"pre-release commit v{normalized}",
+        f"pre-release commit {normalized}",
+    ]
+    for term in search_terms:
+        proc = subprocess.run(
+            [
+                "git",
+                "log",
+                "--max-count=1",
+                "--format=%H",
+                "--fixed-strings",
+                f"--grep={term}",
+            ],
+            capture_output=True,
+            text=True,
+            check=False,
+        )
+        sha = proc.stdout.strip()
+        if sha:
+            return sha.splitlines()[0]
+    return None
+
+
+def _resolve_release_commit_from_text(previous_text: str) -> Optional[str]:
+    version = _latest_release_version(previous_text)
+    if not version:
+        return None
+    return _find_release_commit(version)
+
+
 def _merge_sections(
     new_sections: Iterable[ChangelogSection],
     old_sections: Iterable[ChangelogSection],
+    *,
+    reopen_latest: bool = False,
 ) -> List[ChangelogSection]:
     merged = list(new_sections)
     old_sections_list = list(old_sections)
@@ -199,7 +243,8 @@ def _merge_sections(
         existing = version_to_section.get(old.version)
         if existing is None:
             if (
-                first_release_version
+                reopen_latest
+                and first_release_version
                 and old.version == first_release_version
                 and not reopened_latest_version
                 and unreleased_section is not None
@@ -274,29 +319,45 @@ def _resolve_start_tag(explicit: str | None = None) -> Optional[str]:
     return None
 
 
-def determine_range_spec(
+def determine_range_spec(
+    start_tag: str | None = None, *, previous_text: str | None = None
+) -> str:
     """Return the git range specification to build the changelog."""
 
     resolved = _resolve_start_tag(start_tag)
     if resolved:
         return f"{resolved}..HEAD"
+
+    if previous_text:
+        release_commit = _resolve_release_commit_from_text(previous_text)
+        if release_commit:
+            return f"{release_commit}..HEAD"
+
     return "HEAD"
 
 
 def collect_sections(
-    *,
+    *,
+    range_spec: str = "HEAD",
+    previous_text: str | None = None,
+    reopen_latest: bool = False,
 ) -> List[ChangelogSection]:
     """Return changelog sections for *range_spec*.
 
     When ``previous_text`` is provided, sections not regenerated in the current run
-    are appended so long as they can be parsed from the existing changelog.
+    are appended so long as they can be parsed from the existing changelog. Set
+    ``reopen_latest`` to ``True`` when the caller intends to move the most recent
+    release notes back into the ``Unreleased`` section (for example, when
+    preparing a release retry before a new tag is created).
     """
 
     commits = _read_commits(range_spec)
     sections = _sections_from_commits(commits)
     if previous_text:
         old_sections = _parse_sections(previous_text)
-        sections = _merge_sections(
+        sections = _merge_sections(
+            sections, old_sections, reopen_latest=reopen_latest
+        )
     return sections
 
 
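
Taken together, the new helpers give determine_range_spec a second anchor: when no start tag can be resolved, the previous changelog text is parsed for its latest release heading and the matching "Release vX.Y.Z" or "pre-release commit" message is located with git log --grep, so the range becomes <release-commit>..HEAD instead of the whole history. A hedged usage sketch, meant to run inside a checkout of the repository; the changelog filename is an assumption:

from pathlib import Path

from core.changelog import collect_sections, determine_range_spec

previous = Path("CHANGELOG.rst").read_text()  # filename is an assumption
range_spec = determine_range_spec(None, previous_text=previous)

# Sections built from the new commits, merged with the ones already in the file.
sections = collect_sections(
    range_spec=range_spec,
    previous_text=previous,
    reopen_latest=False,  # True would fold the latest release back into "Unreleased"
)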
core/environment.py
CHANGED
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import os
-
 from django.conf import settings
 from django.contrib import admin
 from django.template.response import TemplateResponse
@@ -20,18 +19,18 @@ def _environment_view(request):
     context = admin.site.each_context(request)
     context.update(
         {
-            "title": _("
+            "title": _("Environment"),
             "env_vars": env_vars,
+            "environment_tasks": [],
         }
     )
     return TemplateResponse(request, "admin/environment.html", context)
 
-
 def _config_view(request):
     context = admin.site.each_context(request)
     context.update(
         {
-            "title": _("Config"),
+            "title": _("Django Config"),
             "django_settings": _get_django_settings(),
         }
     )
@@ -39,7 +38,7 @@ def _config_view(request):
 
 
 def patch_admin_environment_view() -> None:
-    """
+    """Register the Environment and Config admin views on the main admin site."""
     original_get_urls = admin.site.get_urls
 
     def get_urls():