arthexis 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic; see the registry's advisory page for details.
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/METADATA +36 -26
- arthexis-0.1.12.dist-info/RECORD +102 -0
- config/context_processors.py +1 -0
- config/settings.py +31 -5
- config/urls.py +5 -4
- core/admin.py +430 -90
- core/apps.py +48 -2
- core/backends.py +38 -0
- core/environment.py +23 -5
- core/mailer.py +3 -1
- core/models.py +303 -31
- core/reference_utils.py +20 -9
- core/release.py +4 -0
- core/sigil_builder.py +7 -2
- core/sigil_resolver.py +35 -4
- core/system.py +250 -1
- core/tasks.py +92 -40
- core/temp_passwords.py +181 -0
- core/test_system_info.py +62 -2
- core/tests.py +169 -3
- core/user_data.py +51 -8
- core/views.py +371 -20
- nodes/admin.py +453 -8
- nodes/backends.py +21 -6
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/models.py +374 -31
- nodes/reports.py +411 -0
- nodes/tests.py +677 -38
- nodes/utils.py +32 -0
- nodes/views.py +14 -0
- ocpp/admin.py +278 -15
- ocpp/consumers.py +517 -16
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +237 -4
- ocpp/reference_utils.py +42 -0
- ocpp/simulator.py +321 -22
- ocpp/store.py +110 -2
- ocpp/test_rfid.py +169 -7
- ocpp/tests.py +819 -6
- ocpp/transactions_io.py +17 -3
- ocpp/views.py +233 -19
- pages/admin.py +144 -4
- pages/context_processors.py +21 -7
- pages/defaults.py +13 -0
- pages/forms.py +38 -0
- pages/models.py +189 -15
- pages/tests.py +281 -8
- pages/urls.py +4 -0
- pages/views.py +137 -21
- arthexis-0.1.10.dist-info/RECORD +0 -95
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/WHEEL +0 -0
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/licenses/LICENSE +0 -0
- {arthexis-0.1.10.dist-info → arthexis-0.1.12.dist-info}/top_level.txt +0 -0
nodes/reports.py
ADDED
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
"""Utilities for generating Celery-focused admin reports."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections import deque
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from datetime import datetime, timedelta, timezone as dt_timezone
|
|
8
|
+
import numbers
|
|
9
|
+
import re
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Iterable, Iterator
|
|
12
|
+
|
|
13
|
+
from django.conf import settings
|
|
14
|
+
from django.utils import timezone
|
|
15
|
+
from django.utils.translation import gettext_lazy as _
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass(frozen=True)
class ReportPeriod:
    """Representation of an available reporting window."""

    # Stable identifier used to look the period up (e.g. "1d").
    key: str
    # Translated, human-facing label for the period.
    label: str
    # Length of the reporting window.
    delta: timedelta
# Display order for the offered reporting periods.
REPORT_PERIOD_ORDER = ("1d", "7d", "30d")
# Available reporting windows keyed by their identifier; the first entry in
# REPORT_PERIOD_ORDER acts as the default (see resolve_period).
REPORT_PERIODS: dict[str, ReportPeriod] = {
    "1d": ReportPeriod("1d", _("Single day"), timedelta(days=1)),
    "7d": ReportPeriod("7d", _("Seven days"), timedelta(days=7)),
    "30d": ReportPeriod("30d", _("Monthly"), timedelta(days=30)),
}
@dataclass(frozen=True)
class ScheduledTaskSummary:
    """Human-friendly representation of a Celery scheduled task."""

    # Display name of the entry (DB row name or settings key).
    name: str
    # Dotted path of the Celery task to execute.
    task: str
    # Schedule category, e.g. "clocked"/"solar"/"crontab"/"interval"/"unknown".
    schedule_type: str
    # Human-readable schedule text for display.
    schedule_description: str
    # Estimated next execution time; None when it cannot be determined.
    next_run: datetime | None
    # Whether the entry is currently enabled.
    enabled: bool
    # Translated origin label (e.g. "Database" or "Settings").
    source: str
@dataclass(frozen=True)
class CeleryLogEntry:
    """A parsed log entry relevant to Celery activity."""

    # Parsed time the line was logged (made timezone-aware when possible).
    timestamp: datetime
    # Log level name as it appeared in the line (e.g. "INFO").
    level: str
    # Name of the logger that emitted the line.
    logger: str
    # Log message with surrounding whitespace stripped.
    message: str
    # Filename of the log file the entry was read from.
    source: str
@dataclass(frozen=True)
class CeleryLogCollection:
    """Container for log entries and the sources scanned."""

    # The collected log entries.
    entries: list[CeleryLogEntry]
    # Names of every log file that was inspected, matched or not.
    checked_sources: list[str]
# Matches lines shaped like "2024-01-02 03:04:05,123 [INFO] some.logger: msg".
# The fractional-seconds part (",123") is optional.
_LOG_LINE_PATTERN = re.compile(
    r"^(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(?:,\d{1,6})?) "
    r"\[(?P<level>[A-Z]+)\] (?P<logger>[^:]+): (?P<message>.*)$"
)
def iter_report_periods() -> Iterator[ReportPeriod]:
    """Yield configured reporting periods in display order."""

    yield from (REPORT_PERIODS[key] for key in REPORT_PERIOD_ORDER)
def resolve_period(period_key: str | None) -> ReportPeriod:
    """Return the requested reporting period or fall back to the default."""

    default = REPORT_PERIODS[REPORT_PERIOD_ORDER[0]]
    if not period_key:
        return default
    return REPORT_PERIODS.get(period_key, default)
def collect_scheduled_tasks(now: datetime, window_end: datetime) -> list[ScheduledTaskSummary]:
    """Return Celery tasks scheduled to run before ``window_end``.

    Tasks with unknown scheduling information are included to avoid omitting
    potentially important configuration.
    """

    candidates = [*_collect_db_tasks(now), *_collect_settings_tasks(now)]
    within_window = [
        summary
        for summary in candidates
        if summary.next_run is None or summary.next_run <= window_end
    ]

    # Entries without a next run sort last via a far-future sentinel;
    # ties break alphabetically by name.
    sentinel = datetime.max.replace(tzinfo=dt_timezone.utc)
    within_window.sort(key=lambda item: (item.next_run or sentinel, item.name.lower()))
    return within_window
def collect_celery_log_entries(
    start: datetime, end: datetime, *, max_lines: int = 500
) -> CeleryLogCollection:
    """Return Celery-related log entries within ``start`` and ``end``."""

    collected: list[CeleryLogEntry] = []
    scanned: list[str] = []

    for log_path in _candidate_log_files():
        scanned.append(log_path.name)
        collected.extend(
            entry
            for entry in _read_log_entries(log_path, max_lines=max_lines)
            if start <= entry.timestamp <= end
        )

    # Newest entries first for display.
    collected.sort(key=lambda entry: entry.timestamp, reverse=True)
    return CeleryLogCollection(entries=collected, checked_sources=scanned)
def _collect_db_tasks(now: datetime) -> list[ScheduledTaskSummary]:
    """Summarize django-celery-beat ``PeriodicTask`` rows from the database.

    Returns an empty list when django-celery-beat is not installed or the
    database cannot be queried.
    """

    try:  # pragma: no cover - optional dependency guard
        from django_celery_beat.models import PeriodicTask
    except Exception:
        return []

    try:
        queryset = PeriodicTask.objects.select_related(
            "interval", "crontab", "solar", "clocked"
        )
        periodic_tasks = list(queryset)
    except Exception:  # pragma: no cover - database unavailable
        return []

    db_label = str(_("Database"))
    results: list[ScheduledTaskSummary] = []
    for periodic in periodic_tasks:
        schedule = getattr(periodic, "schedule", None)
        results.append(
            ScheduledTaskSummary(
                name=periodic.name,
                task=periodic.task,
                schedule_type=_determine_schedule_type(periodic),
                schedule_description=_describe_db_schedule(periodic),
                next_run=_estimate_next_run(
                    now, schedule, periodic.last_run_at, periodic.start_time
                ),
                enabled=bool(periodic.enabled),
                source=db_label,
            )
        )
    return results
def _collect_settings_tasks(now: datetime) -> list[ScheduledTaskSummary]:
    """Summarize beat entries declared via ``CELERY_BEAT_SCHEDULE`` settings."""

    configured = getattr(settings, "CELERY_BEAT_SCHEDULE", {})
    settings_label = str(_("Settings"))

    def _summarize(entry_name: str, entry) -> ScheduledTaskSummary:
        # Settings entries have no stored run history, so estimation uses
        # only the schedule itself; they are always considered enabled.
        schedule = entry.get("schedule")
        return ScheduledTaskSummary(
            name=entry_name,
            task=str(entry.get("task", "")),
            schedule_type=_describe_schedule_type(schedule),
            schedule_description=_describe_settings_schedule(schedule),
            next_run=_estimate_next_run(now, schedule, None, None),
            enabled=True,
            source=settings_label,
        )

    return [_summarize(name, config) for name, config in configured.items()]
def _determine_schedule_type(task) -> str:
|
|
195
|
+
if getattr(task, "clocked_id", None):
|
|
196
|
+
return "clocked"
|
|
197
|
+
if getattr(task, "solar_id", None):
|
|
198
|
+
return "solar"
|
|
199
|
+
if getattr(task, "crontab_id", None):
|
|
200
|
+
return "crontab"
|
|
201
|
+
if getattr(task, "interval_id", None):
|
|
202
|
+
return "interval"
|
|
203
|
+
return "unknown"
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def _estimate_next_run(
    now: datetime,
    schedule,
    last_run_at: datetime | None,
    start_time: datetime | None,
) -> datetime | None:
    """Best-effort estimate of a task's next run time.

    ``schedule`` may be a ``timedelta``, a plain number of seconds, a
    ``datetime`` (clocked entries), or a Celery schedule object exposing
    ``remaining_estimate``/``is_due``.  Returns ``None`` when no estimate
    can be produced.
    """

    if schedule is None:
        return None

    if isinstance(schedule, timedelta):
        return now + schedule

    if isinstance(schedule, numbers.Real):
        return now + timedelta(seconds=float(schedule))

    if isinstance(schedule, datetime):
        # Clocked schedules run once at the given moment; report it even if
        # it is already in the past so stale entries stay visible.  (The
        # previous `if candidate >= now` check was dead code — both branches
        # returned the same value.)
        return _make_aware(schedule)

    # Celery schedule objects carry their own notion of "now" (timezone).
    schedule_now = _schedule_now(schedule, now)
    candidate_start = _coerce_with_schedule(schedule, start_time)
    if candidate_start and candidate_start > schedule_now:
        return candidate_start

    reference = _coerce_with_schedule(schedule, last_run_at) or schedule_now

    try:
        remaining = schedule.remaining_estimate(reference)
        if remaining is None:
            return None
        return schedule_now + remaining
    except Exception:
        # Fall back to the lower-level is_due() protocol.
        try:
            due, next_time_to_run = schedule.is_due(reference)
        except Exception:
            return None
        if due:
            return schedule_now
        try:
            seconds = float(next_time_to_run)
        except (TypeError, ValueError):
            return None
        return schedule_now + timedelta(seconds=seconds)
def _schedule_now(schedule, fallback: datetime) -> datetime:
|
|
254
|
+
if hasattr(schedule, "now") and hasattr(schedule, "maybe_make_aware"):
|
|
255
|
+
try:
|
|
256
|
+
current = schedule.maybe_make_aware(schedule.now())
|
|
257
|
+
if isinstance(current, datetime):
|
|
258
|
+
return current
|
|
259
|
+
except Exception:
|
|
260
|
+
pass
|
|
261
|
+
return fallback
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
def _coerce_with_schedule(schedule, value: datetime | None) -> datetime | None:
    """Normalize ``value`` using the schedule's timezone logic when available."""

    if value is None:
        return None
    coercer = getattr(schedule, "maybe_make_aware", None)
    if coercer is not None:
        try:
            coerced = coercer(value)
        except Exception:
            coerced = None
        if isinstance(coerced, datetime):
            return coerced
    # Fall back to the generic Django-based conversion.
    return _make_aware(value)
def _make_aware(value: datetime) -> datetime:
    """Attach the current timezone to a naive datetime, best effort."""

    if not timezone.is_naive(value):
        return value
    try:
        return timezone.make_aware(value)
    except Exception:
        # e.g. ambiguous or nonexistent local times — keep the naive value.
        return value
def _describe_db_schedule(task) -> str:
|
|
287
|
+
schedule = getattr(task, "schedule", None)
|
|
288
|
+
if schedule is None:
|
|
289
|
+
return ""
|
|
290
|
+
|
|
291
|
+
try:
|
|
292
|
+
human_readable = getattr(schedule, "human_readable", None)
|
|
293
|
+
if callable(human_readable):
|
|
294
|
+
return str(human_readable())
|
|
295
|
+
if isinstance(human_readable, str):
|
|
296
|
+
return human_readable
|
|
297
|
+
except Exception:
|
|
298
|
+
pass
|
|
299
|
+
|
|
300
|
+
for attr in ("clocked", "solar", "crontab", "interval"):
|
|
301
|
+
obj = getattr(task, attr, None)
|
|
302
|
+
if obj is not None:
|
|
303
|
+
return str(obj)
|
|
304
|
+
return str(schedule)
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def _describe_schedule_type(schedule) -> str:
|
|
308
|
+
type_name = type(schedule).__name__ if schedule is not None else "unknown"
|
|
309
|
+
return type_name.replace("Schedule", "").lower()
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
def _describe_settings_schedule(schedule) -> str:
|
|
313
|
+
if schedule is None:
|
|
314
|
+
return ""
|
|
315
|
+
|
|
316
|
+
try:
|
|
317
|
+
human_readable = getattr(schedule, "human_readable", None)
|
|
318
|
+
if callable(human_readable):
|
|
319
|
+
return str(human_readable())
|
|
320
|
+
if isinstance(human_readable, str):
|
|
321
|
+
return human_readable
|
|
322
|
+
except Exception:
|
|
323
|
+
pass
|
|
324
|
+
|
|
325
|
+
if isinstance(schedule, timedelta):
|
|
326
|
+
return str(schedule)
|
|
327
|
+
if isinstance(schedule, numbers.Real):
|
|
328
|
+
return _("Every %(seconds)s seconds") % {"seconds": schedule}
|
|
329
|
+
return str(schedule)
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
def _candidate_log_files() -> Iterable[Path]:
    """Yield existing log files that may contain Celery output.

    Fixes the previous ineffective guard: ``Path`` objects are always truthy
    and ``log_dir / ""`` collapses to ``log_dir`` itself, so an unset
    ``LOG_FILE_NAME`` could cause the log *directory* to be yielded.  The
    empty-name check now happens before the path is built, and only regular
    files are yielded.
    """

    log_dir = Path(settings.LOG_DIR)
    names = ["celery.log", "celery-worker.log", "celery-beat.log"]
    extra_name = getattr(settings, "LOG_FILE_NAME", "")
    if extra_name:  # skip before joining; joining "" would yield log_dir
        names.append(extra_name)

    seen: set[Path] = set()
    for name in names:
        path = log_dir / name
        if path in seen:
            continue
        seen.add(path)
        if path.is_file():  # directories and missing paths are skipped
            yield path
def _read_log_entries(path: Path, *, max_lines: int) -> Iterator[CeleryLogEntry]:
    """Yield parsed Celery entries from the last ``max_lines`` lines of ``path``."""

    try:
        with path.open("r", encoding="utf-8", errors="ignore") as handle:
            # Keep only the tail of the file; deque discards older lines.
            tail = deque(handle, maxlen=max_lines)
    except OSError:  # pragma: no cover - filesystem errors
        return iter(())

    parsed = (_parse_log_line(raw_line, path.name) for raw_line in tail)
    return (entry for entry in parsed if entry is not None)
def _parse_log_line(line: str, source: str) -> CeleryLogEntry | None:
    """Parse one raw log line; return None for unparseable or unrelated lines."""

    parsed = _LOG_LINE_PATTERN.match(line)
    if parsed is None:
        return None

    when = _parse_timestamp(parsed.group("timestamp"))
    if when is None:
        return None

    fields = {
        key: parsed.group(key).strip() for key in ("level", "logger", "message")
    }
    if not _is_celery_related(fields["logger"], fields["message"]):
        return None

    return CeleryLogEntry(
        timestamp=when,
        level=fields["level"],
        logger=fields["logger"],
        message=fields["message"],
        source=source,
    )
def _parse_timestamp(value: str) -> datetime | None:
    """Parse a log timestamp, trying known formats then ISO-8601."""

    # Comma-separated microseconds first (standard logging format).
    for fmt in ("%Y-%m-%d %H:%M:%S,%f", "%Y-%m-%d %H:%M:%S"):
        try:
            return _make_aware(datetime.strptime(value, fmt))
        except ValueError:
            continue
    try:
        parsed = datetime.fromisoformat(value)
    except ValueError:
        return None
    return _make_aware(parsed)
def _is_celery_related(logger_name: str, message: str) -> bool:
|
|
406
|
+
logger_lower = logger_name.lower()
|
|
407
|
+
message_lower = message.lower()
|
|
408
|
+
if any(keyword in logger_lower for keyword in ("celery", "task", "beat")):
|
|
409
|
+
return True
|
|
410
|
+
return "celery" in message_lower or "task" in message_lower
|
|
411
|
+
|