arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release.
- arthexis-0.1.26.dist-info/METADATA +272 -0
- arthexis-0.1.26.dist-info/RECORD +111 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
- config/__init__.py +5 -5
- config/active_app.py +15 -15
- config/asgi.py +29 -29
- config/auth_app.py +7 -7
- config/celery.py +32 -25
- config/context_processors.py +67 -68
- config/horologia_app.py +7 -7
- config/loadenv.py +11 -11
- config/logging.py +59 -48
- config/middleware.py +71 -25
- config/offline.py +49 -49
- config/settings.py +676 -492
- config/settings_helpers.py +109 -0
- config/urls.py +228 -159
- config/wsgi.py +17 -17
- core/admin.py +4052 -2066
- core/admin_history.py +50 -50
- core/admindocs.py +192 -151
- core/apps.py +350 -223
- core/auto_upgrade.py +72 -0
- core/backends.py +311 -124
- core/changelog.py +403 -0
- core/entity.py +149 -133
- core/environment.py +60 -43
- core/fields.py +168 -75
- core/form_fields.py +75 -0
- core/github_helper.py +188 -25
- core/github_issues.py +183 -172
- core/github_repos.py +72 -0
- core/lcd_screen.py +78 -78
- core/liveupdate.py +25 -25
- core/log_paths.py +114 -100
- core/mailer.py +89 -83
- core/middleware.py +91 -91
- core/models.py +5041 -2195
- core/notifications.py +105 -105
- core/public_wifi.py +267 -227
- core/reference_utils.py +107 -0
- core/release.py +940 -346
- core/rfid_import_export.py +113 -0
- core/sigil_builder.py +149 -131
- core/sigil_context.py +20 -20
- core/sigil_resolver.py +250 -284
- core/system.py +1425 -230
- core/tasks.py +538 -199
- core/temp_passwords.py +181 -0
- core/test_system_info.py +202 -43
- core/tests.py +2673 -1069
- core/tests_liveupdate.py +17 -17
- core/urls.py +11 -11
- core/user_data.py +681 -495
- core/views.py +2484 -789
- core/widgets.py +213 -51
- nodes/admin.py +2236 -445
- nodes/apps.py +98 -70
- nodes/backends.py +160 -53
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/lcd.py +165 -165
- nodes/models.py +2375 -870
- nodes/reports.py +411 -0
- nodes/rfid_sync.py +210 -0
- nodes/signals.py +18 -0
- nodes/tasks.py +141 -46
- nodes/tests.py +5045 -1489
- nodes/urls.py +29 -13
- nodes/utils.py +172 -73
- nodes/views.py +1768 -304
- ocpp/admin.py +1775 -481
- ocpp/apps.py +25 -25
- ocpp/consumers.py +1843 -630
- ocpp/evcs.py +844 -928
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +1417 -640
- ocpp/network.py +398 -0
- ocpp/reference_utils.py +42 -0
- ocpp/routing.py +11 -9
- ocpp/simulator.py +745 -368
- ocpp/status_display.py +26 -0
- ocpp/store.py +603 -403
- ocpp/tasks.py +479 -31
- ocpp/test_export_import.py +131 -130
- ocpp/test_rfid.py +1072 -540
- ocpp/tests.py +5494 -2296
- ocpp/transactions_io.py +197 -165
- ocpp/urls.py +50 -50
- ocpp/views.py +2024 -912
- pages/admin.py +1123 -396
- pages/apps.py +45 -10
- pages/checks.py +40 -40
- pages/context_processors.py +151 -85
- pages/defaults.py +13 -0
- pages/forms.py +221 -0
- pages/middleware.py +213 -153
- pages/models.py +720 -252
- pages/module_defaults.py +156 -0
- pages/site_config.py +137 -0
- pages/tasks.py +74 -0
- pages/tests.py +4009 -1389
- pages/urls.py +38 -20
- pages/utils.py +93 -12
- pages/views.py +1736 -762
- arthexis-0.1.9.dist-info/METADATA +0 -168
- arthexis-0.1.9.dist-info/RECORD +0 -92
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- nodes/actions.py +0 -70
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
ocpp/tasks.py
CHANGED
@@ -1,31 +1,479 @@
(the 31 replaced lines are truncated in this view; only fragments of the former imports remain)
+import base64
+import json
+import logging
+import uuid
+from datetime import date, datetime, time, timedelta
+from pathlib import Path
+
+from asgiref.sync import async_to_sync
+from celery import shared_task
+from django.conf import settings
+from django.contrib.auth import get_user_model
+from django.db.models import Q
+from django.utils import timezone
+import requests
+from requests import RequestException
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import padding
+
+from core import mailer
+from nodes.models import Node
+
+from . import store
+from .models import Charger, MeterValue, Transaction
+from .network import (
+    newest_transaction_timestamp,
+    serialize_charger_for_network,
+    serialize_transactions_for_forwarding,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _sign_payload(payload_json: str, private_key) -> str | None:
+    if not private_key:
+        return None
+    try:
+        signature = private_key.sign(
+            payload_json.encode(),
+            padding.PKCS1v15(),
+            hashes.SHA256(),
+        )
+    except Exception:
+        return None
+    return base64.b64encode(signature).decode()
+
+
+@shared_task
+def check_charge_point_configuration(charger_pk: int) -> bool:
+    """Request the latest configuration from a connected charge point."""
+
+    try:
+        charger = Charger.objects.get(pk=charger_pk)
+    except Charger.DoesNotExist:
+        logger.warning(
+            "Unable to request configuration for missing charger %s",
+            charger_pk,
+        )
+        return False
+
+    connector_value = charger.connector_id
+    if connector_value is not None:
+        logger.debug(
+            "Skipping charger %s: connector %s is not eligible for automatic configuration checks",
+            charger.charger_id,
+            connector_value,
+        )
+        return False
+
+    ws = store.get_connection(charger.charger_id, connector_value)
+    if ws is None:
+        logger.info(
+            "Charge point %s is not connected; configuration request skipped",
+            charger.charger_id,
+        )
+        return False
+
+    message_id = uuid.uuid4().hex
+    payload: dict[str, object] = {}
+    msg = json.dumps([2, message_id, "GetConfiguration", payload])
+
+    try:
+        async_to_sync(ws.send)(msg)
+    except Exception as exc:  # pragma: no cover - network error
+        logger.warning(
+            "Failed to send GetConfiguration to %s (%s)",
+            charger.charger_id,
+            exc,
+        )
+        return False
+
+    log_key = store.identity_key(charger.charger_id, connector_value)
+    store.add_log(log_key, f"< {msg}", log_type="charger")
+    store.register_pending_call(
+        message_id,
+        {
+            "action": "GetConfiguration",
+            "charger_id": charger.charger_id,
+            "connector_id": connector_value,
+            "log_key": log_key,
+            "requested_at": timezone.now(),
+        },
+    )
+    store.schedule_call_timeout(
+        message_id,
+        timeout=5.0,
+        action="GetConfiguration",
+        log_key=log_key,
+        message=(
+            "GetConfiguration timed out: charger did not respond"
+            " (operation may not be supported)"
+        ),
+    )
+    logger.info(
+        "Requested configuration from charge point %s",
+        charger.charger_id,
+    )
+    return True
+
+
+@shared_task
+def schedule_daily_charge_point_configuration_checks() -> int:
+    """Dispatch configuration requests for eligible charge points."""
+
+    charger_ids = list(
+        Charger.objects.filter(connector_id__isnull=True).values_list("pk", flat=True)
+    )
+    if not charger_ids:
+        logger.debug("No eligible charge points available for configuration check")
+        return 0
+
+    scheduled = 0
+    for charger_pk in charger_ids:
+        check_charge_point_configuration.delay(charger_pk)
+        scheduled += 1
+    logger.info(
+        "Scheduled configuration checks for %s charge point(s)", scheduled
+    )
+    return scheduled
+
+
+@shared_task
+def purge_meter_values() -> int:
+    """Delete meter values older than 7 days.
+
+    Values tied to transactions without a recorded meter_stop are preserved so
+    that ongoing or incomplete sessions retain their energy data.
+    Returns the number of deleted rows.
+    """
+    cutoff = timezone.now() - timedelta(days=7)
+    qs = MeterValue.objects.filter(timestamp__lt=cutoff).filter(
+        Q(transaction__isnull=True) | Q(transaction__meter_stop__isnull=False)
+    )
+    deleted, _ = qs.delete()
+    logger.info("Purged %s meter values", deleted)
+    return deleted
+
+
+# Backwards compatibility alias
+purge_meter_readings = purge_meter_values
+
+
+@shared_task
+def push_forwarded_charge_points() -> int:
+    """Push local charge point sessions to configured upstream nodes."""
+
+    local = Node.get_local()
+    if not local:
+        logger.debug("Forwarding skipped: local node not registered")
+        return 0
+
+    private_key = local.get_private_key()
+    if private_key is None:
+        logger.warning("Forwarding skipped: missing local node private key")
+        return 0
+
+    chargers_qs = (
+        Charger.objects.filter(export_transactions=True, forwarded_to__isnull=False)
+        .select_related("forwarded_to", "node_origin")
+        .order_by("pk")
+    )
+
+    node_filter = Q(node_origin__isnull=True)
+    if local.pk:
+        node_filter |= Q(node_origin=local)
+
+    chargers = list(chargers_qs.filter(node_filter))
+    if not chargers:
+        return 0
+
+    grouped: dict[Node, list[Charger]] = {}
+    for charger in chargers:
+        target = charger.forwarded_to
+        if not target:
+            continue
+        if local.pk and target.pk == local.pk:
+            continue
+        grouped.setdefault(target, []).append(charger)
+
+    if not grouped:
+        return 0
+
+    forwarded_total = 0
+
+    for node, node_chargers in grouped.items():
+        if not node_chargers:
+            continue
+
+        initializing = [ch for ch in node_chargers if ch.forwarding_watermark is None]
+        charger_by_pk = {ch.pk: ch for ch in node_chargers}
+        transactions_map: dict[int, list[Transaction]] = {}
+
+        for charger in node_chargers:
+            watermark = charger.forwarding_watermark
+            if watermark is None:
+                continue
+            tx_queryset = (
+                Transaction.objects.filter(charger=charger, start_time__gt=watermark)
+                .select_related("charger")
+                .prefetch_related("meter_values")
+                .order_by("start_time")
+            )
+            txs = list(tx_queryset)
+            if txs:
+                transactions_map[charger.pk] = txs
+
+        transaction_payload = {"chargers": [], "transactions": []}
+        for charger_pk, txs in transactions_map.items():
+            charger = charger_by_pk[charger_pk]
+            transaction_payload["chargers"].append(
+                {
+                    "charger_id": charger.charger_id,
+                    "connector_id": charger.connector_id,
+                    "require_rfid": charger.require_rfid,
+                }
+            )
+            transaction_payload["transactions"].extend(
+                serialize_transactions_for_forwarding(txs)
+            )
+
+        payload = {
+            "requester": str(local.uuid),
+            "requester_mac": local.mac_address,
+            "requester_public_key": local.public_key,
+            "chargers": [serialize_charger_for_network(ch) for ch in initializing],
+        }
+
+        has_transactions = bool(transaction_payload["transactions"])
+        if has_transactions or payload["chargers"]:
+            payload["transactions"] = transaction_payload
+        else:
+            continue
+
+        payload_json = json.dumps(payload, separators=(",", ":"), sort_keys=True)
+        signature = _sign_payload(payload_json, private_key)
+        headers = {"Content-Type": "application/json"}
+        if signature:
+            headers["X-Signature"] = signature
+
+        success = False
+        attempted = False
+        for url in node.iter_remote_urls("/nodes/network/chargers/forward/"):
+            if not url:
+                continue
+
+            attempted = True
+            try:
+                response = requests.post(
+                    url, data=payload_json, headers=headers, timeout=5
+                )
+            except RequestException as exc:
+                logger.warning("Failed to forward chargers to %s: %s", node, exc)
+                continue
+
+            if not response.ok:
+                logger.warning(
+                    "Forwarding request to %s via %s returned %s",
+                    node,
+                    url,
+                    response.status_code,
+                )
+                continue
+
+            try:
+                data = response.json()
+            except ValueError:
+                logger.warning("Invalid JSON payload received from %s", node)
+                continue
+
+            if data.get("status") != "ok":
+                detail = data.get("detail") if isinstance(data, dict) else None
+                logger.warning(
+                    "Forwarding rejected by %s via %s: %s",
+                    node,
+                    url,
+                    detail or response.text or "Remote node rejected the request.",
+                )
+                continue
+
+            success = True
+            break
+
+        if not success:
+            if not attempted:
+                logger.warning(
+                    "No reachable host found for %s when forwarding chargers", node
+                )
+            continue
+
+        updates: dict[int, datetime] = {}
+        now = timezone.now()
+        for charger in initializing:
+            updates[charger.pk] = now
+        for charger_pk, txs in transactions_map.items():
+            latest = newest_transaction_timestamp(txs)
+            if latest:
+                updates[charger_pk] = latest
+
+        for pk, timestamp in updates.items():
+            Charger.objects.filter(pk=pk).update(forwarding_watermark=timestamp)
+
+        forwarded_total += len(transaction_payload["transactions"])
+
+    return forwarded_total
+
+
+# Backwards compatibility alias for legacy schedules
+sync_remote_chargers = push_forwarded_charge_points
+
+
+def _resolve_report_window() -> tuple[datetime, datetime, date]:
+    """Return the start/end datetimes for today's reporting window."""
+
+    current_tz = timezone.get_current_timezone()
+    today = timezone.localdate()
+    start = timezone.make_aware(datetime.combine(today, time.min), current_tz)
+    end = start + timedelta(days=1)
+    return start, end, today
+
+
+def _session_report_recipients() -> list[str]:
+    """Return the list of recipients for the daily session report."""
+
+    User = get_user_model()
+    recipients = list(
+        User.objects.filter(is_superuser=True)
+        .exclude(email="")
+        .values_list("email", flat=True)
+    )
+    if recipients:
+        return recipients
+
+    fallback = getattr(settings, "DEFAULT_FROM_EMAIL", "").strip()
+    return [fallback] if fallback else []
+
+
+def _format_duration(delta: timedelta | None) -> str:
+    """Return a compact string for ``delta`` or ``"in progress"``."""
+
+    if delta is None:
+        return "in progress"
+    total_seconds = int(delta.total_seconds())
+    hours, remainder = divmod(total_seconds, 3600)
+    minutes, seconds = divmod(remainder, 60)
+    parts: list[str] = []
+    if hours:
+        parts.append(f"{hours}h")
+    if minutes:
+        parts.append(f"{minutes}m")
+    if seconds or not parts:
+        parts.append(f"{seconds}s")
+    return " ".join(parts)
+
+
+def _format_charger(transaction: Transaction) -> str:
+    """Return a human friendly label for ``transaction``'s charger."""
+
+    charger = transaction.charger
+    if charger is None:
+        return "Unknown charger"
+    for attr in ("display_name", "name", "charger_id"):
+        value = getattr(charger, attr, "")
+        if value:
+            return str(value)
+    return str(charger)
+
+
+@shared_task
+def send_daily_session_report() -> int:
+    """Send a summary of today's OCPP sessions when email is available."""
+
+    if not mailer.can_send_email():
+        logger.info("Skipping OCPP session report: email not configured")
+        return 0
+
+    celery_lock = Path(settings.BASE_DIR) / "locks" / "celery.lck"
+    if not celery_lock.exists():
+        logger.info("Skipping OCPP session report: celery feature disabled")
+        return 0
+
+    recipients = _session_report_recipients()
+    if not recipients:
+        logger.info("Skipping OCPP session report: no recipients found")
+        return 0
+
+    start, end, today = _resolve_report_window()
+    transactions = list(
+        Transaction.objects.filter(start_time__gte=start, start_time__lt=end)
+        .select_related("charger", "account")
+        .order_by("start_time")
+    )
+    if not transactions:
+        logger.info("No OCPP sessions recorded on %s", today.isoformat())
+        return 0
+
+    total_energy = sum(transaction.kw for transaction in transactions)
+    lines = [
+        f"OCPP session report for {today.isoformat()}",
+        "",
+        f"Total sessions: {len(transactions)}",
+        f"Total energy: {total_energy:.2f} kWh",
+        "",
+    ]
+
+    for index, transaction in enumerate(transactions, start=1):
+        start_local = timezone.localtime(transaction.start_time)
+        stop_local = (
+            timezone.localtime(transaction.stop_time)
+            if transaction.stop_time
+            else None
+        )
+        duration = _format_duration(
+            stop_local - start_local if stop_local else None
+        )
+        account = transaction.account.name if transaction.account else "N/A"
+        connector = (
+            f"Connector {transaction.connector_id}" if transaction.connector_id else None
+        )
+        lines.append(f"{index}. {_format_charger(transaction)}")
+        lines.append(f" Account: {account}")
+        if transaction.rfid:
+            lines.append(f" RFID: {transaction.rfid}")
+        identifier = transaction.vehicle_identifier
+        if identifier:
+            label = "VID" if transaction.vehicle_identifier_source == "vid" else "VIN"
+            lines.append(f" {label}: {identifier}")
+        if connector:
+            lines.append(f" {connector}")
+        lines.append(
+            " Start: "
+            f"{start_local.strftime('%H:%M:%S %Z')}"
+        )
+        if stop_local:
+            lines.append(
+                " Stop: "
+                f"{stop_local.strftime('%H:%M:%S %Z')} ({duration})"
+            )
+        else:
+            lines.append(" Stop: in progress")
+        lines.append(f" Energy: {transaction.kw:.2f} kWh")
+        lines.append("")
+
+    subject = f"OCPP session report for {today.isoformat()}"
+    body = "\n".join(lines).strip()
+
+    node = Node.get_local()
+    if node is not None:
+        node.send_mail(subject, body, recipients)
+    else:
+        mailer.send(
+            subject,
+            body,
+            recipients,
+            getattr(settings, "DEFAULT_FROM_EMAIL", None),
+        )
+
+    logger.info(
+        "Sent OCPP session report for %s to %s", today.isoformat(), ", ".join(recipients)
+    )
+    return len(transactions)
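
The rewritten module is now a set of Celery shared tasks that are clearly meant to run on a schedule (nightly purging, a daily configuration sweep, periodic forwarding, and an end-of-day report). As a minimal sketch only, the snippet below shows how such tasks could be registered with Celery beat; the dotted task names follow from this module, but the entry names and timings are assumptions for illustration, not the schedule actually shipped in this release (which is presumably defined in config/celery.py or config/settings.py, both changed in this diff but not shown here).

# Illustrative sketch: wiring the new ocpp.tasks periodic tasks into Celery beat.
# Task paths are taken from ocpp/tasks.py; names and timings below are assumed.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "ocpp-purge-meter-values": {
        "task": "ocpp.tasks.purge_meter_values",
        "schedule": crontab(hour=3, minute=0),  # assumed: nightly cleanup
    },
    "ocpp-daily-configuration-checks": {
        "task": "ocpp.tasks.schedule_daily_charge_point_configuration_checks",
        "schedule": crontab(hour=6, minute=0),  # assumed: once per day
    },
    "ocpp-push-forwarded-charge-points": {
        "task": "ocpp.tasks.push_forwarded_charge_points",
        "schedule": 300.0,  # assumed: every five minutes (seconds)
    },
    "ocpp-daily-session-report": {
        "task": "ocpp.tasks.send_daily_session_report",
        "schedule": crontab(hour=23, minute=55),  # assumed: end-of-day summary
    },
}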