arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arthexis might be problematic. Click here for more details.
- arthexis-0.1.26.dist-info/METADATA +272 -0
- arthexis-0.1.26.dist-info/RECORD +111 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
- config/__init__.py +5 -5
- config/active_app.py +15 -15
- config/asgi.py +29 -29
- config/auth_app.py +7 -7
- config/celery.py +32 -25
- config/context_processors.py +67 -68
- config/horologia_app.py +7 -7
- config/loadenv.py +11 -11
- config/logging.py +59 -48
- config/middleware.py +71 -25
- config/offline.py +49 -49
- config/settings.py +676 -492
- config/settings_helpers.py +109 -0
- config/urls.py +228 -159
- config/wsgi.py +17 -17
- core/admin.py +4052 -2066
- core/admin_history.py +50 -50
- core/admindocs.py +192 -151
- core/apps.py +350 -223
- core/auto_upgrade.py +72 -0
- core/backends.py +311 -124
- core/changelog.py +403 -0
- core/entity.py +149 -133
- core/environment.py +60 -43
- core/fields.py +168 -75
- core/form_fields.py +75 -0
- core/github_helper.py +188 -25
- core/github_issues.py +183 -172
- core/github_repos.py +72 -0
- core/lcd_screen.py +78 -78
- core/liveupdate.py +25 -25
- core/log_paths.py +114 -100
- core/mailer.py +89 -83
- core/middleware.py +91 -91
- core/models.py +5041 -2195
- core/notifications.py +105 -105
- core/public_wifi.py +267 -227
- core/reference_utils.py +107 -0
- core/release.py +940 -346
- core/rfid_import_export.py +113 -0
- core/sigil_builder.py +149 -131
- core/sigil_context.py +20 -20
- core/sigil_resolver.py +250 -284
- core/system.py +1425 -230
- core/tasks.py +538 -199
- core/temp_passwords.py +181 -0
- core/test_system_info.py +202 -43
- core/tests.py +2673 -1069
- core/tests_liveupdate.py +17 -17
- core/urls.py +11 -11
- core/user_data.py +681 -495
- core/views.py +2484 -789
- core/widgets.py +213 -51
- nodes/admin.py +2236 -445
- nodes/apps.py +98 -70
- nodes/backends.py +160 -53
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/lcd.py +165 -165
- nodes/models.py +2375 -870
- nodes/reports.py +411 -0
- nodes/rfid_sync.py +210 -0
- nodes/signals.py +18 -0
- nodes/tasks.py +141 -46
- nodes/tests.py +5045 -1489
- nodes/urls.py +29 -13
- nodes/utils.py +172 -73
- nodes/views.py +1768 -304
- ocpp/admin.py +1775 -481
- ocpp/apps.py +25 -25
- ocpp/consumers.py +1843 -630
- ocpp/evcs.py +844 -928
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +1417 -640
- ocpp/network.py +398 -0
- ocpp/reference_utils.py +42 -0
- ocpp/routing.py +11 -9
- ocpp/simulator.py +745 -368
- ocpp/status_display.py +26 -0
- ocpp/store.py +603 -403
- ocpp/tasks.py +479 -31
- ocpp/test_export_import.py +131 -130
- ocpp/test_rfid.py +1072 -540
- ocpp/tests.py +5494 -2296
- ocpp/transactions_io.py +197 -165
- ocpp/urls.py +50 -50
- ocpp/views.py +2024 -912
- pages/admin.py +1123 -396
- pages/apps.py +45 -10
- pages/checks.py +40 -40
- pages/context_processors.py +151 -85
- pages/defaults.py +13 -0
- pages/forms.py +221 -0
- pages/middleware.py +213 -153
- pages/models.py +720 -252
- pages/module_defaults.py +156 -0
- pages/site_config.py +137 -0
- pages/tasks.py +74 -0
- pages/tests.py +4009 -1389
- pages/urls.py +38 -20
- pages/utils.py +93 -12
- pages/views.py +1736 -762
- arthexis-0.1.9.dist-info/METADATA +0 -168
- arthexis-0.1.9.dist-info/RECORD +0 -92
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- nodes/actions.py +0 -70
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
ocpp/store.py
CHANGED
|
@@ -1,403 +1,603 @@
|
|
|
1
|
-
"""In-memory store for OCPP data with file backed logs."""
|
|
2
|
-
|
|
3
|
-
from __future__ import annotations
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
from datetime import datetime
|
|
7
|
-
import json
|
|
8
|
-
import
|
|
9
|
-
import
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
return
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
def
|
|
151
|
-
"""
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
def
|
|
208
|
-
"""
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
serial
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
for
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
)
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
1
|
+
"""In-memory store for OCPP data with file backed logs."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
import json
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
import re
|
|
10
|
+
import threading
|
|
11
|
+
|
|
12
|
+
from core.log_paths import select_log_dir
|
|
13
|
+
|
|
14
|
+
# Separator between a charger serial and its connector slug in store keys.
IDENTITY_SEPARATOR = "#"
# Slug meaning "any / all connectors" for a charger.
AGGREGATE_SLUG = "all"
# Slug used before a connector id has been negotiated.
PENDING_SLUG = "pending"

# Maximum simultaneous websocket connections accepted per client IP.
MAX_CONNECTIONS_PER_IP = 2

# Active websocket consumers keyed by identity key (serial#slug).
connections: dict[str, object] = {}
# Active transactions keyed the same way as ``connections``.
transactions: dict[str, object] = {}
# In-memory log lines per id, split by log type.
logs: dict[str, dict[str, list[str]]] = {"charger": {}, "simulator": {}}
# store per charger session logs before they are flushed to disk
history: dict[str, dict[str, object]] = {}
simulators = {}
# Live consumers per client IP, used to enforce MAX_CONNECTIONS_PER_IP.
ip_connections: dict[str, set[object]] = {}
# Outstanding CSMS calls and their synchronisation primitives, all guarded
# by ``_pending_call_lock``.
pending_calls: dict[str, dict[str, object]] = {}
_pending_call_events: dict[str, threading.Event] = {}
_pending_call_results: dict[str, dict[str, object]] = {}
_pending_call_lock = threading.Lock()
# Follow-up messages expected after a TriggerMessage, keyed by serial.
triggered_followups: dict[str, list[dict[str, object]]] = {}

# mapping of charger id / cp_path to friendly names used for log files
log_names: dict[str, dict[str, str]] = {"charger": {}, "simulator": {}}

BASE_DIR = Path(__file__).resolve().parent.parent
LOG_DIR = select_log_dir(BASE_DIR)
SESSION_DIR = LOG_DIR / "sessions"
SESSION_DIR.mkdir(exist_ok=True)
LOCK_DIR = BASE_DIR / "locks"
LOCK_DIR.mkdir(exist_ok=True)
# Presence of this file signals an active charging session to other
# processes; its mtime is refreshed by ``_touch_lock``.
SESSION_LOCK = LOCK_DIR / "charging.lck"
_lock_task: asyncio.Task | None = None
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def connector_slug(value: int | str | None) -> str:
    """Normalise *value* into the slug used to key connector data."""

    if value in (None, "", AGGREGATE_SLUG):
        return AGGREGATE_SLUG
    try:
        numeric = int(value)
    except (TypeError, ValueError):
        return str(value)
    return str(numeric)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def identity_key(serial: str, connector: int | str | None) -> str:
    """Build the in-memory lookup key for *serial* plus its connector slug."""

    slug = connector_slug(connector)
    return serial + IDENTITY_SEPARATOR + slug
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def register_ip_connection(ip: str | None, consumer: object) -> bool:
    """Track *consumer* under its client *ip*.

    Returns ``False`` only when admitting the consumer would exceed the
    per-IP connection limit; unknown IPs and repeat registrations succeed.
    """

    if not ip:
        return True
    active = ip_connections.setdefault(ip, set())
    if consumer in active:
        return True
    if len(active) < MAX_CONNECTIONS_PER_IP:
        active.add(consumer)
        return True
    return False
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def release_ip_connection(ip: str | None, consumer: object) -> None:
    """Forget *consumer* for *ip*, dropping the IP entry once it is empty."""

    if not ip:
        return
    active = ip_connections.get(ip)
    if active:
        active.discard(consumer)
        if not active:
            ip_connections.pop(ip, None)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def pending_key(serial: str) -> str:
    """Key used while the charger has not yet negotiated a connector id."""

    return serial + IDENTITY_SEPARATOR + PENDING_SLUG
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _candidate_keys(serial: str, connector: int | str | None) -> list[str]:
    """Possible store keys for *serial*/*connector*, most specific first.

    Always falls back to the pending key and the bare serial so legacy
    entries remain reachable; duplicates are removed preserving order.
    """

    candidates: list[str] = []
    if connector in (None, "", AGGREGATE_SLUG):
        candidates.append(identity_key(serial, None))
        prefix = serial + IDENTITY_SEPARATOR
        # Any connector-specific key already registered for this serial.
        for key in connections:
            if key.startswith(prefix) and key not in candidates:
                candidates.append(key)
    else:
        candidates.append(identity_key(serial, connector))
    candidates.append(pending_key(serial))
    candidates.append(serial)

    ordered: list[str] = []
    seen: set[str] = set()
    for key in candidates:
        if key and key not in seen:
            seen.add(key)
            ordered.append(key)
    return ordered
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def iter_identity_keys(serial: str) -> list[str]:
    """All connection-store keys currently associated with *serial*."""

    prefix = serial + IDENTITY_SEPARATOR
    matched = [key for key in connections if key.startswith(prefix)]
    if serial in connections:
        matched.append(serial)
    return matched
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def is_connected(serial: str, connector: int | str | None = None) -> bool:
    """Whether any live connection exists for the charger identity."""

    if connector in (None, "", AGGREGATE_SLUG):
        # Aggregate query: any connector of this serial, or a legacy
        # bare-serial entry, counts as connected.
        if serial in connections:
            return True
        prefix = serial + IDENTITY_SEPARATOR
        return any(key.startswith(prefix) for key in connections)
    return any(key in connections for key in _candidate_keys(serial, connector))
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def get_connection(serial: str, connector: int | str | None = None):
    """Look up the websocket consumer for the identity, or ``None``."""

    for candidate in _candidate_keys(serial, connector):
        consumer = connections.get(candidate)
        if consumer is not None:
            return consumer
    return None
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def set_connection(serial: str, connector: int | str | None, consumer) -> str:
    """Register *consumer* under the negotiated identity; return its key."""

    identity = identity_key(serial, connector)
    connections[identity] = consumer
    return identity
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def pop_connection(serial: str, connector: int | str | None = None):
    """Drop and return the stored consumer for the identity, if present."""

    for candidate in _candidate_keys(serial, connector):
        removed = connections.pop(candidate, None)
        if removed is not None:
            return removed
    return None
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def get_transaction(serial: str, connector: int | str | None = None):
    """Return the active transaction for the identity, or ``None``."""

    for candidate in _candidate_keys(serial, connector):
        active = transactions.get(candidate)
        if active is not None:
            return active
    return None
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def set_transaction(serial: str, connector: int | str | None, tx) -> str:
    """Store an active transaction under the identity; return its key."""

    identity = identity_key(serial, connector)
    transactions[identity] = tx
    return identity
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
def pop_transaction(serial: str, connector: int | str | None = None):
    """Remove and return the active transaction for the identity, if any."""

    for candidate in _candidate_keys(serial, connector):
        removed = transactions.pop(candidate, None)
        if removed is not None:
            return removed
    return None
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def register_pending_call(message_id: str, metadata: dict[str, object]) -> None:
    """Remember metadata for an outstanding CSMS call keyed by *message_id*.

    A fresh event is installed for waiters and any stale result discarded.
    """

    snapshot = dict(metadata)  # defensive copy; caller may mutate theirs
    with _pending_call_lock:
        pending_calls[message_id] = snapshot
        _pending_call_events[message_id] = threading.Event()
        _pending_call_results.pop(message_id, None)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def pop_pending_call(message_id: str) -> dict[str, object] | None:
    """Remove and return metadata registered for *message_id*, if any."""

    with _pending_call_lock:
        metadata = pending_calls.pop(message_id, None)
    return metadata
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def record_pending_call_result(
    message_id: str,
    *,
    metadata: dict[str, object] | None = None,
    success: bool = True,
    payload: object | None = None,
    error_code: str | None = None,
    error_description: str | None = None,
    error_details: object | None = None,
) -> None:
    """Store the outcome of a pending call and wake any waiting thread."""

    outcome: dict[str, object] = {
        "metadata": dict(metadata) if metadata else {},
        "success": success,
        "payload": payload,
        "error_code": error_code,
        "error_description": error_description,
        "error_details": error_details,
    }
    with _pending_call_lock:
        _pending_call_results[message_id] = outcome
        waiter = _pending_call_events.pop(message_id, None)
        if waiter:
            waiter.set()
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def wait_for_pending_call(
    message_id: str, *, timeout: float = 5.0
) -> dict[str, object] | None:
    """Wait for a pending call to be resolved and return the stored result.

    Returns ``None`` when no call was registered, the wait times out, or the
    result was consumed elsewhere.
    """

    with _pending_call_lock:
        # Fast path: the result may already have been recorded.
        existing = _pending_call_results.pop(message_id, None)
        if existing is not None:
            return existing
        event = _pending_call_events.get(message_id)
    if not event:
        # Never registered (or already resolved and consumed).
        return None
    # Wait OUTSIDE the lock so record_pending_call_result can proceed.
    if not event.wait(timeout):
        return None
    with _pending_call_lock:
        result = _pending_call_results.pop(message_id, None)
        _pending_call_events.pop(message_id, None)
        return result
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def schedule_call_timeout(
    message_id: str,
    *,
    timeout: float = 5.0,
    action: str | None = None,
    log_key: str | None = None,
    log_type: str = "charger",
    message: str | None = None,
) -> None:
    """Schedule a timeout notice if a pending call is not answered.

    After *timeout* seconds, if the call identified by *message_id* is still
    pending (and matches *action*, when given), a single "timed out" line is
    written to the relevant log. The notice is emitted at most once per call.
    """

    def _notify() -> None:
        with _pending_call_lock:
            metadata = pending_calls.get(message_id)
            if not metadata:
                # Call already resolved and cleaned up — nothing to report.
                return
            if action and metadata.get("action") != action:
                return
            if metadata.get("timeout_notice_sent"):
                return
            target_log = log_key or metadata.get("log_key")
            if not target_log:
                # No destination for the notice; mark as sent so a later
                # timer does not retry.
                metadata["timeout_notice_sent"] = True
                return
            label = message
            if not label:
                action_label = action or str(metadata.get("action") or "Call")
                label = f"{action_label} request timed out"
            add_log(target_log, label, log_type=log_type)
            metadata["timeout_notice_sent"] = True

    # Daemon timer so a pending notice never blocks interpreter shutdown.
    timer = threading.Timer(timeout, _notify)
    timer.daemon = True
    timer.start()
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def register_triggered_followup(
    serial: str,
    action: str,
    *,
    connector: int | str | None = None,
    log_key: str | None = None,
    target: str | None = None,
) -> None:
    """Note that *serial* owes an *action* message after a TriggerMessage."""

    followups = triggered_followups.setdefault(serial, [])
    followups.append(
        {
            "action": action,
            "connector": connector_slug(connector),
            "log_key": log_key,
            "target": target,
        }
    )
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def consume_triggered_followup(
    serial: str, action: str, connector: int | str | None = None
) -> dict[str, object] | None:
    """Pop and return the first registered follow-up matching the request.

    An entry whose connector slug is the aggregate slug matches any
    connector; otherwise the slugs must be equal.
    """

    entries = triggered_followups.get(serial)
    if not entries:
        return None
    wanted_slug = connector_slug(connector)
    for position, entry in enumerate(entries):
        if entry.get("action") != action:
            continue
        slug = entry.get("connector")
        if slug != AGGREGATE_SLUG and slug != wanted_slug:
            continue
        found = entries.pop(position)
        if not entries:
            # Last entry removed: drop the serial's bucket entirely.
            triggered_followups.pop(serial, None)
        return found
    return None
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def clear_pending_calls(serial: str) -> None:
    """Drop pending-call bookkeeping and follow-ups tied to charger *serial*."""

    with _pending_call_lock:
        stale = [
            message_id
            for message_id, meta in pending_calls.items()
            if meta.get("charger_id") == serial
        ]
        for message_id in stale:
            pending_calls.pop(message_id, None)
            _pending_call_events.pop(message_id, None)
            _pending_call_results.pop(message_id, None)
    triggered_followups.pop(serial, None)
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
def reassign_identity(old_key: str, new_key: str) -> str:
    """Migrate all per-charger state from *old_key* to *new_key*.

    No-op when the keys match or *old_key* is empty; returns *new_key*.
    """

    if not old_key or old_key == new_key:
        return new_key
    # Every keyed store that may hold state under the old identity.
    stores: list[dict] = [connections, transactions, history]
    stores.extend(logs.values())
    stores.extend(log_names.values())
    for store in stores:
        if old_key in store:
            store[new_key] = store.pop(old_key)
    return new_key
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
async def _touch_lock() -> None:
    """Refresh the session lock file's mtime once a minute until cancelled.

    A stale mtime lets external watchers treat the lock as abandoned.
    """

    try:
        while True:
            SESSION_LOCK.touch()
            await asyncio.sleep(60)
    except asyncio.CancelledError:
        # Normal shutdown path via stop_session_lock(); swallow silently.
        pass
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
def start_session_lock() -> None:
    """Create the charging lock file and keep it refreshed in the background."""

    global _lock_task
    SESSION_LOCK.touch()
    # NOTE(review): asyncio.get_event_loop() is deprecated when no loop is
    # running (3.10+); presumably this is always called from async context —
    # confirm before switching to get_running_loop().
    loop = asyncio.get_event_loop()
    if _lock_task is None or _lock_task.done():
        _lock_task = loop.create_task(_touch_lock())
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
def stop_session_lock() -> None:
    """Cancel the lock-refresher task and remove the charging lock file."""

    global _lock_task
    if _lock_task:
        _lock_task.cancel()
        _lock_task = None
    # unlink(missing_ok=True) closes the exists()/unlink() race where another
    # worker (or a manual cleanup) removes the file between check and delete.
    SESSION_LOCK.unlink(missing_ok=True)
|
|
403
|
+
|
|
404
|
+
|
|
405
|
+
def register_log_name(cid: str, name: str, log_type: str = "charger") -> None:
    """Record a friendly *name* for *cid* used when naming log files.

    Lookups are case-insensitive: an existing key that matches *cid* in any
    casing is reused so the store never holds case-duplicate entries.
    """

    names = log_names[log_type]
    lowered = cid.lower()
    existing = next((key for key in names if key.lower() == lowered), None)
    names[existing if existing is not None else cid] = name
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
def _safe_name(name: str) -> str:
|
|
419
|
+
return re.sub(r"[^\w.-]", "_", name)
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
def _file_path(cid: str, log_type: str = "charger") -> Path:
    """Path of the on-disk log file for *cid* and *log_type*."""

    friendly = log_names[log_type].get(cid, cid)
    filename = f"{log_type}.{_safe_name(friendly)}.log"
    return LOG_DIR / filename
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def add_log(cid: str, entry: str, log_type: str = "charger") -> None:
    """Append a timestamped *entry* for *cid*, in memory and on disk."""

    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
    line = f"{stamp} {entry}"

    store = logs[log_type]
    lowered = cid.lower()
    # Reuse an existing key that matches case-insensitively so mixed-case
    # charger ids share a single log stream.
    key = next((existing for existing in store if existing.lower() == lowered), cid)
    store.setdefault(key, []).append(line)
    with _file_path(key, log_type).open("a", encoding="utf-8") as handle:
        handle.write(line + "\n")
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
def _session_folder(cid: str) -> Path:
    """Create (if needed) and return the session-log folder for *cid*."""

    friendly = log_names["charger"].get(cid, cid)
    folder = SESSION_DIR / _safe_name(friendly)
    folder.mkdir(parents=True, exist_ok=True)
    return folder
|
|
450
|
+
|
|
451
|
+
|
|
452
|
+
def start_session_log(cid: str, tx_id: int) -> None:
    """Open an in-memory session record for charger *cid* / transaction *tx_id*."""

    history[cid] = {
        "start": datetime.now(timezone.utc),
        "transaction": tx_id,
        "messages": [],
    }
|
|
460
|
+
|
|
461
|
+
|
|
462
|
+
def add_session_message(cid: str, message: str) -> None:
    """Append a raw *message* to the active session for *cid*, if any."""

    session = history.get(cid)
    if not session:
        return
    # ISO-8601 with trailing "Z" rather than "+00:00".
    stamp = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    session["messages"].append({"timestamp": stamp, "message": message})
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
def end_session_log(cid: str) -> None:
    """Flush and discard the recorded session for *cid*, writing it as JSON."""

    session = history.pop(cid, None)
    if not session:
        return
    stamp = session["start"].strftime("%Y%m%d")
    target = _session_folder(cid) / f"{stamp}_{session.get('transaction')}.json"
    with target.open("w", encoding="utf-8") as handle:
        json.dump(session["messages"], handle, ensure_ascii=False, indent=2)
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
def _log_key_candidates(cid: str, log_type: str) -> list[str]:
    """Log identifiers to inspect when looking up logs for *cid*.

    A connector-specific id maps to itself; an aggregate id expands to the
    aggregate key plus every connector-specific key known to the stores.
    """

    if IDENTITY_SEPARATOR not in cid:
        return [cid]
    serial, _, slug = cid.partition(IDENTITY_SEPARATOR)
    if (slug or AGGREGATE_SLUG) != AGGREGATE_SLUG:
        return [cid]
    keys = [identity_key(serial, None)]
    prefix = serial + IDENTITY_SEPARATOR
    for source in (log_names[log_type], logs[log_type]):
        for key in source:
            if key.startswith(prefix) and key not in keys:
                keys.append(key)
    return keys
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
def _resolve_log_identifier(cid: str, log_type: str) -> tuple[str, str | None]:
    """Return the canonical key and friendly name for ``cid``.

    Resolution order: exact registered name, case-insensitive registered
    name, then a best-effort database lookup (Simulator by cp_path or
    Charger by serial) whose result is cached in ``log_names``.
    """

    names = log_names[log_type]
    name = names.get(cid)
    if name is None:
        lower = cid.lower()
        for key, value in names.items():
            if key.lower() == lower:
                # Adopt the registered key's casing as canonical.
                cid = key
                name = value
                break
        else:
            # for-else: only hit the database when no cached name matched.
            try:
                if log_type == "simulator":
                    from .models import Simulator

                    sim = Simulator.objects.filter(cp_path__iexact=cid).first()
                    if sim:
                        cid = sim.cp_path
                        name = sim.name
                        names[cid] = name
                else:
                    from .models import Charger

                    # Strip any connector suffix before matching the serial.
                    serial = cid.split(IDENTITY_SEPARATOR, 1)[0]
                    ch = Charger.objects.filter(charger_id__iexact=serial).first()
                    if ch and ch.name:
                        name = ch.name
                        names[cid] = name
            except Exception:  # pragma: no cover - best effort lookup
                pass
    return cid, name
|
|
544
|
+
|
|
545
|
+
|
|
546
|
+
def _log_file_for_identifier(cid: str, name: str | None, log_type: str) -> Path:
    """Locate the log file for *cid*, tolerating differently-cased file names."""

    path = _file_path(cid, log_type)
    if path.exists():
        return path
    # Fall back to a case-insensitive scan of existing log files.
    wanted_stem = f"{log_type}.{_safe_name(name or cid).lower()}"
    for candidate in LOG_DIR.glob(f"{log_type}.*.log"):
        if candidate.stem.lower() == wanted_stem:
            return candidate
    return path
|
|
555
|
+
|
|
556
|
+
|
|
557
|
+
def _memory_logs_for_identifier(cid: str, log_type: str) -> list[str]:
    """In-memory log entries stored for *cid*, matched case-insensitively."""

    lowered = cid.lower()
    for key, entries in logs[log_type].items():
        if key.lower() == lowered:
            return entries
    return []
|
|
564
|
+
|
|
565
|
+
|
|
566
|
+
def get_logs(cid: str, log_type: str = "charger") -> list[str]:
    """Collect every log entry (on disk plus in memory) for *cid*.

    Each file and each in-memory key contributes at most once even when
    several candidate identifiers resolve to the same source.
    """

    collected: list[str] = []
    visited_paths: set[Path] = set()
    visited_keys: set[str] = set()
    for candidate in _log_key_candidates(cid, log_type):
        resolved, friendly = _resolve_log_identifier(candidate, log_type)
        path = _log_file_for_identifier(resolved, friendly, log_type)
        if path not in visited_paths and path.exists():
            collected.extend(path.read_text(encoding="utf-8").splitlines())
            visited_paths.add(path)
        lowered = resolved.lower()
        if lowered not in visited_keys:
            in_memory = _memory_logs_for_identifier(resolved, log_type)
            if in_memory:
                collected.extend(in_memory)
                visited_keys.add(lowered)
    return collected
|
|
584
|
+
|
|
585
|
+
|
|
586
|
+
def clear_log(cid: str, log_type: str = "charger") -> None:
    """Remove any stored logs for the given id and type.

    Clears both the in-memory entries and the on-disk file for every
    candidate identifier, matching keys and file names case-insensitively.
    """

    for key in _log_key_candidates(cid, log_type):
        store_map = logs[log_type]
        # Resolve the stored key case-insensitively before dropping it.
        resolved = next(
            (k for k in list(store_map.keys()) if k.lower() == key.lower()),
            key,
        )
        store_map.pop(resolved, None)
        path = _file_path(resolved, log_type)
        if not path.exists():
            # Fall back to scanning for a differently-cased file name.
            target = f"{log_type}.{_safe_name(log_names[log_type].get(resolved, resolved)).lower()}"
            for file in LOG_DIR.glob(f"{log_type}.*.log"):
                if file.stem.lower() == target:
                    path = file
                    break
        if path.exists():
            path.unlink()
|