arthexis 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/METADATA +224 -221
  2. arthexis-0.1.15.dist-info/RECORD +110 -0
  3. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +43 -43
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -32
  9. config/context_processors.py +67 -69
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +25 -25
  14. config/offline.py +49 -49
  15. config/settings.py +691 -682
  16. config/settings_helpers.py +109 -109
  17. config/urls.py +171 -166
  18. config/wsgi.py +17 -17
  19. core/admin.py +3795 -2809
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +151 -151
  22. core/apps.py +356 -272
  23. core/auto_upgrade.py +57 -57
  24. core/backends.py +265 -236
  25. core/changelog.py +342 -0
  26. core/entity.py +149 -133
  27. core/environment.py +61 -61
  28. core/fields.py +168 -168
  29. core/form_fields.py +75 -75
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +178 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +114 -100
  36. core/mailer.py +85 -85
  37. core/middleware.py +91 -91
  38. core/models.py +3637 -2795
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +108 -108
  42. core/release.py +840 -368
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -149
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +315 -315
  47. core/system.py +952 -493
  48. core/tasks.py +408 -394
  49. core/temp_passwords.py +181 -181
  50. core/test_system_info.py +186 -139
  51. core/tests.py +2168 -1521
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +641 -633
  55. core/views.py +2201 -1417
  56. core/widgets.py +213 -94
  57. core/workgroup_urls.py +17 -17
  58. core/workgroup_views.py +94 -94
  59. nodes/admin.py +1720 -1161
  60. nodes/apps.py +87 -85
  61. nodes/backends.py +160 -160
  62. nodes/dns.py +203 -203
  63. nodes/feature_checks.py +133 -133
  64. nodes/lcd.py +165 -165
  65. nodes/models.py +1764 -1597
  66. nodes/reports.py +411 -411
  67. nodes/rfid_sync.py +195 -0
  68. nodes/signals.py +18 -0
  69. nodes/tasks.py +46 -46
  70. nodes/tests.py +3830 -3116
  71. nodes/urls.py +15 -14
  72. nodes/utils.py +121 -105
  73. nodes/views.py +683 -619
  74. ocpp/admin.py +948 -948
  75. ocpp/apps.py +25 -25
  76. ocpp/consumers.py +1565 -1459
  77. ocpp/evcs.py +844 -844
  78. ocpp/evcs_discovery.py +158 -158
  79. ocpp/models.py +917 -917
  80. ocpp/reference_utils.py +42 -42
  81. ocpp/routing.py +11 -11
  82. ocpp/simulator.py +745 -745
  83. ocpp/status_display.py +26 -26
  84. ocpp/store.py +601 -541
  85. ocpp/tasks.py +31 -31
  86. ocpp/test_export_import.py +130 -130
  87. ocpp/test_rfid.py +913 -702
  88. ocpp/tests.py +4445 -4094
  89. ocpp/transactions_io.py +189 -189
  90. ocpp/urls.py +50 -50
  91. ocpp/views.py +1479 -1251
  92. pages/admin.py +769 -539
  93. pages/apps.py +10 -10
  94. pages/checks.py +40 -40
  95. pages/context_processors.py +127 -119
  96. pages/defaults.py +13 -13
  97. pages/forms.py +198 -198
  98. pages/middleware.py +209 -153
  99. pages/models.py +643 -426
  100. pages/tasks.py +74 -0
  101. pages/tests.py +3025 -2200
  102. pages/urls.py +26 -25
  103. pages/utils.py +23 -12
  104. pages/views.py +1176 -1128
  105. arthexis-0.1.13.dist-info/RECORD +0 -105
  106. nodes/actions.py +0 -70
  107. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/WHEEL +0 -0
  108. {arthexis-0.1.13.dist-info → arthexis-0.1.15.dist-info}/top_level.txt +0 -0
ocpp/store.py CHANGED
@@ -1,541 +1,601 @@
1
- """In-memory store for OCPP data with file backed logs."""
2
-
3
- from __future__ import annotations
4
-
5
- import asyncio
6
- from datetime import datetime
7
- import json
8
- from pathlib import Path
9
- import re
10
- import threading
11
-
12
- from core.log_paths import select_log_dir
13
-
14
- IDENTITY_SEPARATOR = "#"
15
- AGGREGATE_SLUG = "all"
16
- PENDING_SLUG = "pending"
17
-
18
- MAX_CONNECTIONS_PER_IP = 2
19
-
20
- connections: dict[str, object] = {}
21
- transactions: dict[str, object] = {}
22
- logs: dict[str, dict[str, list[str]]] = {"charger": {}, "simulator": {}}
23
- # store per charger session logs before they are flushed to disk
24
- history: dict[str, dict[str, object]] = {}
25
- simulators = {}
26
- ip_connections: dict[str, set[object]] = {}
27
- pending_calls: dict[str, dict[str, object]] = {}
28
- triggered_followups: dict[str, list[dict[str, object]]] = {}
29
-
30
- # mapping of charger id / cp_path to friendly names used for log files
31
- log_names: dict[str, dict[str, str]] = {"charger": {}, "simulator": {}}
32
-
33
- BASE_DIR = Path(__file__).resolve().parent.parent
34
- LOG_DIR = select_log_dir(BASE_DIR)
35
- SESSION_DIR = LOG_DIR / "sessions"
36
- SESSION_DIR.mkdir(exist_ok=True)
37
- LOCK_DIR = BASE_DIR / "locks"
38
- LOCK_DIR.mkdir(exist_ok=True)
39
- SESSION_LOCK = LOCK_DIR / "charging.lck"
40
- _lock_task: asyncio.Task | None = None
41
-
42
-
43
- def connector_slug(value: int | str | None) -> str:
44
- """Return the canonical slug for a connector value."""
45
-
46
- if value in (None, "", AGGREGATE_SLUG):
47
- return AGGREGATE_SLUG
48
- try:
49
- return str(int(value))
50
- except (TypeError, ValueError):
51
- return str(value)
52
-
53
-
54
- def identity_key(serial: str, connector: int | str | None) -> str:
55
- """Return the identity key used for in-memory store lookups."""
56
-
57
- return f"{serial}{IDENTITY_SEPARATOR}{connector_slug(connector)}"
58
-
59
-
60
- def register_ip_connection(ip: str | None, consumer: object) -> bool:
61
- """Track a websocket connection for the provided client IP."""
62
-
63
- if not ip:
64
- return True
65
- conns = ip_connections.setdefault(ip, set())
66
- if consumer in conns:
67
- return True
68
- if len(conns) >= MAX_CONNECTIONS_PER_IP:
69
- return False
70
- conns.add(consumer)
71
- return True
72
-
73
-
74
- def release_ip_connection(ip: str | None, consumer: object) -> None:
75
- """Remove a websocket connection from the active client registry."""
76
-
77
- if not ip:
78
- return
79
- conns = ip_connections.get(ip)
80
- if not conns:
81
- return
82
- conns.discard(consumer)
83
- if not conns:
84
- ip_connections.pop(ip, None)
85
-
86
-
87
- def pending_key(serial: str) -> str:
88
- """Return the key used before a connector id has been negotiated."""
89
-
90
- return f"{serial}{IDENTITY_SEPARATOR}{PENDING_SLUG}"
91
-
92
-
93
- def _candidate_keys(serial: str, connector: int | str | None) -> list[str]:
94
- """Return possible keys for lookups with fallbacks."""
95
-
96
- keys: list[str] = []
97
- if connector not in (None, "", AGGREGATE_SLUG):
98
- keys.append(identity_key(serial, connector))
99
- else:
100
- keys.append(identity_key(serial, None))
101
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
102
- for key in connections.keys():
103
- if key.startswith(prefix) and key not in keys:
104
- keys.append(key)
105
- keys.append(pending_key(serial))
106
- keys.append(serial)
107
- seen: set[str] = set()
108
- result: list[str] = []
109
- for key in keys:
110
- if key and key not in seen:
111
- seen.add(key)
112
- result.append(key)
113
- return result
114
-
115
-
116
- def iter_identity_keys(serial: str) -> list[str]:
117
- """Return all known keys for the provided serial."""
118
-
119
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
120
- keys = [key for key in connections.keys() if key.startswith(prefix)]
121
- if serial in connections:
122
- keys.append(serial)
123
- return keys
124
-
125
-
126
- def is_connected(serial: str, connector: int | str | None = None) -> bool:
127
- """Return whether a connection exists for the provided charger identity."""
128
-
129
- if connector in (None, "", AGGREGATE_SLUG):
130
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
131
- return (
132
- any(key.startswith(prefix) for key in connections) or serial in connections
133
- )
134
- return any(key in connections for key in _candidate_keys(serial, connector))
135
-
136
-
137
- def get_connection(serial: str, connector: int | str | None = None):
138
- """Return the websocket consumer for the requested identity, if any."""
139
-
140
- for key in _candidate_keys(serial, connector):
141
- conn = connections.get(key)
142
- if conn is not None:
143
- return conn
144
- return None
145
-
146
-
147
- def set_connection(serial: str, connector: int | str | None, consumer) -> str:
148
- """Store a websocket consumer under the negotiated identity."""
149
-
150
- key = identity_key(serial, connector)
151
- connections[key] = consumer
152
- return key
153
-
154
-
155
- def pop_connection(serial: str, connector: int | str | None = None):
156
- """Remove a stored connection for the given identity."""
157
-
158
- for key in _candidate_keys(serial, connector):
159
- conn = connections.pop(key, None)
160
- if conn is not None:
161
- return conn
162
- return None
163
-
164
-
165
- def get_transaction(serial: str, connector: int | str | None = None):
166
- """Return the active transaction for the provided identity."""
167
-
168
- for key in _candidate_keys(serial, connector):
169
- tx = transactions.get(key)
170
- if tx is not None:
171
- return tx
172
- return None
173
-
174
-
175
- def set_transaction(serial: str, connector: int | str | None, tx) -> str:
176
- """Store an active transaction under the provided identity."""
177
-
178
- key = identity_key(serial, connector)
179
- transactions[key] = tx
180
- return key
181
-
182
-
183
- def pop_transaction(serial: str, connector: int | str | None = None):
184
- """Remove and return an active transaction for the identity."""
185
-
186
- for key in _candidate_keys(serial, connector):
187
- tx = transactions.pop(key, None)
188
- if tx is not None:
189
- return tx
190
- return None
191
-
192
-
193
- def register_pending_call(message_id: str, metadata: dict[str, object]) -> None:
194
- """Store metadata about an outstanding CSMS call."""
195
-
196
- pending_calls[message_id] = dict(metadata)
197
-
198
-
199
- def pop_pending_call(message_id: str) -> dict[str, object] | None:
200
- """Return and remove metadata for a previously registered call."""
201
-
202
- return pending_calls.pop(message_id, None)
203
-
204
-
205
- def schedule_call_timeout(
206
- message_id: str,
207
- *,
208
- timeout: float = 5.0,
209
- action: str | None = None,
210
- log_key: str | None = None,
211
- log_type: str = "charger",
212
- message: str | None = None,
213
- ) -> None:
214
- """Schedule a timeout notice if a pending call is not answered."""
215
-
216
- def _notify() -> None:
217
- metadata = pending_calls.get(message_id)
218
- if not metadata:
219
- return
220
- if action and metadata.get("action") != action:
221
- return
222
- if metadata.get("timeout_notice_sent"):
223
- return
224
- target_log = log_key or metadata.get("log_key")
225
- if not target_log:
226
- metadata["timeout_notice_sent"] = True
227
- return
228
- label = message
229
- if not label:
230
- action_label = action or str(metadata.get("action") or "Call")
231
- label = f"{action_label} request timed out"
232
- add_log(target_log, label, log_type=log_type)
233
- metadata["timeout_notice_sent"] = True
234
-
235
- timer = threading.Timer(timeout, _notify)
236
- timer.daemon = True
237
- timer.start()
238
-
239
-
240
- def register_triggered_followup(
241
- serial: str,
242
- action: str,
243
- *,
244
- connector: int | str | None = None,
245
- log_key: str | None = None,
246
- target: str | None = None,
247
- ) -> None:
248
- """Record that ``serial`` should send ``action`` after a TriggerMessage."""
249
-
250
- entry = {
251
- "action": action,
252
- "connector": connector_slug(connector),
253
- "log_key": log_key,
254
- "target": target,
255
- }
256
- triggered_followups.setdefault(serial, []).append(entry)
257
-
258
-
259
- def consume_triggered_followup(
260
- serial: str, action: str, connector: int | str | None = None
261
- ) -> dict[str, object] | None:
262
- """Return metadata for a previously registered follow-up message."""
263
-
264
- entries = triggered_followups.get(serial)
265
- if not entries:
266
- return None
267
- connector_slug_value = connector_slug(connector)
268
- for index, entry in enumerate(entries):
269
- if entry.get("action") != action:
270
- continue
271
- expected_slug = entry.get("connector")
272
- if expected_slug == AGGREGATE_SLUG:
273
- matched = True
274
- else:
275
- matched = connector_slug_value == expected_slug
276
- if not matched:
277
- continue
278
- result = entries.pop(index)
279
- if not entries:
280
- triggered_followups.pop(serial, None)
281
- return result
282
- return None
283
-
284
-
285
- def clear_pending_calls(serial: str) -> None:
286
- """Remove any pending calls associated with the provided charger id."""
287
-
288
- to_remove = [
289
- key
290
- for key, value in pending_calls.items()
291
- if value.get("charger_id") == serial
292
- ]
293
- for key in to_remove:
294
- pending_calls.pop(key, None)
295
- triggered_followups.pop(serial, None)
296
-
297
-
298
- def reassign_identity(old_key: str, new_key: str) -> str:
299
- """Move any stored data from ``old_key`` to ``new_key``."""
300
-
301
- if old_key == new_key:
302
- return new_key
303
- if not old_key:
304
- return new_key
305
- for mapping in (connections, transactions, history):
306
- if old_key in mapping:
307
- mapping[new_key] = mapping.pop(old_key)
308
- for log_type in logs:
309
- store = logs[log_type]
310
- if old_key in store:
311
- store[new_key] = store.pop(old_key)
312
- for log_type in log_names:
313
- names = log_names[log_type]
314
- if old_key in names:
315
- names[new_key] = names.pop(old_key)
316
- return new_key
317
-
318
-
319
- async def _touch_lock() -> None:
320
- try:
321
- while True:
322
- SESSION_LOCK.touch()
323
- await asyncio.sleep(60)
324
- except asyncio.CancelledError:
325
- pass
326
-
327
-
328
- def start_session_lock() -> None:
329
- global _lock_task
330
- SESSION_LOCK.touch()
331
- loop = asyncio.get_event_loop()
332
- if _lock_task is None or _lock_task.done():
333
- _lock_task = loop.create_task(_touch_lock())
334
-
335
-
336
- def stop_session_lock() -> None:
337
- global _lock_task
338
- if _lock_task:
339
- _lock_task.cancel()
340
- _lock_task = None
341
- if SESSION_LOCK.exists():
342
- SESSION_LOCK.unlink()
343
-
344
-
345
- def register_log_name(cid: str, name: str, log_type: str = "charger") -> None:
346
- """Register a friendly name for the id used in log files."""
347
-
348
- names = log_names[log_type]
349
- # Ensure lookups are case-insensitive by overwriting any existing entry
350
- # that matches the provided cid regardless of case.
351
- for key in list(names.keys()):
352
- if key.lower() == cid.lower():
353
- cid = key
354
- break
355
- names[cid] = name
356
-
357
-
358
- def _safe_name(name: str) -> str:
359
- return re.sub(r"[^\w.-]", "_", name)
360
-
361
-
362
- def _file_path(cid: str, log_type: str = "charger") -> Path:
363
- name = log_names[log_type].get(cid, cid)
364
- return LOG_DIR / f"{log_type}.{_safe_name(name)}.log"
365
-
366
-
367
- def add_log(cid: str, entry: str, log_type: str = "charger") -> None:
368
- """Append a timestamped log entry for the given id and log type."""
369
-
370
- timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
371
- entry = f"{timestamp} {entry}"
372
-
373
- store = logs[log_type]
374
- # Store log entries under the cid as provided but allow retrieval using
375
- # any casing by recording entries in a case-insensitive manner.
376
- key = next((k for k in store.keys() if k.lower() == cid.lower()), cid)
377
- store.setdefault(key, []).append(entry)
378
- path = _file_path(key, log_type)
379
- with path.open("a", encoding="utf-8") as handle:
380
- handle.write(entry + "\n")
381
-
382
-
383
- def _session_folder(cid: str) -> Path:
384
- """Return the folder path for session logs for the given charger."""
385
-
386
- name = log_names["charger"].get(cid, cid)
387
- folder = SESSION_DIR / _safe_name(name)
388
- folder.mkdir(parents=True, exist_ok=True)
389
- return folder
390
-
391
-
392
- def start_session_log(cid: str, tx_id: int) -> None:
393
- """Begin logging a session for the given charger and transaction id."""
394
-
395
- history[cid] = {
396
- "transaction": tx_id,
397
- "start": datetime.utcnow(),
398
- "messages": [],
399
- }
400
-
401
-
402
- def add_session_message(cid: str, message: str) -> None:
403
- """Record a raw message for the current session if one is active."""
404
-
405
- sess = history.get(cid)
406
- if not sess:
407
- return
408
- sess["messages"].append(
409
- {
410
- "timestamp": datetime.utcnow().isoformat() + "Z",
411
- "message": message,
412
- }
413
- )
414
-
415
-
416
- def end_session_log(cid: str) -> None:
417
- """Write any recorded session log to disk for the given charger."""
418
-
419
- sess = history.pop(cid, None)
420
- if not sess:
421
- return
422
- folder = _session_folder(cid)
423
- date = sess["start"].strftime("%Y%m%d")
424
- tx_id = sess.get("transaction")
425
- filename = f"{date}_{tx_id}.json"
426
- path = folder / filename
427
- with path.open("w", encoding="utf-8") as handle:
428
- json.dump(sess["messages"], handle, ensure_ascii=False, indent=2)
429
-
430
-
431
- def _log_key_candidates(cid: str, log_type: str) -> list[str]:
432
- """Return log identifiers to inspect for the requested cid."""
433
-
434
- if IDENTITY_SEPARATOR not in cid:
435
- return [cid]
436
- serial, slug = cid.split(IDENTITY_SEPARATOR, 1)
437
- slug = slug or AGGREGATE_SLUG
438
- if slug != AGGREGATE_SLUG:
439
- return [cid]
440
- keys: list[str] = [identity_key(serial, None)]
441
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
442
- for source in (log_names[log_type], logs[log_type]):
443
- for key in source.keys():
444
- if key.startswith(prefix) and key not in keys:
445
- keys.append(key)
446
- return keys
447
-
448
-
449
- def _resolve_log_identifier(cid: str, log_type: str) -> tuple[str, str | None]:
450
- """Return the canonical key and friendly name for ``cid``."""
451
-
452
- names = log_names[log_type]
453
- name = names.get(cid)
454
- if name is None:
455
- lower = cid.lower()
456
- for key, value in names.items():
457
- if key.lower() == lower:
458
- cid = key
459
- name = value
460
- break
461
- else:
462
- try:
463
- if log_type == "simulator":
464
- from .models import Simulator
465
-
466
- sim = Simulator.objects.filter(cp_path__iexact=cid).first()
467
- if sim:
468
- cid = sim.cp_path
469
- name = sim.name
470
- names[cid] = name
471
- else:
472
- from .models import Charger
473
-
474
- serial = cid.split(IDENTITY_SEPARATOR, 1)[0]
475
- ch = Charger.objects.filter(charger_id__iexact=serial).first()
476
- if ch and ch.name:
477
- name = ch.name
478
- names[cid] = name
479
- except Exception: # pragma: no cover - best effort lookup
480
- pass
481
- return cid, name
482
-
483
-
484
- def _log_file_for_identifier(cid: str, name: str | None, log_type: str) -> Path:
485
- path = _file_path(cid, log_type)
486
- if not path.exists():
487
- target = f"{log_type}.{_safe_name(name or cid).lower()}"
488
- for file in LOG_DIR.glob(f"{log_type}.*.log"):
489
- if file.stem.lower() == target:
490
- path = file
491
- break
492
- return path
493
-
494
-
495
- def _memory_logs_for_identifier(cid: str, log_type: str) -> list[str]:
496
- store = logs[log_type]
497
- lower = cid.lower()
498
- for key, entries in store.items():
499
- if key.lower() == lower:
500
- return entries
501
- return []
502
-
503
-
504
- def get_logs(cid: str, log_type: str = "charger") -> list[str]:
505
- """Return all log entries for the given id and type."""
506
-
507
- entries: list[str] = []
508
- seen_paths: set[Path] = set()
509
- seen_keys: set[str] = set()
510
- for key in _log_key_candidates(cid, log_type):
511
- resolved, name = _resolve_log_identifier(key, log_type)
512
- path = _log_file_for_identifier(resolved, name, log_type)
513
- if path.exists() and path not in seen_paths:
514
- entries.extend(path.read_text(encoding="utf-8").splitlines())
515
- seen_paths.add(path)
516
- memory_entries = _memory_logs_for_identifier(resolved, log_type)
517
- lower_key = resolved.lower()
518
- if memory_entries and lower_key not in seen_keys:
519
- entries.extend(memory_entries)
520
- seen_keys.add(lower_key)
521
- return entries
522
-
523
-
524
- def clear_log(cid: str, log_type: str = "charger") -> None:
525
- """Remove any stored logs for the given id and type."""
526
- for key in _log_key_candidates(cid, log_type):
527
- store_map = logs[log_type]
528
- resolved = next(
529
- (k for k in list(store_map.keys()) if k.lower() == key.lower()),
530
- key,
531
- )
532
- store_map.pop(resolved, None)
533
- path = _file_path(resolved, log_type)
534
- if not path.exists():
535
- target = f"{log_type}.{_safe_name(log_names[log_type].get(resolved, resolved)).lower()}"
536
- for file in LOG_DIR.glob(f"{log_type}.*.log"):
537
- if file.stem.lower() == target:
538
- path = file
539
- break
540
- if path.exists():
541
- path.unlink()
1
+ """In-memory store for OCPP data with file backed logs."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import asyncio
6
+ from datetime import datetime
7
+ import json
8
+ from pathlib import Path
9
+ import re
10
+ import threading
11
+
12
+ from core.log_paths import select_log_dir
13
+
14
+ IDENTITY_SEPARATOR = "#"
15
+ AGGREGATE_SLUG = "all"
16
+ PENDING_SLUG = "pending"
17
+
18
+ MAX_CONNECTIONS_PER_IP = 2
19
+
20
+ connections: dict[str, object] = {}
21
+ transactions: dict[str, object] = {}
22
+ logs: dict[str, dict[str, list[str]]] = {"charger": {}, "simulator": {}}
23
+ # store per charger session logs before they are flushed to disk
24
+ history: dict[str, dict[str, object]] = {}
25
+ simulators = {}
26
+ ip_connections: dict[str, set[object]] = {}
27
+ pending_calls: dict[str, dict[str, object]] = {}
28
+ _pending_call_events: dict[str, threading.Event] = {}
29
+ _pending_call_results: dict[str, dict[str, object]] = {}
30
+ _pending_call_lock = threading.Lock()
31
+ triggered_followups: dict[str, list[dict[str, object]]] = {}
32
+
33
+ # mapping of charger id / cp_path to friendly names used for log files
34
+ log_names: dict[str, dict[str, str]] = {"charger": {}, "simulator": {}}
35
+
36
+ BASE_DIR = Path(__file__).resolve().parent.parent
37
+ LOG_DIR = select_log_dir(BASE_DIR)
38
+ SESSION_DIR = LOG_DIR / "sessions"
39
+ SESSION_DIR.mkdir(exist_ok=True)
40
+ LOCK_DIR = BASE_DIR / "locks"
41
+ LOCK_DIR.mkdir(exist_ok=True)
42
+ SESSION_LOCK = LOCK_DIR / "charging.lck"
43
+ _lock_task: asyncio.Task | None = None
44
+
45
+
46
+ def connector_slug(value: int | str | None) -> str:
47
+ """Return the canonical slug for a connector value."""
48
+
49
+ if value in (None, "", AGGREGATE_SLUG):
50
+ return AGGREGATE_SLUG
51
+ try:
52
+ return str(int(value))
53
+ except (TypeError, ValueError):
54
+ return str(value)
55
+
56
+
57
+ def identity_key(serial: str, connector: int | str | None) -> str:
58
+ """Return the identity key used for in-memory store lookups."""
59
+
60
+ return f"{serial}{IDENTITY_SEPARATOR}{connector_slug(connector)}"
61
+
62
+
63
+ def register_ip_connection(ip: str | None, consumer: object) -> bool:
64
+ """Track a websocket connection for the provided client IP."""
65
+
66
+ if not ip:
67
+ return True
68
+ conns = ip_connections.setdefault(ip, set())
69
+ if consumer in conns:
70
+ return True
71
+ if len(conns) >= MAX_CONNECTIONS_PER_IP:
72
+ return False
73
+ conns.add(consumer)
74
+ return True
75
+
76
+
77
+ def release_ip_connection(ip: str | None, consumer: object) -> None:
78
+ """Remove a websocket connection from the active client registry."""
79
+
80
+ if not ip:
81
+ return
82
+ conns = ip_connections.get(ip)
83
+ if not conns:
84
+ return
85
+ conns.discard(consumer)
86
+ if not conns:
87
+ ip_connections.pop(ip, None)
88
+
89
+
90
+ def pending_key(serial: str) -> str:
91
+ """Return the key used before a connector id has been negotiated."""
92
+
93
+ return f"{serial}{IDENTITY_SEPARATOR}{PENDING_SLUG}"
94
+
95
+
96
+ def _candidate_keys(serial: str, connector: int | str | None) -> list[str]:
97
+ """Return possible keys for lookups with fallbacks."""
98
+
99
+ keys: list[str] = []
100
+ if connector not in (None, "", AGGREGATE_SLUG):
101
+ keys.append(identity_key(serial, connector))
102
+ else:
103
+ keys.append(identity_key(serial, None))
104
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
105
+ for key in connections.keys():
106
+ if key.startswith(prefix) and key not in keys:
107
+ keys.append(key)
108
+ keys.append(pending_key(serial))
109
+ keys.append(serial)
110
+ seen: set[str] = set()
111
+ result: list[str] = []
112
+ for key in keys:
113
+ if key and key not in seen:
114
+ seen.add(key)
115
+ result.append(key)
116
+ return result
117
+
118
+
119
+ def iter_identity_keys(serial: str) -> list[str]:
120
+ """Return all known keys for the provided serial."""
121
+
122
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
123
+ keys = [key for key in connections.keys() if key.startswith(prefix)]
124
+ if serial in connections:
125
+ keys.append(serial)
126
+ return keys
127
+
128
+
129
+ def is_connected(serial: str, connector: int | str | None = None) -> bool:
130
+ """Return whether a connection exists for the provided charger identity."""
131
+
132
+ if connector in (None, "", AGGREGATE_SLUG):
133
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
134
+ return (
135
+ any(key.startswith(prefix) for key in connections) or serial in connections
136
+ )
137
+ return any(key in connections for key in _candidate_keys(serial, connector))
138
+
139
+
140
+ def get_connection(serial: str, connector: int | str | None = None):
141
+ """Return the websocket consumer for the requested identity, if any."""
142
+
143
+ for key in _candidate_keys(serial, connector):
144
+ conn = connections.get(key)
145
+ if conn is not None:
146
+ return conn
147
+ return None
148
+
149
+
150
+ def set_connection(serial: str, connector: int | str | None, consumer) -> str:
151
+ """Store a websocket consumer under the negotiated identity."""
152
+
153
+ key = identity_key(serial, connector)
154
+ connections[key] = consumer
155
+ return key
156
+
157
+
158
+ def pop_connection(serial: str, connector: int | str | None = None):
159
+ """Remove a stored connection for the given identity."""
160
+
161
+ for key in _candidate_keys(serial, connector):
162
+ conn = connections.pop(key, None)
163
+ if conn is not None:
164
+ return conn
165
+ return None
166
+
167
+
168
+ def get_transaction(serial: str, connector: int | str | None = None):
169
+ """Return the active transaction for the provided identity."""
170
+
171
+ for key in _candidate_keys(serial, connector):
172
+ tx = transactions.get(key)
173
+ if tx is not None:
174
+ return tx
175
+ return None
176
+
177
+
178
+ def set_transaction(serial: str, connector: int | str | None, tx) -> str:
179
+ """Store an active transaction under the provided identity."""
180
+
181
+ key = identity_key(serial, connector)
182
+ transactions[key] = tx
183
+ return key
184
+
185
+
186
+ def pop_transaction(serial: str, connector: int | str | None = None):
187
+ """Remove and return an active transaction for the identity."""
188
+
189
+ for key in _candidate_keys(serial, connector):
190
+ tx = transactions.pop(key, None)
191
+ if tx is not None:
192
+ return tx
193
+ return None
194
+
195
+
196
+ def register_pending_call(message_id: str, metadata: dict[str, object]) -> None:
197
+ """Store metadata about an outstanding CSMS call."""
198
+
199
+ copy = dict(metadata)
200
+ with _pending_call_lock:
201
+ pending_calls[message_id] = copy
202
+ event = threading.Event()
203
+ _pending_call_events[message_id] = event
204
+ _pending_call_results.pop(message_id, None)
205
+
206
+
207
+ def pop_pending_call(message_id: str) -> dict[str, object] | None:
208
+ """Return and remove metadata for a previously registered call."""
209
+
210
+ with _pending_call_lock:
211
+ return pending_calls.pop(message_id, None)
212
+
213
+
214
+ def record_pending_call_result(
215
+ message_id: str,
216
+ *,
217
+ metadata: dict[str, object] | None = None,
218
+ success: bool = True,
219
+ payload: object | None = None,
220
+ error_code: str | None = None,
221
+ error_description: str | None = None,
222
+ error_details: object | None = None,
223
+ ) -> None:
224
+ """Record the outcome for a previously registered pending call."""
225
+
226
+ result = {
227
+ "metadata": dict(metadata or {}),
228
+ "success": success,
229
+ "payload": payload,
230
+ "error_code": error_code,
231
+ "error_description": error_description,
232
+ "error_details": error_details,
233
+ }
234
+ with _pending_call_lock:
235
+ _pending_call_results[message_id] = result
236
+ event = _pending_call_events.pop(message_id, None)
237
+ if event:
238
+ event.set()
239
+
240
+
241
+ def wait_for_pending_call(
242
+ message_id: str, *, timeout: float = 5.0
243
+ ) -> dict[str, object] | None:
244
+ """Wait for a pending call to be resolved and return the stored result."""
245
+
246
+ with _pending_call_lock:
247
+ existing = _pending_call_results.pop(message_id, None)
248
+ if existing is not None:
249
+ return existing
250
+ event = _pending_call_events.get(message_id)
251
+ if not event:
252
+ return None
253
+ if not event.wait(timeout):
254
+ return None
255
+ with _pending_call_lock:
256
+ result = _pending_call_results.pop(message_id, None)
257
+ _pending_call_events.pop(message_id, None)
258
+ return result
259
+
260
+
261
+ def schedule_call_timeout(
262
+ message_id: str,
263
+ *,
264
+ timeout: float = 5.0,
265
+ action: str | None = None,
266
+ log_key: str | None = None,
267
+ log_type: str = "charger",
268
+ message: str | None = None,
269
+ ) -> None:
270
+ """Schedule a timeout notice if a pending call is not answered."""
271
+
272
+ def _notify() -> None:
273
+ with _pending_call_lock:
274
+ metadata = pending_calls.get(message_id)
275
+ if not metadata:
276
+ return
277
+ if action and metadata.get("action") != action:
278
+ return
279
+ if metadata.get("timeout_notice_sent"):
280
+ return
281
+ target_log = log_key or metadata.get("log_key")
282
+ if not target_log:
283
+ metadata["timeout_notice_sent"] = True
284
+ return
285
+ label = message
286
+ if not label:
287
+ action_label = action or str(metadata.get("action") or "Call")
288
+ label = f"{action_label} request timed out"
289
+ add_log(target_log, label, log_type=log_type)
290
+ metadata["timeout_notice_sent"] = True
291
+
292
+ timer = threading.Timer(timeout, _notify)
293
+ timer.daemon = True
294
+ timer.start()
295
+
296
+
297
+ def register_triggered_followup(
298
+ serial: str,
299
+ action: str,
300
+ *,
301
+ connector: int | str | None = None,
302
+ log_key: str | None = None,
303
+ target: str | None = None,
304
+ ) -> None:
305
+ """Record that ``serial`` should send ``action`` after a TriggerMessage."""
306
+
307
+ entry = {
308
+ "action": action,
309
+ "connector": connector_slug(connector),
310
+ "log_key": log_key,
311
+ "target": target,
312
+ }
313
+ triggered_followups.setdefault(serial, []).append(entry)
314
+
315
+
316
def consume_triggered_followup(
    serial: str, action: str, connector: int | str | None = None
) -> dict[str, object] | None:
    """Return metadata for a previously registered follow-up message."""

    entries = triggered_followups.get(serial)
    if not entries:
        return None
    requested_slug = connector_slug(connector)
    for position, candidate in enumerate(entries):
        if candidate.get("action") != action:
            continue
        expected_slug = candidate.get("connector")
        # An aggregate registration matches any connector; otherwise the
        # requested connector slug must match exactly.
        if expected_slug != AGGREGATE_SLUG and requested_slug != expected_slug:
            continue
        found = entries.pop(position)
        if not entries:
            # Drop the now-empty bucket for this serial.
            triggered_followups.pop(serial, None)
        return found
    return None
340
+
341
+
342
def clear_pending_calls(serial: str) -> None:
    """Remove any pending calls associated with the provided charger id."""

    with _pending_call_lock:
        stale_ids = [
            message_id
            for message_id, metadata in pending_calls.items()
            if metadata.get("charger_id") == serial
        ]
        for message_id in stale_ids:
            pending_calls.pop(message_id, None)
            _pending_call_events.pop(message_id, None)
            _pending_call_results.pop(message_id, None)
        triggered_followups.pop(serial, None)
356
+
357
+
358
def reassign_identity(old_key: str, new_key: str) -> str:
    """Move any stored data from ``old_key`` to ``new_key``."""

    # Nothing to move when the keys match or the old key is empty.
    if not old_key or old_key == new_key:
        return new_key
    for mapping in (connections, transactions, history):
        if old_key in mapping:
            mapping[new_key] = mapping.pop(old_key)
    # logs and log_names are keyed by log type; migrate every inner store.
    for per_type in (logs, log_names):
        for store in per_type.values():
            if old_key in store:
                store[new_key] = store.pop(old_key)
    return new_key
377
+
378
+
379
async def _touch_lock() -> None:
    """Refresh the session lock file's mtime once a minute until cancelled."""
    try:
        while True:
            SESSION_LOCK.touch()
            await asyncio.sleep(60)
    except asyncio.CancelledError:
        # Cancellation is the expected shutdown path for this keep-alive
        # task; swallow it so the task finishes quietly.
        pass
386
+
387
+
388
def start_session_lock() -> None:
    """Create the session lock file and start its keep-alive task (idempotent)."""
    global _lock_task
    SESSION_LOCK.touch()
    # NOTE(review): asyncio.get_event_loop() is deprecated outside a running
    # loop (Python 3.10+). If this is only ever called from async context,
    # asyncio.get_running_loop() would be the modern equivalent — confirm
    # call sites before changing.
    loop = asyncio.get_event_loop()
    # Only spawn a new task if none is running; avoids duplicate touch loops.
    if _lock_task is None or _lock_task.done():
        _lock_task = loop.create_task(_touch_lock())
394
+
395
+
396
def stop_session_lock() -> None:
    """Cancel the keep-alive task and remove the session lock file.

    Safe to call repeatedly; a missing lock file is ignored.
    """
    global _lock_task
    if _lock_task:
        _lock_task.cancel()
        _lock_task = None
    # unlink(missing_ok=True) replaces the exists()/unlink() pair, which was
    # racy: another process could remove the file between check and delete.
    SESSION_LOCK.unlink(missing_ok=True)
403
+
404
+
405
def register_log_name(cid: str, name: str, log_type: str = "charger") -> None:
    """Register a friendly name for the id used in log files."""

    names = log_names[log_type]
    # Reuse the stored casing of any existing key so lookups remain
    # case-insensitive instead of creating near-duplicate entries.
    lowered = cid.lower()
    existing = next((key for key in names if key.lower() == lowered), None)
    if existing is not None:
        cid = existing
    names[cid] = name
416
+
417
+
418
+ def _safe_name(name: str) -> str:
419
+ return re.sub(r"[^\w.-]", "_", name)
420
+
421
+
422
def _file_path(cid: str, log_type: str = "charger") -> Path:
    """Build the on-disk log path for ``cid`` within ``log_type``."""
    display = log_names[log_type].get(cid, cid)
    filename = f"{log_type}.{_safe_name(display)}.log"
    return LOG_DIR / filename
425
+
426
+
427
def add_log(cid: str, entry: str, log_type: str = "charger") -> None:
    """Append a timestamped log entry for the given id and log type.

    The entry is kept in the in-memory ``logs`` store (keyed
    case-insensitively on ``cid``) and appended to the per-id log file.
    """
    from datetime import timezone

    # datetime.utcnow() is deprecated since Python 3.12; the aware form
    # below yields the identical "%Y-%m-%d %H:%M:%S" string.
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    entry = f"{timestamp} {entry}"

    store = logs[log_type]
    # Reuse an existing key that matches cid case-insensitively so entries
    # logged under different casings land in the same bucket.
    key = next((k for k in store.keys() if k.lower() == cid.lower()), cid)
    store.setdefault(key, []).append(entry)
    path = _file_path(key, log_type)
    with path.open("a", encoding="utf-8") as handle:
        handle.write(entry + "\n")
441
+
442
+
443
def _session_folder(cid: str) -> Path:
    """Return (and create if needed) the session-log folder for a charger."""

    folder = SESSION_DIR / _safe_name(log_names["charger"].get(cid, cid))
    folder.mkdir(parents=True, exist_ok=True)
    return folder
450
+
451
+
452
def start_session_log(cid: str, tx_id: int) -> None:
    """Begin logging a session for the given charger and transaction id."""

    session: dict = {"transaction": tx_id, "start": datetime.utcnow()}
    session["messages"] = []
    history[cid] = session
460
+
461
+
462
def add_session_message(cid: str, message: str) -> None:
    """Record a raw message for the current session if one is active."""

    session = history.get(cid)
    if not session:
        return
    record = {
        "timestamp": datetime.utcnow().isoformat() + "Z",
        "message": message,
    }
    session["messages"].append(record)
474
+
475
+
476
def end_session_log(cid: str) -> None:
    """Write any recorded session log to disk for the given charger."""

    session = history.pop(cid, None)
    if not session:
        return
    stamp = session["start"].strftime("%Y%m%d")
    tx_id = session.get("transaction")
    target = _session_folder(cid) / f"{stamp}_{tx_id}.json"
    with target.open("w", encoding="utf-8") as handle:
        json.dump(session["messages"], handle, ensure_ascii=False, indent=2)
489
+
490
+
491
def _log_key_candidates(cid: str, log_type: str) -> list[str]:
    """Return log identifiers to inspect for the requested cid."""

    if IDENTITY_SEPARATOR not in cid:
        return [cid]
    serial, _, slug = cid.partition(IDENTITY_SEPARATOR)
    # Only the aggregate slug fans out to per-connector keys.
    if (slug or AGGREGATE_SLUG) != AGGREGATE_SLUG:
        return [cid]
    candidates: list[str] = [identity_key(serial, None)]
    prefix = f"{serial}{IDENTITY_SEPARATOR}"
    for store in (log_names[log_type], logs[log_type]):
        for key in store:
            if key.startswith(prefix) and key not in candidates:
                candidates.append(key)
    return candidates
507
+
508
+
509
def _resolve_log_identifier(cid: str, log_type: str) -> tuple[str, str | None]:
    """Return the canonical key and friendly name for ``cid``.

    Lookup order: exact-case hit in ``log_names``, then a case-insensitive
    scan, then a best-effort database lookup (Simulator for the "simulator"
    log type, Charger otherwise) whose result is cached back into
    ``log_names``.  Returns ``(cid, None)`` when nothing matches.
    """

    names = log_names[log_type]
    name = names.get(cid)
    if name is None:
        lower = cid.lower()
        for key, value in names.items():
            if key.lower() == lower:
                # Adopt the stored casing as the canonical key.
                cid = key
                name = value
                break
        else:
            # No in-memory match at all: fall back to the database.
            try:
                if log_type == "simulator":
                    from .models import Simulator

                    sim = Simulator.objects.filter(cp_path__iexact=cid).first()
                    if sim:
                        cid = sim.cp_path
                        name = sim.name
                        names[cid] = name  # cache for future lookups
                else:
                    from .models import Charger

                    # Aggregate ids look like "<serial><sep><slug>"; only the
                    # serial part identifies the charger row.
                    serial = cid.split(IDENTITY_SEPARATOR, 1)[0]
                    ch = Charger.objects.filter(charger_id__iexact=serial).first()
                    if ch and ch.name:
                        name = ch.name
                        names[cid] = name  # cache for future lookups
            except Exception:  # pragma: no cover - best effort lookup
                # Deliberately broad: any DB error (e.g. no database
                # configured) simply leaves the name unresolved.
                pass
    return cid, name
542
+
543
+
544
def _log_file_for_identifier(cid: str, name: str | None, log_type: str) -> Path:
    """Locate the log file for ``cid``, falling back to a case-insensitive scan.

    Returns the direct path even when it does not exist and no scan match
    is found, so callers can still test ``path.exists()``.
    """
    path = _file_path(cid, log_type)
    if path.exists():
        return path
    wanted = f"{log_type}.{_safe_name(name or cid).lower()}"
    for candidate in LOG_DIR.glob(f"{log_type}.*.log"):
        if candidate.stem.lower() == wanted:
            return candidate
    return path
553
+
554
+
555
def _memory_logs_for_identifier(cid: str, log_type: str) -> list[str]:
    """Fetch in-memory log entries for ``cid`` using case-insensitive matching."""
    wanted = cid.lower()
    store = logs[log_type]
    return next(
        (entries for key, entries in store.items() if key.lower() == wanted),
        [],
    )
562
+
563
+
564
def get_logs(cid: str, log_type: str = "charger") -> list[str]:
    """Return all log entries for the given id and type.

    Merges on-disk file contents with in-memory entries for every key
    candidate (an aggregate id expands to its per-connector keys), while
    de-duplicating by file path and by lower-cased key so the same source
    is never read twice.  File entries for a key precede its in-memory ones.
    """

    entries: list[str] = []
    seen_paths: set[Path] = set()  # disk files already read
    seen_keys: set[str] = set()  # lower-cased keys already merged from memory
    for key in _log_key_candidates(cid, log_type):
        resolved, name = _resolve_log_identifier(key, log_type)
        path = _log_file_for_identifier(resolved, name, log_type)
        if path.exists() and path not in seen_paths:
            entries.extend(path.read_text(encoding="utf-8").splitlines())
            seen_paths.add(path)
        memory_entries = _memory_logs_for_identifier(resolved, log_type)
        lower_key = resolved.lower()
        if memory_entries and lower_key not in seen_keys:
            entries.extend(memory_entries)
            seen_keys.add(lower_key)
    return entries
582
+
583
+
584
def clear_log(cid: str, log_type: str = "charger") -> None:
    """Remove any stored logs for the given id and type.

    Clears both the in-memory entries and the on-disk log file for every
    identifier candidate (aggregate ids expand to per-connector keys).
    """
    store_map = logs[log_type]  # invariant; hoisted out of the candidate loop
    names = log_names[log_type]
    for key in _log_key_candidates(cid, log_type):
        # Match the stored casing of the key, if present.
        resolved = next(
            (k for k in list(store_map.keys()) if k.lower() == key.lower()),
            key,
        )
        store_map.pop(resolved, None)
        # Reuse the read-side lookup (exact path, then case-insensitive
        # scan) instead of duplicating that logic here.
        path = _log_file_for_identifier(resolved, names.get(resolved), log_type)
        # missing_ok avoids a TOCTOU race between lookup and delete.
        path.unlink(missing_ok=True)