arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arthexis has been flagged as potentially problematic; consult the registry's advisory page for more details.

Files changed (112) hide show
  1. arthexis-0.1.26.dist-info/METADATA +272 -0
  2. arthexis-0.1.26.dist-info/RECORD +111 -0
  3. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +29 -29
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -25
  9. config/context_processors.py +67 -68
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +71 -25
  14. config/offline.py +49 -49
  15. config/settings.py +676 -492
  16. config/settings_helpers.py +109 -0
  17. config/urls.py +228 -159
  18. config/wsgi.py +17 -17
  19. core/admin.py +4052 -2066
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +192 -151
  22. core/apps.py +350 -223
  23. core/auto_upgrade.py +72 -0
  24. core/backends.py +311 -124
  25. core/changelog.py +403 -0
  26. core/entity.py +149 -133
  27. core/environment.py +60 -43
  28. core/fields.py +168 -75
  29. core/form_fields.py +75 -0
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +183 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +114 -100
  36. core/mailer.py +89 -83
  37. core/middleware.py +91 -91
  38. core/models.py +5041 -2195
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +107 -0
  42. core/release.py +940 -346
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -131
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +250 -284
  47. core/system.py +1425 -230
  48. core/tasks.py +538 -199
  49. core/temp_passwords.py +181 -0
  50. core/test_system_info.py +202 -43
  51. core/tests.py +2673 -1069
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +681 -495
  55. core/views.py +2484 -789
  56. core/widgets.py +213 -51
  57. nodes/admin.py +2236 -445
  58. nodes/apps.py +98 -70
  59. nodes/backends.py +160 -53
  60. nodes/dns.py +203 -0
  61. nodes/feature_checks.py +133 -0
  62. nodes/lcd.py +165 -165
  63. nodes/models.py +2375 -870
  64. nodes/reports.py +411 -0
  65. nodes/rfid_sync.py +210 -0
  66. nodes/signals.py +18 -0
  67. nodes/tasks.py +141 -46
  68. nodes/tests.py +5045 -1489
  69. nodes/urls.py +29 -13
  70. nodes/utils.py +172 -73
  71. nodes/views.py +1768 -304
  72. ocpp/admin.py +1775 -481
  73. ocpp/apps.py +25 -25
  74. ocpp/consumers.py +1843 -630
  75. ocpp/evcs.py +844 -928
  76. ocpp/evcs_discovery.py +158 -0
  77. ocpp/models.py +1417 -640
  78. ocpp/network.py +398 -0
  79. ocpp/reference_utils.py +42 -0
  80. ocpp/routing.py +11 -9
  81. ocpp/simulator.py +745 -368
  82. ocpp/status_display.py +26 -0
  83. ocpp/store.py +603 -403
  84. ocpp/tasks.py +479 -31
  85. ocpp/test_export_import.py +131 -130
  86. ocpp/test_rfid.py +1072 -540
  87. ocpp/tests.py +5494 -2296
  88. ocpp/transactions_io.py +197 -165
  89. ocpp/urls.py +50 -50
  90. ocpp/views.py +2024 -912
  91. pages/admin.py +1123 -396
  92. pages/apps.py +45 -10
  93. pages/checks.py +40 -40
  94. pages/context_processors.py +151 -85
  95. pages/defaults.py +13 -0
  96. pages/forms.py +221 -0
  97. pages/middleware.py +213 -153
  98. pages/models.py +720 -252
  99. pages/module_defaults.py +156 -0
  100. pages/site_config.py +137 -0
  101. pages/tasks.py +74 -0
  102. pages/tests.py +4009 -1389
  103. pages/urls.py +38 -20
  104. pages/utils.py +93 -12
  105. pages/views.py +1736 -762
  106. arthexis-0.1.9.dist-info/METADATA +0 -168
  107. arthexis-0.1.9.dist-info/RECORD +0 -92
  108. core/workgroup_urls.py +0 -17
  109. core/workgroup_views.py +0 -94
  110. nodes/actions.py +0 -70
  111. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
  112. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
ocpp/store.py CHANGED
@@ -1,403 +1,603 @@
1
- """In-memory store for OCPP data with file backed logs."""
2
-
3
- from __future__ import annotations
4
-
5
- from pathlib import Path
6
- from datetime import datetime
7
- import json
8
- import re
9
- import asyncio
10
-
11
- from core.log_paths import select_log_dir
12
-
13
- IDENTITY_SEPARATOR = "#"
14
- AGGREGATE_SLUG = "all"
15
- PENDING_SLUG = "pending"
16
-
17
- connections: dict[str, object] = {}
18
- transactions: dict[str, object] = {}
19
- logs: dict[str, dict[str, list[str]]] = {"charger": {}, "simulator": {}}
20
- # store per charger session logs before they are flushed to disk
21
- history: dict[str, dict[str, object]] = {}
22
- simulators = {}
23
-
24
- # mapping of charger id / cp_path to friendly names used for log files
25
- log_names: dict[str, dict[str, str]] = {"charger": {}, "simulator": {}}
26
-
27
- BASE_DIR = Path(__file__).resolve().parent.parent
28
- LOG_DIR = select_log_dir(BASE_DIR)
29
- SESSION_DIR = LOG_DIR / "sessions"
30
- SESSION_DIR.mkdir(exist_ok=True)
31
- LOCK_DIR = BASE_DIR / "locks"
32
- LOCK_DIR.mkdir(exist_ok=True)
33
- SESSION_LOCK = LOCK_DIR / "charging.lck"
34
- _lock_task: asyncio.Task | None = None
35
-
36
-
37
- def connector_slug(value: int | str | None) -> str:
38
- """Return the canonical slug for a connector value."""
39
-
40
- if value in (None, "", AGGREGATE_SLUG):
41
- return AGGREGATE_SLUG
42
- try:
43
- return str(int(value))
44
- except (TypeError, ValueError):
45
- return str(value)
46
-
47
-
48
- def identity_key(serial: str, connector: int | str | None) -> str:
49
- """Return the identity key used for in-memory store lookups."""
50
-
51
- return f"{serial}{IDENTITY_SEPARATOR}{connector_slug(connector)}"
52
-
53
-
54
- def pending_key(serial: str) -> str:
55
- """Return the key used before a connector id has been negotiated."""
56
-
57
- return f"{serial}{IDENTITY_SEPARATOR}{PENDING_SLUG}"
58
-
59
-
60
- def _candidate_keys(serial: str, connector: int | str | None) -> list[str]:
61
- """Return possible keys for lookups with fallbacks."""
62
-
63
- keys: list[str] = []
64
- if connector not in (None, "", AGGREGATE_SLUG):
65
- keys.append(identity_key(serial, connector))
66
- else:
67
- keys.append(identity_key(serial, None))
68
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
69
- for key in connections.keys():
70
- if key.startswith(prefix) and key not in keys:
71
- keys.append(key)
72
- keys.append(pending_key(serial))
73
- keys.append(serial)
74
- seen: set[str] = set()
75
- result: list[str] = []
76
- for key in keys:
77
- if key and key not in seen:
78
- seen.add(key)
79
- result.append(key)
80
- return result
81
-
82
-
83
- def iter_identity_keys(serial: str) -> list[str]:
84
- """Return all known keys for the provided serial."""
85
-
86
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
87
- keys = [key for key in connections.keys() if key.startswith(prefix)]
88
- if serial in connections:
89
- keys.append(serial)
90
- return keys
91
-
92
-
93
- def is_connected(serial: str, connector: int | str | None = None) -> bool:
94
- """Return whether a connection exists for the provided charger identity."""
95
-
96
- if connector in (None, "", AGGREGATE_SLUG):
97
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
98
- return (
99
- any(key.startswith(prefix) for key in connections) or serial in connections
100
- )
101
- return any(key in connections for key in _candidate_keys(serial, connector))
102
-
103
-
104
- def get_connection(serial: str, connector: int | str | None = None):
105
- """Return the websocket consumer for the requested identity, if any."""
106
-
107
- for key in _candidate_keys(serial, connector):
108
- conn = connections.get(key)
109
- if conn is not None:
110
- return conn
111
- return None
112
-
113
-
114
- def set_connection(serial: str, connector: int | str | None, consumer) -> str:
115
- """Store a websocket consumer under the negotiated identity."""
116
-
117
- key = identity_key(serial, connector)
118
- connections[key] = consumer
119
- return key
120
-
121
-
122
- def pop_connection(serial: str, connector: int | str | None = None):
123
- """Remove a stored connection for the given identity."""
124
-
125
- for key in _candidate_keys(serial, connector):
126
- conn = connections.pop(key, None)
127
- if conn is not None:
128
- return conn
129
- return None
130
-
131
-
132
- def get_transaction(serial: str, connector: int | str | None = None):
133
- """Return the active transaction for the provided identity."""
134
-
135
- for key in _candidate_keys(serial, connector):
136
- tx = transactions.get(key)
137
- if tx is not None:
138
- return tx
139
- return None
140
-
141
-
142
- def set_transaction(serial: str, connector: int | str | None, tx) -> str:
143
- """Store an active transaction under the provided identity."""
144
-
145
- key = identity_key(serial, connector)
146
- transactions[key] = tx
147
- return key
148
-
149
-
150
- def pop_transaction(serial: str, connector: int | str | None = None):
151
- """Remove and return an active transaction for the identity."""
152
-
153
- for key in _candidate_keys(serial, connector):
154
- tx = transactions.pop(key, None)
155
- if tx is not None:
156
- return tx
157
- return None
158
-
159
-
160
- def reassign_identity(old_key: str, new_key: str) -> str:
161
- """Move any stored data from ``old_key`` to ``new_key``."""
162
-
163
- if old_key == new_key:
164
- return new_key
165
- if not old_key:
166
- return new_key
167
- for mapping in (connections, transactions, history):
168
- if old_key in mapping:
169
- mapping[new_key] = mapping.pop(old_key)
170
- for log_type in logs:
171
- store = logs[log_type]
172
- if old_key in store:
173
- store[new_key] = store.pop(old_key)
174
- for log_type in log_names:
175
- names = log_names[log_type]
176
- if old_key in names:
177
- names[new_key] = names.pop(old_key)
178
- return new_key
179
-
180
-
181
- async def _touch_lock() -> None:
182
- try:
183
- while True:
184
- SESSION_LOCK.touch()
185
- await asyncio.sleep(60)
186
- except asyncio.CancelledError:
187
- pass
188
-
189
-
190
- def start_session_lock() -> None:
191
- global _lock_task
192
- SESSION_LOCK.touch()
193
- loop = asyncio.get_event_loop()
194
- if _lock_task is None or _lock_task.done():
195
- _lock_task = loop.create_task(_touch_lock())
196
-
197
-
198
- def stop_session_lock() -> None:
199
- global _lock_task
200
- if _lock_task:
201
- _lock_task.cancel()
202
- _lock_task = None
203
- if SESSION_LOCK.exists():
204
- SESSION_LOCK.unlink()
205
-
206
-
207
- def register_log_name(cid: str, name: str, log_type: str = "charger") -> None:
208
- """Register a friendly name for the id used in log files."""
209
-
210
- names = log_names[log_type]
211
- # Ensure lookups are case-insensitive by overwriting any existing entry
212
- # that matches the provided cid regardless of case.
213
- for key in list(names.keys()):
214
- if key.lower() == cid.lower():
215
- cid = key
216
- break
217
- names[cid] = name
218
-
219
-
220
- def _safe_name(name: str) -> str:
221
- return re.sub(r"[^\w.-]", "_", name)
222
-
223
-
224
- def _file_path(cid: str, log_type: str = "charger") -> Path:
225
- name = log_names[log_type].get(cid, cid)
226
- return LOG_DIR / f"{log_type}.{_safe_name(name)}.log"
227
-
228
-
229
- def add_log(cid: str, entry: str, log_type: str = "charger") -> None:
230
- """Append a timestamped log entry for the given id and log type."""
231
-
232
- timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
233
- entry = f"{timestamp} {entry}"
234
-
235
- store = logs[log_type]
236
- # Store log entries under the cid as provided but allow retrieval using
237
- # any casing by recording entries in a case-insensitive manner.
238
- key = next((k for k in store.keys() if k.lower() == cid.lower()), cid)
239
- store.setdefault(key, []).append(entry)
240
- path = _file_path(key, log_type)
241
- with path.open("a", encoding="utf-8") as handle:
242
- handle.write(entry + "\n")
243
-
244
-
245
- def _session_folder(cid: str) -> Path:
246
- """Return the folder path for session logs for the given charger."""
247
-
248
- name = log_names["charger"].get(cid, cid)
249
- folder = SESSION_DIR / _safe_name(name)
250
- folder.mkdir(parents=True, exist_ok=True)
251
- return folder
252
-
253
-
254
- def start_session_log(cid: str, tx_id: int) -> None:
255
- """Begin logging a session for the given charger and transaction id."""
256
-
257
- history[cid] = {
258
- "transaction": tx_id,
259
- "start": datetime.utcnow(),
260
- "messages": [],
261
- }
262
-
263
-
264
- def add_session_message(cid: str, message: str) -> None:
265
- """Record a raw message for the current session if one is active."""
266
-
267
- sess = history.get(cid)
268
- if not sess:
269
- return
270
- sess["messages"].append(
271
- {
272
- "timestamp": datetime.utcnow().isoformat() + "Z",
273
- "message": message,
274
- }
275
- )
276
-
277
-
278
- def end_session_log(cid: str) -> None:
279
- """Write any recorded session log to disk for the given charger."""
280
-
281
- sess = history.pop(cid, None)
282
- if not sess:
283
- return
284
- folder = _session_folder(cid)
285
- date = sess["start"].strftime("%Y%m%d")
286
- tx_id = sess.get("transaction")
287
- filename = f"{date}_{tx_id}.json"
288
- path = folder / filename
289
- with path.open("w", encoding="utf-8") as handle:
290
- json.dump(sess["messages"], handle, ensure_ascii=False, indent=2)
291
-
292
-
293
- def _log_key_candidates(cid: str, log_type: str) -> list[str]:
294
- """Return log identifiers to inspect for the requested cid."""
295
-
296
- if IDENTITY_SEPARATOR not in cid:
297
- return [cid]
298
- serial, slug = cid.split(IDENTITY_SEPARATOR, 1)
299
- slug = slug or AGGREGATE_SLUG
300
- if slug != AGGREGATE_SLUG:
301
- return [cid]
302
- keys: list[str] = [identity_key(serial, None)]
303
- prefix = f"{serial}{IDENTITY_SEPARATOR}"
304
- for source in (log_names[log_type], logs[log_type]):
305
- for key in source.keys():
306
- if key.startswith(prefix) and key not in keys:
307
- keys.append(key)
308
- return keys
309
-
310
-
311
- def _resolve_log_identifier(cid: str, log_type: str) -> tuple[str, str | None]:
312
- """Return the canonical key and friendly name for ``cid``."""
313
-
314
- names = log_names[log_type]
315
- name = names.get(cid)
316
- if name is None:
317
- lower = cid.lower()
318
- for key, value in names.items():
319
- if key.lower() == lower:
320
- cid = key
321
- name = value
322
- break
323
- else:
324
- try:
325
- if log_type == "simulator":
326
- from .models import Simulator
327
-
328
- sim = Simulator.objects.filter(cp_path__iexact=cid).first()
329
- if sim:
330
- cid = sim.cp_path
331
- name = sim.name
332
- names[cid] = name
333
- else:
334
- from .models import Charger
335
-
336
- serial = cid.split(IDENTITY_SEPARATOR, 1)[0]
337
- ch = Charger.objects.filter(charger_id__iexact=serial).first()
338
- if ch and ch.name:
339
- name = ch.name
340
- names[cid] = name
341
- except Exception: # pragma: no cover - best effort lookup
342
- pass
343
- return cid, name
344
-
345
-
346
- def _log_file_for_identifier(cid: str, name: str | None, log_type: str) -> Path:
347
- path = _file_path(cid, log_type)
348
- if not path.exists():
349
- target = f"{log_type}.{_safe_name(name or cid).lower()}"
350
- for file in LOG_DIR.glob(f"{log_type}.*.log"):
351
- if file.stem.lower() == target:
352
- path = file
353
- break
354
- return path
355
-
356
-
357
- def _memory_logs_for_identifier(cid: str, log_type: str) -> list[str]:
358
- store = logs[log_type]
359
- lower = cid.lower()
360
- for key, entries in store.items():
361
- if key.lower() == lower:
362
- return entries
363
- return []
364
-
365
-
366
- def get_logs(cid: str, log_type: str = "charger") -> list[str]:
367
- """Return all log entries for the given id and type."""
368
-
369
- entries: list[str] = []
370
- seen_paths: set[Path] = set()
371
- seen_keys: set[str] = set()
372
- for key in _log_key_candidates(cid, log_type):
373
- resolved, name = _resolve_log_identifier(key, log_type)
374
- path = _log_file_for_identifier(resolved, name, log_type)
375
- if path.exists() and path not in seen_paths:
376
- entries.extend(path.read_text(encoding="utf-8").splitlines())
377
- seen_paths.add(path)
378
- memory_entries = _memory_logs_for_identifier(resolved, log_type)
379
- lower_key = resolved.lower()
380
- if memory_entries and lower_key not in seen_keys:
381
- entries.extend(memory_entries)
382
- seen_keys.add(lower_key)
383
- return entries
384
-
385
-
386
- def clear_log(cid: str, log_type: str = "charger") -> None:
387
- """Remove any stored logs for the given id and type."""
388
- for key in _log_key_candidates(cid, log_type):
389
- store_map = logs[log_type]
390
- resolved = next(
391
- (k for k in list(store_map.keys()) if k.lower() == key.lower()),
392
- key,
393
- )
394
- store_map.pop(resolved, None)
395
- path = _file_path(resolved, log_type)
396
- if not path.exists():
397
- target = f"{log_type}.{_safe_name(log_names[log_type].get(resolved, resolved)).lower()}"
398
- for file in LOG_DIR.glob(f"{log_type}.*.log"):
399
- if file.stem.lower() == target:
400
- path = file
401
- break
402
- if path.exists():
403
- path.unlink()
1
+ """In-memory store for OCPP data with file backed logs."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import asyncio
6
+ from datetime import datetime, timezone
7
+ import json
8
+ from pathlib import Path
9
+ import re
10
+ import threading
11
+
12
+ from core.log_paths import select_log_dir
13
+
14
+ IDENTITY_SEPARATOR = "#"
15
+ AGGREGATE_SLUG = "all"
16
+ PENDING_SLUG = "pending"
17
+
18
+ MAX_CONNECTIONS_PER_IP = 2
19
+
20
+ connections: dict[str, object] = {}
21
+ transactions: dict[str, object] = {}
22
+ logs: dict[str, dict[str, list[str]]] = {"charger": {}, "simulator": {}}
23
+ # store per charger session logs before they are flushed to disk
24
+ history: dict[str, dict[str, object]] = {}
25
+ simulators = {}
26
+ ip_connections: dict[str, set[object]] = {}
27
+ pending_calls: dict[str, dict[str, object]] = {}
28
+ _pending_call_events: dict[str, threading.Event] = {}
29
+ _pending_call_results: dict[str, dict[str, object]] = {}
30
+ _pending_call_lock = threading.Lock()
31
+ triggered_followups: dict[str, list[dict[str, object]]] = {}
32
+
33
+ # mapping of charger id / cp_path to friendly names used for log files
34
+ log_names: dict[str, dict[str, str]] = {"charger": {}, "simulator": {}}
35
+
36
+ BASE_DIR = Path(__file__).resolve().parent.parent
37
+ LOG_DIR = select_log_dir(BASE_DIR)
38
+ SESSION_DIR = LOG_DIR / "sessions"
39
+ SESSION_DIR.mkdir(exist_ok=True)
40
+ LOCK_DIR = BASE_DIR / "locks"
41
+ LOCK_DIR.mkdir(exist_ok=True)
42
+ SESSION_LOCK = LOCK_DIR / "charging.lck"
43
+ _lock_task: asyncio.Task | None = None
44
+
45
+
46
def connector_slug(value: int | str | None) -> str:
    """Return the canonical slug for a connector value."""

    # Missing / blank connectors collapse to the aggregate slug.
    if value is None or value == "" or value == AGGREGATE_SLUG:
        return AGGREGATE_SLUG
    try:
        numeric = int(value)
    except (TypeError, ValueError):
        return str(value)
    return str(numeric)
55
+
56
+
57
def identity_key(serial: str, connector: int | str | None) -> str:
    """Return the identity key used for in-memory store lookups."""

    slug = connector_slug(connector)
    return IDENTITY_SEPARATOR.join((serial, slug))
61
+
62
+
63
def register_ip_connection(ip: str | None, consumer: object) -> bool:
    """Track a websocket connection for the provided client IP.

    Returns ``False`` when accepting ``consumer`` would exceed the per-IP
    connection limit; unknown IPs are always accepted.
    """

    if not ip:
        return True
    active = ip_connections.setdefault(ip, set())
    if consumer not in active:
        if len(active) >= MAX_CONNECTIONS_PER_IP:
            return False
        active.add(consumer)
    return True
75
+
76
+
77
def release_ip_connection(ip: str | None, consumer: object) -> None:
    """Remove a websocket connection from the active client registry."""

    if not ip:
        return
    active = ip_connections.get(ip)
    if active:
        active.discard(consumer)
        # Drop the registry entry once the last connection is gone.
        if not active:
            ip_connections.pop(ip, None)
88
+
89
+
90
def pending_key(serial: str) -> str:
    """Return the key used before a connector id has been negotiated."""

    return IDENTITY_SEPARATOR.join((serial, PENDING_SLUG))
94
+
95
+
96
def _candidate_keys(serial: str, connector: int | str | None) -> list[str]:
    """Return possible keys for lookups with fallbacks.

    The preferred identity comes first, followed by any live connection keys
    sharing the serial, the pre-negotiation key, and the bare serial.
    """

    if connector in (None, "", AGGREGATE_SLUG):
        candidates = [identity_key(serial, None)]
    else:
        candidates = [identity_key(serial, connector)]
    prefix = serial + IDENTITY_SEPARATOR
    extra = [
        key
        for key in connections
        if key.startswith(prefix) and key not in candidates
    ]
    candidates.extend(extra)
    candidates.append(pending_key(serial))
    candidates.append(serial)
    # Deduplicate while preserving the priority order established above.
    seen: set[str] = set()
    ordered: list[str] = []
    for candidate in candidates:
        if candidate and candidate not in seen:
            seen.add(candidate)
            ordered.append(candidate)
    return ordered
117
+
118
+
119
def iter_identity_keys(serial: str) -> list[str]:
    """Return all known keys for the provided serial."""

    prefix = serial + IDENTITY_SEPARATOR
    matches = [key for key in connections if key.startswith(prefix)]
    # Legacy entries may be stored under the bare serial.
    if serial in connections:
        matches.append(serial)
    return matches
127
+
128
+
129
def is_connected(serial: str, connector: int | str | None = None) -> bool:
    """Return whether a connection exists for the provided charger identity."""

    if connector in (None, "", AGGREGATE_SLUG):
        # Aggregate query: any connector (or a bare-serial entry) counts.
        if serial in connections:
            return True
        prefix = serial + IDENTITY_SEPARATOR
        return any(key.startswith(prefix) for key in connections)
    return any(key in connections for key in _candidate_keys(serial, connector))
138
+
139
+
140
def get_connection(serial: str, connector: int | str | None = None):
    """Return the websocket consumer for the requested identity, if any."""

    for candidate in _candidate_keys(serial, connector):
        consumer = connections.get(candidate)
        if consumer is not None:
            return consumer
    return None
148
+
149
+
150
def set_connection(serial: str, connector: int | str | None, consumer) -> str:
    """Store a websocket consumer under the negotiated identity.

    Returns the identity key the consumer was stored under.
    """

    stored_key = identity_key(serial, connector)
    connections[stored_key] = consumer
    return stored_key
156
+
157
+
158
def pop_connection(serial: str, connector: int | str | None = None):
    """Remove a stored connection for the given identity."""

    for candidate in _candidate_keys(serial, connector):
        removed = connections.pop(candidate, None)
        if removed is not None:
            return removed
    return None
166
+
167
+
168
def get_transaction(serial: str, connector: int | str | None = None):
    """Return the active transaction for the provided identity."""

    for candidate in _candidate_keys(serial, connector):
        active = transactions.get(candidate)
        if active is not None:
            return active
    return None
176
+
177
+
178
def set_transaction(serial: str, connector: int | str | None, tx) -> str:
    """Store an active transaction under the provided identity.

    Returns the identity key the transaction was stored under.
    """

    stored_key = identity_key(serial, connector)
    transactions[stored_key] = tx
    return stored_key
184
+
185
+
186
def pop_transaction(serial: str, connector: int | str | None = None):
    """Remove and return an active transaction for the identity."""

    for candidate in _candidate_keys(serial, connector):
        removed = transactions.pop(candidate, None)
        if removed is not None:
            return removed
    return None
194
+
195
+
196
def register_pending_call(message_id: str, metadata: dict[str, object]) -> None:
    """Store metadata about an outstanding CSMS call."""

    # Copy so later caller-side mutation cannot leak into the registry.
    snapshot = dict(metadata)
    with _pending_call_lock:
        pending_calls[message_id] = snapshot
        _pending_call_events[message_id] = threading.Event()
        # Discard any stale result left over from a reused message id.
        _pending_call_results.pop(message_id, None)
205
+
206
+
207
def pop_pending_call(message_id: str) -> dict[str, object] | None:
    """Return and remove metadata for a previously registered call."""

    with _pending_call_lock:
        metadata = pending_calls.pop(message_id, None)
    return metadata
212
+
213
+
214
def record_pending_call_result(
    message_id: str,
    *,
    metadata: dict[str, object] | None = None,
    success: bool = True,
    payload: object | None = None,
    error_code: str | None = None,
    error_description: str | None = None,
    error_details: object | None = None,
) -> None:
    """Record the outcome for a previously registered pending call.

    Stores the result and wakes any thread blocked in
    ``wait_for_pending_call`` for the same ``message_id``.
    """

    outcome: dict[str, object] = {
        "metadata": dict(metadata or {}),
        "success": success,
        "payload": payload,
        "error_code": error_code,
        "error_description": error_description,
        "error_details": error_details,
    }
    with _pending_call_lock:
        _pending_call_results[message_id] = outcome
        waiter = _pending_call_events.pop(message_id, None)
        if waiter:
            waiter.set()
239
+
240
+
241
def wait_for_pending_call(
    message_id: str, *, timeout: float = 5.0
) -> dict[str, object] | None:
    """Wait for a pending call to be resolved and return the stored result.

    Returns ``None`` when no call is registered under ``message_id`` or no
    result arrives within ``timeout`` seconds.  On timeout the event/result
    bookkeeping for ``message_id`` is discarded so entries cannot accumulate
    for calls that are never answered (previously the event was leaked until
    ``clear_pending_calls`` happened to run for the same charger).
    """

    with _pending_call_lock:
        # The result may already have been recorded before we started waiting.
        existing = _pending_call_results.pop(message_id, None)
        if existing is not None:
            return existing
        event = _pending_call_events.get(message_id)
    if not event:
        return None
    # Wait outside the lock so record_pending_call_result() can proceed.
    if not event.wait(timeout):
        with _pending_call_lock:
            _pending_call_events.pop(message_id, None)
            _pending_call_results.pop(message_id, None)
        return None
    with _pending_call_lock:
        result = _pending_call_results.pop(message_id, None)
        # The recorder already popped the event; this is a no-op safety net.
        _pending_call_events.pop(message_id, None)
    return result
259
+
260
+
261
def schedule_call_timeout(
    message_id: str,
    *,
    timeout: float = 5.0,
    action: str | None = None,
    log_key: str | None = None,
    log_type: str = "charger",
    message: str | None = None,
) -> None:
    """Schedule a timeout notice if a pending call is not answered."""

    def _emit_notice() -> None:
        with _pending_call_lock:
            pending = pending_calls.get(message_id)
            if not pending:
                # Call was answered (or cleared) before the timer fired.
                return
            if action and pending.get("action") != action:
                return
            if pending.get("timeout_notice_sent"):
                # Never notify twice for the same call.
                return
            destination = log_key or pending.get("log_key")
            if not destination:
                pending["timeout_notice_sent"] = True
                return
            notice = message
            if not notice:
                verb = action or str(pending.get("action") or "Call")
                notice = f"{verb} request timed out"
            add_log(destination, notice, log_type=log_type)
            pending["timeout_notice_sent"] = True

    watchdog = threading.Timer(timeout, _emit_notice)
    watchdog.daemon = True
    watchdog.start()
295
+
296
+
297
def register_triggered_followup(
    serial: str,
    action: str,
    *,
    connector: int | str | None = None,
    log_key: str | None = None,
    target: str | None = None,
) -> None:
    """Record that ``serial`` should send ``action`` after a TriggerMessage."""

    followup = {
        "action": action,
        "connector": connector_slug(connector),
        "log_key": log_key,
        "target": target,
    }
    queue = triggered_followups.setdefault(serial, [])
    queue.append(followup)
314
+
315
+
316
def consume_triggered_followup(
    serial: str, action: str, connector: int | str | None = None
) -> dict[str, object] | None:
    """Return metadata for a previously registered follow-up message.

    The first queued entry matching ``action`` (and connector, unless the
    entry was registered for the aggregate slug) is removed and returned.
    """

    queue = triggered_followups.get(serial)
    if not queue:
        return None
    wanted_slug = connector_slug(connector)
    for position, entry in enumerate(queue):
        if entry.get("action") != action:
            continue
        expected = entry.get("connector")
        # Aggregate entries match any connector.
        if expected != AGGREGATE_SLUG and expected != wanted_slug:
            continue
        found = queue.pop(position)
        if not queue:
            triggered_followups.pop(serial, None)
        return found
    return None
340
+
341
+
342
def clear_pending_calls(serial: str) -> None:
    """Remove any pending calls associated with the provided charger id."""

    with _pending_call_lock:
        stale = [
            message_id
            for message_id, meta in pending_calls.items()
            if meta.get("charger_id") == serial
        ]
        for message_id in stale:
            pending_calls.pop(message_id, None)
            _pending_call_events.pop(message_id, None)
            _pending_call_results.pop(message_id, None)
        triggered_followups.pop(serial, None)
356
+
357
+
358
def reassign_identity(old_key: str, new_key: str) -> str:
    """Move any stored data from ``old_key`` to ``new_key``."""

    if not old_key or old_key == new_key:
        return new_key
    for mapping in (connections, transactions, history):
        if old_key in mapping:
            mapping[new_key] = mapping.pop(old_key)
    for per_type_logs in logs.values():
        if old_key in per_type_logs:
            per_type_logs[new_key] = per_type_logs.pop(old_key)
    for per_type_names in log_names.values():
        if old_key in per_type_names:
            per_type_names[new_key] = per_type_names.pop(old_key)
    return new_key
377
+
378
+
379
async def _touch_lock() -> None:
    """Refresh the charging lock file once a minute until cancelled."""

    try:
        while True:
            SESSION_LOCK.touch()
            await asyncio.sleep(60)
    except asyncio.CancelledError:
        # Cancellation is the normal shutdown path; exit quietly.
        pass
386
+
387
+
388
def start_session_lock() -> None:
    """Create the charging lock file and start the background refresher."""

    global _lock_task
    SESSION_LOCK.touch()
    loop = asyncio.get_event_loop()
    # Spawn a refresher only when none is currently running.
    if _lock_task is None or _lock_task.done():
        _lock_task = loop.create_task(_touch_lock())
394
+
395
+
396
def stop_session_lock() -> None:
    """Cancel the lock refresher and remove the charging lock file."""

    global _lock_task
    if _lock_task:
        _lock_task.cancel()
        _lock_task = None
    # ``missing_ok`` avoids the TOCTOU race between an exists() check and
    # unlink() when another process removes the lock file first.
    SESSION_LOCK.unlink(missing_ok=True)
403
+
404
+
405
def register_log_name(cid: str, name: str, log_type: str = "charger") -> None:
    """Register a friendly name for the id used in log files."""

    names = log_names[log_type]
    # Reuse an existing key that matches case-insensitively so repeated
    # registrations overwrite instead of creating near-duplicate entries.
    wanted = cid.lower()
    canonical = next((key for key in names if key.lower() == wanted), cid)
    names[canonical] = name
416
+
417
+
418
+ def _safe_name(name: str) -> str:
419
+ return re.sub(r"[^\w.-]", "_", name)
420
+
421
+
422
def _file_path(cid: str, log_type: str = "charger") -> Path:
    """Return the on-disk log file path for the given id and log type."""

    friendly = log_names[log_type].get(cid, cid)
    return LOG_DIR / f"{log_type}.{_safe_name(friendly)}.log"
425
+
426
+
427
def add_log(cid: str, entry: str, log_type: str = "charger") -> None:
    """Append a timestamped log entry for the given id and log type."""

    # Millisecond-precision UTC timestamp prefix.
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
    line = f"{stamp} {entry}"

    store = logs[log_type]
    # Reuse an existing key matching case-insensitively so entries remain
    # retrievable regardless of the casing callers use.
    wanted = cid.lower()
    key = next((k for k in store if k.lower() == wanted), cid)
    store.setdefault(key, []).append(line)
    with _file_path(key, log_type).open("a", encoding="utf-8") as handle:
        handle.write(line + "\n")
441
+
442
+
443
def _session_folder(cid: str) -> Path:
    """Return (and create) the session-log folder for the given charger."""

    friendly = log_names["charger"].get(cid, cid)
    folder = SESSION_DIR / _safe_name(friendly)
    folder.mkdir(parents=True, exist_ok=True)
    return folder
450
+
451
+
452
def start_session_log(cid: str, tx_id: int) -> None:
    """Begin logging a session for the given charger and transaction id."""

    session = {
        "transaction": tx_id,
        "start": datetime.now(timezone.utc),
        "messages": [],
    }
    # Any previous in-flight session for this charger is discarded.
    history[cid] = session
460
+
461
+
462
def add_session_message(cid: str, message: str) -> None:
    """Record a raw message for the current session if one is active."""

    session = history.get(cid)
    if not session:
        # No active session for this charger; silently drop the message.
        return
    stamp = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    session["messages"].append({"timestamp": stamp, "message": message})
476
+
477
+
478
def end_session_log(cid: str) -> None:
    """Write any recorded session log to disk for the given charger."""

    session = history.pop(cid, None)
    if not session:
        return
    # File name: <start date YYYYMMDD>_<transaction id>.json
    day = session["start"].strftime("%Y%m%d")
    target = _session_folder(cid) / f"{day}_{session.get('transaction')}.json"
    with target.open("w", encoding="utf-8") as handle:
        json.dump(session["messages"], handle, ensure_ascii=False, indent=2)
491
+
492
+
493
def _log_key_candidates(cid: str, log_type: str) -> list[str]:
    """Return log identifiers to inspect for the requested cid."""

    if IDENTITY_SEPARATOR not in cid:
        return [cid]
    serial, _, slug = cid.partition(IDENTITY_SEPARATOR)
    # Only the aggregate slug (explicit or implied by an empty slug) fans
    # out to multiple identifiers; any other slug is looked up verbatim.
    if (slug or AGGREGATE_SLUG) != AGGREGATE_SLUG:
        return [cid]
    candidates = [identity_key(serial, None)]
    prefix = serial + IDENTITY_SEPARATOR
    for mapping in (log_names[log_type], logs[log_type]):
        for key in mapping:
            if key.startswith(prefix) and key not in candidates:
                candidates.append(key)
    return candidates
509
+
510
+
511
def _resolve_log_identifier(cid: str, log_type: str) -> tuple[str, str | None]:
    """Return the canonical key and friendly name for ``cid``.

    Resolution order:
    1. exact match in the registered-names map;
    2. case-insensitive match in the same map (adopting the stored casing);
    3. best-effort database lookup (``Simulator`` by ``cp_path`` or
       ``Charger`` by serial), caching any hit back into the map.
    Returns ``(cid, None)`` when nothing matches.
    """

    names = log_names[log_type]
    name = names.get(cid)
    if name is None:
        lower = cid.lower()
        for key, value in names.items():
            if key.lower() == lower:
                # Adopt the registered casing as the canonical key.
                cid = key
                name = value
                break
        else:
            # No in-memory match at all: fall back to the database.
            # Imports are local to avoid circular imports at module load.
            try:
                if log_type == "simulator":
                    from .models import Simulator

                    sim = Simulator.objects.filter(cp_path__iexact=cid).first()
                    if sim:
                        cid = sim.cp_path
                        name = sim.name
                        # Cache the hit for subsequent lookups.
                        names[cid] = name
                else:
                    from .models import Charger

                    # Only the serial part (before the identity separator)
                    # identifies the charger row.
                    serial = cid.split(IDENTITY_SEPARATOR, 1)[0]
                    ch = Charger.objects.filter(charger_id__iexact=serial).first()
                    if ch and ch.name:
                        name = ch.name
                        names[cid] = name
            except Exception:  # pragma: no cover - best effort lookup
                # Deliberate swallow: log resolution must never fail just
                # because the database is unavailable.
                pass
    return cid, name
544
+
545
+
546
def _log_file_for_identifier(cid: str, name: str | None, log_type: str) -> Path:
    """Locate the log file for ``cid``, tolerating case differences on disk."""
    path = _file_path(cid, log_type)
    if path.exists():
        return path
    # Fall back to a case-insensitive scan over existing files of this type.
    wanted = f"{log_type}.{_safe_name(name or cid).lower()}"
    for candidate in LOG_DIR.glob(f"{log_type}.*.log"):
        if candidate.stem.lower() == wanted:
            return candidate
    # Nothing matched: return the computed (non-existent) default path.
    return path
555
+
556
+
557
def _memory_logs_for_identifier(cid: str, log_type: str) -> list[str]:
    """Return in-memory log entries for ``cid`` (case-insensitive key match)."""
    wanted = cid.lower()
    matches = (
        entries
        for key, entries in logs[log_type].items()
        if key.lower() == wanted
    )
    # Empty list when no key matches, mirroring a missing identifier.
    return next(matches, [])
564
+
565
+
566
def get_logs(cid: str, log_type: str = "charger") -> list[str]:
    """Return all log entries for the given id and type."""

    collected: list[str] = []
    visited_paths: set[Path] = set()
    visited_keys: set[str] = set()
    for candidate in _log_key_candidates(cid, log_type):
        resolved, friendly = _resolve_log_identifier(candidate, log_type)
        # On-disk entries first, deduplicated by file path.
        log_file = _log_file_for_identifier(resolved, friendly, log_type)
        if log_file not in visited_paths and log_file.exists():
            collected.extend(log_file.read_text(encoding="utf-8").splitlines())
            visited_paths.add(log_file)
        # Then in-memory entries, deduplicated by lower-cased key.
        lowered = resolved.lower()
        in_memory = _memory_logs_for_identifier(resolved, log_type)
        if in_memory and lowered not in visited_keys:
            collected.extend(in_memory)
            visited_keys.add(lowered)
    return collected
584
+
585
+
586
def clear_log(cid: str, log_type: str = "charger") -> None:
    """Remove any stored logs for the given id and type.

    Clears both the in-memory entries and the on-disk log file for every
    identifier that resolves to ``cid`` (including aggregate fan-out),
    matching stored keys case-insensitively. File removal is tolerant of
    the file disappearing concurrently.
    """
    # ``logs[log_type]`` is loop-invariant; look it up once.
    store_map = logs[log_type]
    for key in _log_key_candidates(cid, log_type):
        lowered = key.lower()
        resolved = next(
            (k for k in list(store_map.keys()) if k.lower() == lowered),
            key,
        )
        store_map.pop(resolved, None)
        path = _file_path(resolved, log_type)
        if not path.exists():
            # Case-insensitive fallback scan for a file written under a
            # differently-cased friendly name.
            target = f"{log_type}.{_safe_name(log_names[log_type].get(resolved, resolved)).lower()}"
            for file in LOG_DIR.glob(f"{log_type}.*.log"):
                if file.stem.lower() == target:
                    path = file
                    break
        # ``missing_ok`` avoids a TOCTOU race between the existence check
        # above and the unlink here.
        path.unlink(missing_ok=True)
+ path.unlink()