arthexis 0.1.7__py3-none-any.whl → 0.1.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arthexis might be problematic. See the package registry's advisory page for more details.

Files changed (82)
  1. arthexis-0.1.9.dist-info/METADATA +168 -0
  2. arthexis-0.1.9.dist-info/RECORD +92 -0
  3. arthexis-0.1.9.dist-info/licenses/LICENSE +674 -0
  4. config/__init__.py +0 -1
  5. config/auth_app.py +0 -1
  6. config/celery.py +1 -2
  7. config/context_processors.py +1 -1
  8. config/offline.py +2 -0
  9. config/settings.py +134 -16
  10. config/urls.py +71 -3
  11. core/admin.py +1331 -165
  12. core/admin_history.py +50 -0
  13. core/admindocs.py +151 -0
  14. core/apps.py +158 -3
  15. core/backends.py +46 -4
  16. core/entity.py +62 -48
  17. core/fields.py +6 -1
  18. core/github_helper.py +25 -0
  19. core/github_issues.py +172 -0
  20. core/lcd_screen.py +1 -0
  21. core/liveupdate.py +25 -0
  22. core/log_paths.py +100 -0
  23. core/mailer.py +83 -0
  24. core/middleware.py +57 -0
  25. core/models.py +1136 -259
  26. core/notifications.py +11 -1
  27. core/public_wifi.py +227 -0
  28. core/release.py +27 -20
  29. core/sigil_builder.py +131 -0
  30. core/sigil_context.py +20 -0
  31. core/sigil_resolver.py +284 -0
  32. core/system.py +129 -10
  33. core/tasks.py +118 -19
  34. core/test_system_info.py +22 -0
  35. core/tests.py +445 -58
  36. core/tests_liveupdate.py +17 -0
  37. core/urls.py +2 -2
  38. core/user_data.py +329 -167
  39. core/views.py +383 -57
  40. core/widgets.py +51 -0
  41. core/workgroup_urls.py +17 -0
  42. core/workgroup_views.py +94 -0
  43. nodes/actions.py +0 -2
  44. nodes/admin.py +159 -284
  45. nodes/apps.py +9 -15
  46. nodes/backends.py +53 -0
  47. nodes/lcd.py +24 -10
  48. nodes/models.py +375 -178
  49. nodes/tasks.py +1 -5
  50. nodes/tests.py +524 -129
  51. nodes/utils.py +13 -2
  52. nodes/views.py +66 -23
  53. ocpp/admin.py +150 -61
  54. ocpp/apps.py +4 -3
  55. ocpp/consumers.py +432 -69
  56. ocpp/evcs.py +25 -8
  57. ocpp/models.py +408 -68
  58. ocpp/simulator.py +13 -6
  59. ocpp/store.py +258 -30
  60. ocpp/tasks.py +11 -7
  61. ocpp/test_export_import.py +8 -7
  62. ocpp/test_rfid.py +211 -16
  63. ocpp/tests.py +1198 -135
  64. ocpp/transactions_io.py +68 -22
  65. ocpp/urls.py +35 -2
  66. ocpp/views.py +654 -101
  67. pages/admin.py +173 -13
  68. pages/checks.py +0 -1
  69. pages/context_processors.py +19 -6
  70. pages/middleware.py +153 -0
  71. pages/models.py +37 -9
  72. pages/tests.py +759 -40
  73. pages/urls.py +3 -0
  74. pages/utils.py +0 -1
  75. pages/views.py +576 -25
  76. arthexis-0.1.7.dist-info/METADATA +0 -126
  77. arthexis-0.1.7.dist-info/RECORD +0 -77
  78. arthexis-0.1.7.dist-info/licenses/LICENSE +0 -21
  79. config/workgroup_app.py +0 -7
  80. core/checks.py +0 -29
  81. {arthexis-0.1.7.dist-info → arthexis-0.1.9.dist-info}/WHEEL +0 -0
  82. {arthexis-0.1.7.dist-info → arthexis-0.1.9.dist-info}/top_level.txt +0 -0
ocpp/store.py CHANGED
@@ -6,9 +6,16 @@ from pathlib import Path
6
6
  from datetime import datetime
7
7
  import json
8
8
  import re
9
+ import asyncio
9
10
 
10
- connections = {}
11
- transactions = {}
11
+ from core.log_paths import select_log_dir
12
+
13
+ IDENTITY_SEPARATOR = "#"
14
+ AGGREGATE_SLUG = "all"
15
+ PENDING_SLUG = "pending"
16
+
17
+ connections: dict[str, object] = {}
18
+ transactions: dict[str, object] = {}
12
19
  logs: dict[str, dict[str, list[str]]] = {"charger": {}, "simulator": {}}
13
20
  # store per charger session logs before they are flushed to disk
14
21
  history: dict[str, dict[str, object]] = {}
@@ -17,10 +24,184 @@ simulators = {}
17
24
  # mapping of charger id / cp_path to friendly names used for log files
18
25
  log_names: dict[str, dict[str, str]] = {"charger": {}, "simulator": {}}
19
26
 
20
- LOG_DIR = Path(__file__).resolve().parent.parent / "logs"
21
- LOG_DIR.mkdir(exist_ok=True)
27
+ BASE_DIR = Path(__file__).resolve().parent.parent
28
+ LOG_DIR = select_log_dir(BASE_DIR)
22
29
  SESSION_DIR = LOG_DIR / "sessions"
23
30
  SESSION_DIR.mkdir(exist_ok=True)
31
+ LOCK_DIR = BASE_DIR / "locks"
32
+ LOCK_DIR.mkdir(exist_ok=True)
33
+ SESSION_LOCK = LOCK_DIR / "charging.lck"
34
+ _lock_task: asyncio.Task | None = None
35
+
36
+
37
+ def connector_slug(value: int | str | None) -> str:
38
+ """Return the canonical slug for a connector value."""
39
+
40
+ if value in (None, "", AGGREGATE_SLUG):
41
+ return AGGREGATE_SLUG
42
+ try:
43
+ return str(int(value))
44
+ except (TypeError, ValueError):
45
+ return str(value)
46
+
47
+
48
+ def identity_key(serial: str, connector: int | str | None) -> str:
49
+ """Return the identity key used for in-memory store lookups."""
50
+
51
+ return f"{serial}{IDENTITY_SEPARATOR}{connector_slug(connector)}"
52
+
53
+
54
+ def pending_key(serial: str) -> str:
55
+ """Return the key used before a connector id has been negotiated."""
56
+
57
+ return f"{serial}{IDENTITY_SEPARATOR}{PENDING_SLUG}"
58
+
59
+
60
+ def _candidate_keys(serial: str, connector: int | str | None) -> list[str]:
61
+ """Return possible keys for lookups with fallbacks."""
62
+
63
+ keys: list[str] = []
64
+ if connector not in (None, "", AGGREGATE_SLUG):
65
+ keys.append(identity_key(serial, connector))
66
+ else:
67
+ keys.append(identity_key(serial, None))
68
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
69
+ for key in connections.keys():
70
+ if key.startswith(prefix) and key not in keys:
71
+ keys.append(key)
72
+ keys.append(pending_key(serial))
73
+ keys.append(serial)
74
+ seen: set[str] = set()
75
+ result: list[str] = []
76
+ for key in keys:
77
+ if key and key not in seen:
78
+ seen.add(key)
79
+ result.append(key)
80
+ return result
81
+
82
+
83
+ def iter_identity_keys(serial: str) -> list[str]:
84
+ """Return all known keys for the provided serial."""
85
+
86
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
87
+ keys = [key for key in connections.keys() if key.startswith(prefix)]
88
+ if serial in connections:
89
+ keys.append(serial)
90
+ return keys
91
+
92
+
93
+ def is_connected(serial: str, connector: int | str | None = None) -> bool:
94
+ """Return whether a connection exists for the provided charger identity."""
95
+
96
+ if connector in (None, "", AGGREGATE_SLUG):
97
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
98
+ return (
99
+ any(key.startswith(prefix) for key in connections) or serial in connections
100
+ )
101
+ return any(key in connections for key in _candidate_keys(serial, connector))
102
+
103
+
104
+ def get_connection(serial: str, connector: int | str | None = None):
105
+ """Return the websocket consumer for the requested identity, if any."""
106
+
107
+ for key in _candidate_keys(serial, connector):
108
+ conn = connections.get(key)
109
+ if conn is not None:
110
+ return conn
111
+ return None
112
+
113
+
114
+ def set_connection(serial: str, connector: int | str | None, consumer) -> str:
115
+ """Store a websocket consumer under the negotiated identity."""
116
+
117
+ key = identity_key(serial, connector)
118
+ connections[key] = consumer
119
+ return key
120
+
121
+
122
+ def pop_connection(serial: str, connector: int | str | None = None):
123
+ """Remove a stored connection for the given identity."""
124
+
125
+ for key in _candidate_keys(serial, connector):
126
+ conn = connections.pop(key, None)
127
+ if conn is not None:
128
+ return conn
129
+ return None
130
+
131
+
132
+ def get_transaction(serial: str, connector: int | str | None = None):
133
+ """Return the active transaction for the provided identity."""
134
+
135
+ for key in _candidate_keys(serial, connector):
136
+ tx = transactions.get(key)
137
+ if tx is not None:
138
+ return tx
139
+ return None
140
+
141
+
142
+ def set_transaction(serial: str, connector: int | str | None, tx) -> str:
143
+ """Store an active transaction under the provided identity."""
144
+
145
+ key = identity_key(serial, connector)
146
+ transactions[key] = tx
147
+ return key
148
+
149
+
150
+ def pop_transaction(serial: str, connector: int | str | None = None):
151
+ """Remove and return an active transaction for the identity."""
152
+
153
+ for key in _candidate_keys(serial, connector):
154
+ tx = transactions.pop(key, None)
155
+ if tx is not None:
156
+ return tx
157
+ return None
158
+
159
+
160
+ def reassign_identity(old_key: str, new_key: str) -> str:
161
+ """Move any stored data from ``old_key`` to ``new_key``."""
162
+
163
+ if old_key == new_key:
164
+ return new_key
165
+ if not old_key:
166
+ return new_key
167
+ for mapping in (connections, transactions, history):
168
+ if old_key in mapping:
169
+ mapping[new_key] = mapping.pop(old_key)
170
+ for log_type in logs:
171
+ store = logs[log_type]
172
+ if old_key in store:
173
+ store[new_key] = store.pop(old_key)
174
+ for log_type in log_names:
175
+ names = log_names[log_type]
176
+ if old_key in names:
177
+ names[new_key] = names.pop(old_key)
178
+ return new_key
179
+
180
+
181
+ async def _touch_lock() -> None:
182
+ try:
183
+ while True:
184
+ SESSION_LOCK.touch()
185
+ await asyncio.sleep(60)
186
+ except asyncio.CancelledError:
187
+ pass
188
+
189
+
190
+ def start_session_lock() -> None:
191
+ global _lock_task
192
+ SESSION_LOCK.touch()
193
+ loop = asyncio.get_event_loop()
194
+ if _lock_task is None or _lock_task.done():
195
+ _lock_task = loop.create_task(_touch_lock())
196
+
197
+
198
+ def stop_session_lock() -> None:
199
+ global _lock_task
200
+ if _lock_task:
201
+ _lock_task.cancel()
202
+ _lock_task = None
203
+ if SESSION_LOCK.exists():
204
+ SESSION_LOCK.unlink()
24
205
 
25
206
 
26
207
  def register_log_name(cid: str, name: str, log_type: str = "charger") -> None:
@@ -86,10 +267,12 @@ def add_session_message(cid: str, message: str) -> None:
86
267
  sess = history.get(cid)
87
268
  if not sess:
88
269
  return
89
- sess["messages"].append({
90
- "timestamp": datetime.utcnow().isoformat() + "Z",
91
- "message": message,
92
- })
270
+ sess["messages"].append(
271
+ {
272
+ "timestamp": datetime.utcnow().isoformat() + "Z",
273
+ "message": message,
274
+ }
275
+ )
93
276
 
94
277
 
95
278
  def end_session_log(cid: str) -> None:
@@ -107,15 +290,33 @@ def end_session_log(cid: str) -> None:
107
290
  json.dump(sess["messages"], handle, ensure_ascii=False, indent=2)
108
291
 
109
292
 
110
- def get_logs(cid: str, log_type: str = "charger") -> list[str]:
111
- """Return all log entries for the given id and type."""
293
+ def _log_key_candidates(cid: str, log_type: str) -> list[str]:
294
+ """Return log identifiers to inspect for the requested cid."""
295
+
296
+ if IDENTITY_SEPARATOR not in cid:
297
+ return [cid]
298
+ serial, slug = cid.split(IDENTITY_SEPARATOR, 1)
299
+ slug = slug or AGGREGATE_SLUG
300
+ if slug != AGGREGATE_SLUG:
301
+ return [cid]
302
+ keys: list[str] = [identity_key(serial, None)]
303
+ prefix = f"{serial}{IDENTITY_SEPARATOR}"
304
+ for source in (log_names[log_type], logs[log_type]):
305
+ for key in source.keys():
306
+ if key.startswith(prefix) and key not in keys:
307
+ keys.append(key)
308
+ return keys
309
+
310
+
311
+ def _resolve_log_identifier(cid: str, log_type: str) -> tuple[str, str | None]:
312
+ """Return the canonical key and friendly name for ``cid``."""
112
313
 
113
314
  names = log_names[log_type]
114
- # Try to find a matching log name case-insensitively
115
315
  name = names.get(cid)
116
316
  if name is None:
317
+ lower = cid.lower()
117
318
  for key, value in names.items():
118
- if key.lower() == cid.lower():
319
+ if key.lower() == lower:
119
320
  cid = key
120
321
  name = value
121
322
  break
@@ -132,14 +333,17 @@ def get_logs(cid: str, log_type: str = "charger") -> list[str]:
132
333
  else:
133
334
  from .models import Charger
134
335
 
135
- ch = Charger.objects.filter(charger_id__iexact=cid).first()
336
+ serial = cid.split(IDENTITY_SEPARATOR, 1)[0]
337
+ ch = Charger.objects.filter(charger_id__iexact=serial).first()
136
338
  if ch and ch.name:
137
- cid = ch.charger_id
138
339
  name = ch.name
139
340
  names[cid] = name
140
341
  except Exception: # pragma: no cover - best effort lookup
141
342
  pass
343
+ return cid, name
344
+
142
345
 
346
+ def _log_file_for_identifier(cid: str, name: str | None, log_type: str) -> Path:
143
347
  path = _file_path(cid, log_type)
144
348
  if not path.exists():
145
349
  target = f"{log_type}.{_safe_name(name or cid).lower()}"
@@ -147,29 +351,53 @@ def get_logs(cid: str, log_type: str = "charger") -> list[str]:
147
351
  if file.stem.lower() == target:
148
352
  path = file
149
353
  break
354
+ return path
150
355
 
151
- if path.exists():
152
- return path.read_text(encoding="utf-8").splitlines()
153
356
 
357
+ def _memory_logs_for_identifier(cid: str, log_type: str) -> list[str]:
154
358
  store = logs[log_type]
359
+ lower = cid.lower()
155
360
  for key, entries in store.items():
156
- if key.lower() == cid.lower():
361
+ if key.lower() == lower:
157
362
  return entries
158
363
  return []
159
364
 
160
365
 
366
+ def get_logs(cid: str, log_type: str = "charger") -> list[str]:
367
+ """Return all log entries for the given id and type."""
368
+
369
+ entries: list[str] = []
370
+ seen_paths: set[Path] = set()
371
+ seen_keys: set[str] = set()
372
+ for key in _log_key_candidates(cid, log_type):
373
+ resolved, name = _resolve_log_identifier(key, log_type)
374
+ path = _log_file_for_identifier(resolved, name, log_type)
375
+ if path.exists() and path not in seen_paths:
376
+ entries.extend(path.read_text(encoding="utf-8").splitlines())
377
+ seen_paths.add(path)
378
+ memory_entries = _memory_logs_for_identifier(resolved, log_type)
379
+ lower_key = resolved.lower()
380
+ if memory_entries and lower_key not in seen_keys:
381
+ entries.extend(memory_entries)
382
+ seen_keys.add(lower_key)
383
+ return entries
384
+
385
+
161
386
  def clear_log(cid: str, log_type: str = "charger") -> None:
162
387
  """Remove any stored logs for the given id and type."""
163
-
164
- store = logs[log_type]
165
- key = next((k for k in list(store.keys()) if k.lower() == cid.lower()), cid)
166
- store.pop(key, None)
167
- path = _file_path(key, log_type)
168
- if not path.exists():
169
- target = f"{log_type}.{_safe_name(log_names[log_type].get(key, key)).lower()}"
170
- for file in LOG_DIR.glob(f"{log_type}.*.log"):
171
- if file.stem.lower() == target:
172
- path = file
173
- break
174
- if path.exists():
175
- path.unlink()
388
+ for key in _log_key_candidates(cid, log_type):
389
+ store_map = logs[log_type]
390
+ resolved = next(
391
+ (k for k in list(store_map.keys()) if k.lower() == key.lower()),
392
+ key,
393
+ )
394
+ store_map.pop(resolved, None)
395
+ path = _file_path(resolved, log_type)
396
+ if not path.exists():
397
+ target = f"{log_type}.{_safe_name(log_names[log_type].get(resolved, resolved)).lower()}"
398
+ for file in LOG_DIR.glob(f"{log_type}.*.log"):
399
+ if file.stem.lower() == target:
400
+ path = file
401
+ break
402
+ if path.exists():
403
+ path.unlink()
ocpp/tasks.py CHANGED
@@ -5,23 +5,27 @@ from celery import shared_task
5
5
  from django.utils import timezone
6
6
  from django.db.models import Q
7
7
 
8
- from .models import MeterReading
8
+ from .models import MeterValue
9
9
 
10
10
  logger = logging.getLogger(__name__)
11
11
 
12
12
 
13
13
  @shared_task
14
- def purge_meter_readings() -> int:
15
- """Delete meter readings older than 7 days.
14
+ def purge_meter_values() -> int:
15
+ """Delete meter values older than 7 days.
16
16
 
17
- Readings tied to transactions without a recorded meter_stop are preserved so
17
+ Values tied to transactions without a recorded meter_stop are preserved so
18
18
  that ongoing or incomplete sessions retain their energy data.
19
- Returns the number of deleted readings.
19
+ Returns the number of deleted rows.
20
20
  """
21
21
  cutoff = timezone.now() - timedelta(days=7)
22
- qs = MeterReading.objects.filter(timestamp__lt=cutoff).filter(
22
+ qs = MeterValue.objects.filter(timestamp__lt=cutoff).filter(
23
23
  Q(transaction__isnull=True) | Q(transaction__meter_stop__isnull=False)
24
24
  )
25
25
  deleted, _ = qs.delete()
26
- logger.info("Purged %s meter readings", deleted)
26
+ logger.info("Purged %s meter values", deleted)
27
27
  return deleted
28
+
29
+
30
+ # Backwards compatibility alias
31
+ purge_meter_readings = purge_meter_values
@@ -10,7 +10,7 @@ from django.utils import timezone
10
10
  from django.urls import reverse
11
11
  from django.contrib.auth import get_user_model
12
12
 
13
- from ocpp.models import Charger, Transaction, MeterReading
13
+ from ocpp.models import Charger, Transaction, MeterValue
14
14
  from core.models import EnergyAccount
15
15
 
16
16
 
@@ -29,12 +29,11 @@ class TransactionExportImportTests(TestCase):
29
29
  charger=self.ch2,
30
30
  start_time=now,
31
31
  )
32
- MeterReading.objects.create(
32
+ MeterValue.objects.create(
33
33
  charger=self.ch1,
34
34
  transaction=self.tx_old,
35
35
  timestamp=now - timedelta(days=5),
36
- value=1,
37
- unit="kW",
36
+ energy=1,
38
37
  )
39
38
 
40
39
  def test_export_filters_and_import_creates_chargers(self):
@@ -55,7 +54,7 @@ class TransactionExportImportTests(TestCase):
55
54
  self.assertEqual(len(data["transactions"]), 1)
56
55
  self.assertEqual(data["transactions"][0]["charger"], "C2")
57
56
 
58
- MeterReading.objects.all().delete()
57
+ MeterValue.objects.all().delete()
59
58
  Transaction.objects.all().delete()
60
59
  Charger.objects.all().delete()
61
60
 
@@ -117,7 +116,7 @@ class TransactionAdminExportImportTests(TestCase):
117
116
  "meter_stop": 0,
118
117
  "start_time": timezone.now().isoformat(),
119
118
  "stop_time": None,
120
- "meter_readings": [],
119
+ "meter_values": [],
121
120
  }
122
121
  ],
123
122
  }
@@ -126,4 +125,6 @@ class TransactionAdminExportImportTests(TestCase):
126
125
  response = self.client.post(url, {"file": json_file})
127
126
  self.assertEqual(response.status_code, 302)
128
127
  self.assertTrue(Charger.objects.filter(charger_id="C9").exists())
129
- self.assertEqual(Transaction.objects.filter(charger__charger_id="C9").count(), 1)
128
+ self.assertEqual(
129
+ Transaction.objects.filter(charger__charger_id="C9").count(), 1
130
+ )