arthexis 0.1.9__py3-none-any.whl → 0.1.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arthexis might be problematic.

Files changed (112)
  1. arthexis-0.1.26.dist-info/METADATA +272 -0
  2. arthexis-0.1.26.dist-info/RECORD +111 -0
  3. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +29 -29
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -25
  9. config/context_processors.py +67 -68
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +71 -25
  14. config/offline.py +49 -49
  15. config/settings.py +676 -492
  16. config/settings_helpers.py +109 -0
  17. config/urls.py +228 -159
  18. config/wsgi.py +17 -17
  19. core/admin.py +4052 -2066
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +192 -151
  22. core/apps.py +350 -223
  23. core/auto_upgrade.py +72 -0
  24. core/backends.py +311 -124
  25. core/changelog.py +403 -0
  26. core/entity.py +149 -133
  27. core/environment.py +60 -43
  28. core/fields.py +168 -75
  29. core/form_fields.py +75 -0
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +183 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +114 -100
  36. core/mailer.py +89 -83
  37. core/middleware.py +91 -91
  38. core/models.py +5041 -2195
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +107 -0
  42. core/release.py +940 -346
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -131
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +250 -284
  47. core/system.py +1425 -230
  48. core/tasks.py +538 -199
  49. core/temp_passwords.py +181 -0
  50. core/test_system_info.py +202 -43
  51. core/tests.py +2673 -1069
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +681 -495
  55. core/views.py +2484 -789
  56. core/widgets.py +213 -51
  57. nodes/admin.py +2236 -445
  58. nodes/apps.py +98 -70
  59. nodes/backends.py +160 -53
  60. nodes/dns.py +203 -0
  61. nodes/feature_checks.py +133 -0
  62. nodes/lcd.py +165 -165
  63. nodes/models.py +2375 -870
  64. nodes/reports.py +411 -0
  65. nodes/rfid_sync.py +210 -0
  66. nodes/signals.py +18 -0
  67. nodes/tasks.py +141 -46
  68. nodes/tests.py +5045 -1489
  69. nodes/urls.py +29 -13
  70. nodes/utils.py +172 -73
  71. nodes/views.py +1768 -304
  72. ocpp/admin.py +1775 -481
  73. ocpp/apps.py +25 -25
  74. ocpp/consumers.py +1843 -630
  75. ocpp/evcs.py +844 -928
  76. ocpp/evcs_discovery.py +158 -0
  77. ocpp/models.py +1417 -640
  78. ocpp/network.py +398 -0
  79. ocpp/reference_utils.py +42 -0
  80. ocpp/routing.py +11 -9
  81. ocpp/simulator.py +745 -368
  82. ocpp/status_display.py +26 -0
  83. ocpp/store.py +603 -403
  84. ocpp/tasks.py +479 -31
  85. ocpp/test_export_import.py +131 -130
  86. ocpp/test_rfid.py +1072 -540
  87. ocpp/tests.py +5494 -2296
  88. ocpp/transactions_io.py +197 -165
  89. ocpp/urls.py +50 -50
  90. ocpp/views.py +2024 -912
  91. pages/admin.py +1123 -396
  92. pages/apps.py +45 -10
  93. pages/checks.py +40 -40
  94. pages/context_processors.py +151 -85
  95. pages/defaults.py +13 -0
  96. pages/forms.py +221 -0
  97. pages/middleware.py +213 -153
  98. pages/models.py +720 -252
  99. pages/module_defaults.py +156 -0
  100. pages/site_config.py +137 -0
  101. pages/tasks.py +74 -0
  102. pages/tests.py +4009 -1389
  103. pages/urls.py +38 -20
  104. pages/utils.py +93 -12
  105. pages/views.py +1736 -762
  106. arthexis-0.1.9.dist-info/METADATA +0 -168
  107. arthexis-0.1.9.dist-info/RECORD +0 -92
  108. core/workgroup_urls.py +0 -17
  109. core/workgroup_views.py +0 -94
  110. nodes/actions.py +0 -70
  111. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
  112. {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/apps.py CHANGED
@@ -1,223 +1,350 @@
-import logging
-
-from django.apps import AppConfig
-from django.utils.translation import gettext_lazy as _
-
-
-logger = logging.getLogger(__name__)
-
-
-class CoreConfig(AppConfig):
-    default_auto_field = "django.db.models.BigAutoField"
-    name = "core"
-    verbose_name = _("2. Business")
-
-    def ready(self): # pragma: no cover - called by Django
-        from contextlib import suppress
-        from functools import wraps
-        import hashlib
-        import time
-        import traceback
-        from pathlib import Path
-
-        from django.conf import settings
-        from django.contrib.auth import get_user_model
-        from django.db.models.signals import post_migrate
-        from django.core.signals import got_request_exception
-
-        from core.github_helper import report_exception_to_github
-        from .entity import Entity
-        from .user_data import (
-            patch_admin_user_datum,
-            patch_admin_user_data_views,
-        )
-        from .system import patch_admin_system_view
-        from .environment import patch_admin_environment_view
-        from .sigil_builder import (
-            patch_admin_sigil_builder_view,
-            generate_model_sigils,
-        )
-        from .admin_history import patch_admin_history
-
-        def create_default_arthexis(**kwargs):
-            User = get_user_model()
-            if not User.all_objects.exists():
-                User.all_objects.create_superuser(
-                    pk=1,
-                    username="arthexis",
-                    email="arthexis@gmail.com",
-                    password="arthexis",
-                )
-
-        post_migrate.connect(create_default_arthexis, sender=self)
-        post_migrate.connect(generate_model_sigils, sender=self)
-        patch_admin_user_datum()
-        patch_admin_user_data_views()
-        patch_admin_system_view()
-        patch_admin_environment_view()
-        patch_admin_sigil_builder_view()
-        patch_admin_history()
-
-        from django.core.serializers import base as serializer_base
-
-        if not hasattr(
-            serializer_base.DeserializedObject.save, "_entity_fixture_patch"
-        ):
-            original_save = serializer_base.DeserializedObject.save
-
-            @wraps(original_save)
-            def patched_save(self, save_m2m=True, using=None, **kwargs):
-                obj = self.object
-                if isinstance(obj, Entity):
-                    manager = getattr(
-                        type(obj), "all_objects", type(obj)._default_manager
-                    )
-                    if using:
-                        manager = manager.db_manager(using)
-                    for fields in obj._unique_field_groups():
-                        lookup = {}
-                        for field in fields:
-                            value = getattr(obj, field.attname)
-                            if value is None:
-                                lookup = {}
-                                break
-                            lookup[field.attname] = value
-                        if not lookup:
-                            continue
-                        existing = (
-                            manager.filter(**lookup)
-                            .only("pk", "is_seed_data", "is_user_data")
-                            .first()
-                        )
-                        if existing is not None:
-                            obj.pk = existing.pk
-                            obj.is_seed_data = existing.is_seed_data
-                            obj.is_user_data = existing.is_user_data
-                            obj._state.adding = False
-                            if using:
-                                obj._state.db = using
-                            break
-                return original_save(self, save_m2m=save_m2m, using=using, **kwargs)
-
-            patched_save._entity_fixture_patch = True
-            serializer_base.DeserializedObject.save = patched_save
-
-        lock = Path(settings.BASE_DIR) / "locks" / "celery.lck"
-
-        if lock.exists():
-
-            def ensure_email_collector_task(**kwargs):
-                try: # pragma: no cover - optional dependency
-                    from django_celery_beat.models import (
-                        IntervalSchedule,
-                        PeriodicTask,
-                    )
-                    from django.db.utils import OperationalError, ProgrammingError
-                except Exception: # pragma: no cover - tables or module not ready
-                    return
-
-                try:
-                    schedule, _ = IntervalSchedule.objects.get_or_create(
-                        every=1, period=IntervalSchedule.HOURS
-                    )
-                    PeriodicTask.objects.get_or_create(
-                        name="poll_email_collectors",
-                        defaults={
-                            "interval": schedule,
-                            "task": "core.tasks.poll_email_collectors",
-                        },
-                    )
-                except (OperationalError, ProgrammingError):
-                    pass
-
-            post_migrate.connect(ensure_email_collector_task, sender=self)
-
-        from django.db.backends.signals import connection_created
-
-        def enable_sqlite_wal(**kwargs):
-            connection = kwargs.get("connection")
-            if connection.vendor == "sqlite":
-                cursor = connection.cursor()
-                cursor.execute("PRAGMA journal_mode=WAL;")
-                cursor.execute("PRAGMA busy_timeout=60000;")
-                cursor.close()
-
-        connection_created.connect(enable_sqlite_wal)
-
-        def queue_github_issue(sender, request=None, **kwargs):
-            if not getattr(settings, "GITHUB_ISSUE_REPORTING_ENABLED", True):
-                return
-            if request is None:
-                return
-
-            exception = kwargs.get("exception")
-            if exception is None:
-                return
-
-            try:
-                tb_exc = traceback.TracebackException.from_exception(exception)
-                stack = tb_exc.stack
-                top_frame = stack[-1] if stack else None
-                fingerprint_parts = [
-                    exception.__class__.__module__,
-                    exception.__class__.__name__,
-                ]
-                if top_frame:
-                    fingerprint_parts.extend(
-                        [
-                            top_frame.filename,
-                            str(top_frame.lineno),
-                            top_frame.name,
-                        ]
-                    )
-                fingerprint = hashlib.sha256(
-                    "|".join(fingerprint_parts).encode("utf-8")
-                ).hexdigest()
-
-                cooldown = getattr(settings, "GITHUB_ISSUE_REPORTING_COOLDOWN", 3600)
-                lock_dir = Path(settings.BASE_DIR) / "locks" / "github-issues"
-                fingerprint_path = None
-                now = time.time()
-
-                with suppress(OSError):
-                    lock_dir.mkdir(parents=True, exist_ok=True)
-                    fingerprint_path = lock_dir / fingerprint
-                    if fingerprint_path.exists():
-                        age = now - fingerprint_path.stat().st_mtime
-                        if age < cooldown:
-                            return
-
-                if fingerprint_path is not None:
-                    with suppress(OSError):
-                        fingerprint_path.write_text(str(now))
-
-                user_repr = None
-                user = getattr(request, "user", None)
-                if user is not None:
-                    try:
-                        if getattr(user, "is_authenticated", False):
-                            user_repr = user.get_username()
-                        else:
-                            user_repr = "anonymous"
-                    except Exception: # pragma: no cover - defensive
-                        user_repr = str(user)
-
-                payload = {
-                    "path": getattr(request, "path", None),
-                    "method": getattr(request, "method", None),
-                    "user": user_repr,
-                    "active_app": getattr(request, "active_app", None),
-                    "fingerprint": fingerprint,
-                    "exception_class": f"{exception.__class__.__module__}.{exception.__class__.__name__}",
-                    "traceback": "".join(tb_exc.format()),
-                }
-
-                report_exception_to_github.delay(payload)
-            except Exception: # pragma: no cover - defensive
-                logger.exception("Failed to queue GitHub issue from request exception")
-
-        got_request_exception.connect(
-            queue_github_issue,
-            dispatch_uid="core.github_issue_reporter",
-            weak=False,
-        )
+import logging
+
+from django.apps import AppConfig
+from django.utils.translation import gettext_lazy as _
+
+
+logger = logging.getLogger(__name__)
+
+
+class CoreConfig(AppConfig):
+    default_auto_field = "django.db.models.BigAutoField"
+    name = "core"
+    verbose_name = _("2. Business")
+
+    def ready(self): # pragma: no cover - called by Django
+        from contextlib import suppress
+        from functools import wraps
+        import hashlib
+        import time
+        import traceback
+        from pathlib import Path
+
+        from django.conf import settings
+        from django.core.exceptions import ObjectDoesNotExist
+        from django.contrib.auth import get_user_model
+        from django.db.models.signals import post_migrate
+        from django.core.signals import got_request_exception
+
+        from core.github_helper import report_exception_to_github
+        from .entity import Entity
+        from .user_data import (
+            patch_admin_user_datum,
+            patch_admin_user_data_views,
+        )
+        from .system import patch_admin_system_view
+        from .environment import patch_admin_environment_view
+        from .sigil_builder import (
+            patch_admin_sigil_builder_view,
+            generate_model_sigils,
+        )
+        from .admin_history import patch_admin_history
+
+        from django_otp.plugins.otp_totp.models import TOTPDevice as OTP_TOTPDevice
+
+        if not hasattr(
+            OTP_TOTPDevice._read_str_from_settings, "_core_totp_issuer_patch"
+        ):
+            original_read_str = OTP_TOTPDevice._read_str_from_settings
+
+            def _core_totp_read_str(self, key):
+                if key == "OTP_TOTP_ISSUER":
+                    try:
+                        settings_obj = self.custom_settings
+                    except ObjectDoesNotExist:
+                        settings_obj = None
+                    if settings_obj and settings_obj.issuer:
+                        return settings_obj.issuer
+                return original_read_str(self, key)
+
+            _core_totp_read_str._core_totp_issuer_patch = True
+            OTP_TOTPDevice._read_str_from_settings = _core_totp_read_str
+
+        if not getattr(OTP_TOTPDevice, "_core_user_datum_patch", False):
+            from .models import TOTPDeviceSettings
+
+            def _totp_should_persist(settings_obj):
+                return bool(
+                    settings_obj
+                    and (
+                        settings_obj.issuer
+                        or settings_obj.is_seed_data
+                        or settings_obj.is_user_data
+                    )
+                )
+
+            def _totp_save_or_delete(settings_obj):
+                if settings_obj is None:
+                    return
+                if _totp_should_persist(settings_obj):
+                    if settings_obj.pk:
+                        settings_obj.save(
+                            update_fields=["issuer", "is_seed_data", "is_user_data"]
+                        )
+                    else:
+                        settings_obj.save()
+                elif settings_obj.pk:
+                    settings_obj.delete()
+
+            def _totp_get_flag(instance, attr):
+                cache_key = f"_{attr}"
+                if cache_key in instance.__dict__:
+                    return instance.__dict__[cache_key]
+                try:
+                    settings_obj = instance.custom_settings
+                except ObjectDoesNotExist:
+                    value = False
+                else:
+                    value = bool(getattr(settings_obj, attr, False))
+                instance.__dict__[cache_key] = value
+                return value
+
+            def _totp_set_flag(instance, attr, value):
+                cache_key = f"_{attr}"
+                value = bool(value)
+                try:
+                    settings_obj = instance.custom_settings
+                except ObjectDoesNotExist:
+                    if not value:
+                        instance.__dict__[cache_key] = False
+                        return
+                    settings_obj = TOTPDeviceSettings(device=instance)
+                setattr(settings_obj, attr, value)
+                _totp_save_or_delete(settings_obj)
+                instance.__dict__[cache_key] = value
+
+            def _totp_get_user_data(instance):
+                return _totp_get_flag(instance, "is_user_data")
+
+            def _totp_set_user_data(instance, value):
+                _totp_set_flag(instance, "is_user_data", value)
+
+            def _totp_get_seed_data(instance):
+                return _totp_get_flag(instance, "is_seed_data")
+
+            def _totp_set_seed_data(instance, value):
+                _totp_set_flag(instance, "is_seed_data", value)
+
+            OTP_TOTPDevice.is_user_data = property(
+                _totp_get_user_data, _totp_set_user_data
+            )
+            OTP_TOTPDevice.is_seed_data = property(
+                _totp_get_seed_data, _totp_set_seed_data
+            )
+            if not hasattr(OTP_TOTPDevice, "all_objects"):
+                OTP_TOTPDevice.all_objects = OTP_TOTPDevice._default_manager
+            OTP_TOTPDevice.supports_user_datum = True
+            OTP_TOTPDevice.supports_seed_datum = True
+            OTP_TOTPDevice._core_user_datum_patch = True
+
+        def create_default_arthexis(**kwargs):
+            User = get_user_model()
+            if not User.all_objects.exists():
+                User.all_objects.create_superuser(
+                    pk=1,
+                    username="arthexis",
+                    email="arthexis@gmail.com",
+                    password="arthexis",
+                )
+
+        post_migrate.connect(create_default_arthexis, sender=self)
+        post_migrate.connect(generate_model_sigils, sender=self)
+        patch_admin_user_datum()
+        patch_admin_user_data_views()
+        patch_admin_system_view()
+        patch_admin_environment_view()
+        patch_admin_sigil_builder_view()
+        patch_admin_history()
+
+        from django.core.serializers import base as serializer_base
+
+        if not hasattr(
+            serializer_base.DeserializedObject.save, "_entity_fixture_patch"
+        ):
+            original_save = serializer_base.DeserializedObject.save
+
+            @wraps(original_save)
+            def patched_save(self, save_m2m=True, using=None, **kwargs):
+                obj = self.object
+                if isinstance(obj, Entity):
+                    manager = getattr(
+                        type(obj), "all_objects", type(obj)._default_manager
+                    )
+                    if using:
+                        manager = manager.db_manager(using)
+                    for fields in obj._unique_field_groups():
+                        lookup = {}
+                        for field in fields:
+                            value = getattr(obj, field.attname)
+                            if value is None:
+                                lookup = {}
+                                break
+                            lookup[field.attname] = value
+                        if not lookup:
+                            continue
+                        existing = (
+                            manager.filter(**lookup)
+                            .only("pk", "is_seed_data", "is_user_data")
+                            .first()
+                        )
+                        if existing is not None:
+                            obj.pk = existing.pk
+                            obj.is_seed_data = existing.is_seed_data
+                            obj.is_user_data = existing.is_user_data
+                            obj._state.adding = False
+                            if using:
+                                obj._state.db = using
+                            break
+                return original_save(self, save_m2m=save_m2m, using=using, **kwargs)
+
+            patched_save._entity_fixture_patch = True
+            serializer_base.DeserializedObject.save = patched_save
+
+        lock = Path(settings.BASE_DIR) / "locks" / "celery.lck"
+
+        from django.db.backends.signals import connection_created
+
+        if lock.exists():
+            from .auto_upgrade import ensure_auto_upgrade_periodic_task
+            from django.db import DEFAULT_DB_ALIAS, connections
+
+            def ensure_email_collector_task(**kwargs):
+                try: # pragma: no cover - optional dependency
+                    from django_celery_beat.models import (
+                        IntervalSchedule,
+                        PeriodicTask,
+                    )
+                    from django.db.utils import OperationalError, ProgrammingError
+                except Exception: # pragma: no cover - tables or module not ready
+                    return
+
+                try:
+                    schedule, _ = IntervalSchedule.objects.get_or_create(
+                        every=1, period=IntervalSchedule.HOURS
+                    )
+                    PeriodicTask.objects.get_or_create(
+                        name="poll_email_collectors",
+                        defaults={
+                            "interval": schedule,
+                            "task": "core.tasks.poll_email_collectors",
+                        },
+                    )
+                except (OperationalError, ProgrammingError):
+                    pass
+
+            post_migrate.connect(ensure_email_collector_task, sender=self)
+            post_migrate.connect(ensure_auto_upgrade_periodic_task, sender=self)
+
+            auto_upgrade_dispatch_uid = "core.apps.ensure_auto_upgrade_periodic_task"
+
+            def ensure_auto_upgrade_on_connection(**kwargs):
+                connection = kwargs.get("connection")
+                if connection is not None and connection.alias != "default":
+                    return
+
+                try:
+                    ensure_auto_upgrade_periodic_task()
+                finally:
+                    connection_created.disconnect(
+                        receiver=ensure_auto_upgrade_on_connection,
+                        dispatch_uid=auto_upgrade_dispatch_uid,
+                    )
+
+            connection_created.connect(
+                ensure_auto_upgrade_on_connection,
+                dispatch_uid=auto_upgrade_dispatch_uid,
+                weak=False,
+            )
+
+            default_connection = connections[DEFAULT_DB_ALIAS]
+            if default_connection.connection is not None:
+                ensure_auto_upgrade_on_connection(connection=default_connection)
+
+        def enable_sqlite_wal(**kwargs):
+            connection = kwargs.get("connection")
+            if connection.vendor == "sqlite":
+                cursor = connection.cursor()
+                cursor.execute("PRAGMA journal_mode=WAL;")
+                cursor.execute("PRAGMA busy_timeout=60000;")
+                cursor.close()
+
+        connection_created.connect(enable_sqlite_wal)
+
+        def queue_github_issue(sender, request=None, **kwargs):
+            if not getattr(settings, "GITHUB_ISSUE_REPORTING_ENABLED", True):
+                return
+            if request is None:
+                return
+
+            exception = kwargs.get("exception")
+            if exception is None:
+                return
+
+            try:
+                tb_exc = traceback.TracebackException.from_exception(exception)
+                stack = tb_exc.stack
+                top_frame = stack[-1] if stack else None
+                fingerprint_parts = [
+                    exception.__class__.__module__,
+                    exception.__class__.__name__,
+                ]
+                if top_frame:
+                    fingerprint_parts.extend(
+                        [
+                            top_frame.filename,
+                            str(top_frame.lineno),
+                            top_frame.name,
+                        ]
+                    )
+                fingerprint = hashlib.sha256(
+                    "|".join(fingerprint_parts).encode("utf-8")
+                ).hexdigest()
+
+                cooldown = getattr(settings, "GITHUB_ISSUE_REPORTING_COOLDOWN", 3600)
+                lock_dir = Path(settings.BASE_DIR) / "locks" / "github-issues"
+                fingerprint_path = None
+                now = time.time()
+
+                with suppress(OSError):
+                    lock_dir.mkdir(parents=True, exist_ok=True)
+                    fingerprint_path = lock_dir / fingerprint
+                    if fingerprint_path.exists():
+                        age = now - fingerprint_path.stat().st_mtime
+                        if age < cooldown:
+                            return
+
+                if fingerprint_path is not None:
+                    with suppress(OSError):
+                        fingerprint_path.write_text(str(now))
+
+                user_repr = None
+                user = getattr(request, "user", None)
+                if user is not None:
+                    try:
+                        if getattr(user, "is_authenticated", False):
+                            user_repr = user.get_username()
+                        else:
+                            user_repr = "anonymous"
+                    except Exception: # pragma: no cover - defensive
+                        user_repr = str(user)
+
+                payload = {
+                    "path": getattr(request, "path", None),
+                    "method": getattr(request, "method", None),
+                    "user": user_repr,
+                    "active_app": getattr(request, "active_app", None),
+                    "fingerprint": fingerprint,
+                    "exception_class": f"{exception.__class__.__module__}.{exception.__class__.__name__}",
+                    "traceback": "".join(tb_exc.format()),
+                }
+
+                report_exception_to_github.delay(payload)
+            except Exception: # pragma: no cover - defensive
+                logger.exception("Failed to queue GitHub issue from request exception")
+
+        got_request_exception.connect(
+            queue_github_issue,
+            dispatch_uid="core.github_issue_reporter",
+            weak=False,
+        )
+
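In the new ready(), ensure_auto_upgrade_periodic_task is wired both to post_migrate and to a connection_created receiver that disconnects itself after handling the first default-database connection, so the scheduler entry is registered at most once per process. Below is a minimal, self-contained sketch of that self-disconnecting receiver pattern; setup_signal, run_once and DISPATCH_UID are illustrative names, not part of arthexis, which applies the same idea to django.db.backends.signals.connection_created.

# Illustrative sketch of a one-shot, self-disconnecting signal receiver.
from django.dispatch import Signal

setup_signal = Signal()  # stands in for connection_created
DISPATCH_UID = "demo.run_once"


def run_once(sender=None, **kwargs):
    try:
        print("one-time setup executed")  # e.g. ensure a periodic task exists
    finally:
        # Disconnect inside ``finally`` so the receiver never fires twice,
        # even if the setup step raises.
        setup_signal.disconnect(receiver=run_once, dispatch_uid=DISPATCH_UID)


setup_signal.connect(run_once, dispatch_uid=DISPATCH_UID, weak=False)

setup_signal.send(sender=None)  # runs the setup once
setup_signal.send(sender=None)  # no-op: the receiver already removed itself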
core/auto_upgrade.py ADDED
@@ -0,0 +1,72 @@
+"""Helpers for managing the auto-upgrade scheduler."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from django.conf import settings
+
+
+AUTO_UPGRADE_TASK_NAME = "auto-upgrade-check"
+AUTO_UPGRADE_TASK_PATH = "core.tasks.check_github_updates"
+
+DEFAULT_AUTO_UPGRADE_MODE = "version"
+AUTO_UPGRADE_INTERVAL_MINUTES = {
+    "latest": 5,
+    "stable": 60,
+    DEFAULT_AUTO_UPGRADE_MODE: 720,
+}
+AUTO_UPGRADE_FALLBACK_INTERVAL = AUTO_UPGRADE_INTERVAL_MINUTES["stable"]
+
+
+def ensure_auto_upgrade_periodic_task(
+    sender=None, *, base_dir: Path | None = None, **kwargs
+) -> None:
+    """Ensure the auto-upgrade periodic task exists.
+
+    The function is signal-safe so it can be wired to Django's
+    ``post_migrate`` hook. When called directly the ``sender`` and
+    ``**kwargs`` parameters are ignored.
+    """
+
+    del sender, kwargs # Unused when invoked as a Django signal handler.
+
+    if base_dir is None:
+        base_dir = Path(settings.BASE_DIR)
+    else:
+        base_dir = Path(base_dir)
+
+    lock_dir = base_dir / "locks"
+    mode_file = lock_dir / "auto_upgrade.lck"
+
+    try: # pragma: no cover - optional dependency failures
+        from django_celery_beat.models import IntervalSchedule, PeriodicTask
+        from django.db.utils import OperationalError, ProgrammingError
+    except Exception:
+        return
+
+    if not mode_file.exists():
+        try:
+            PeriodicTask.objects.filter(name=AUTO_UPGRADE_TASK_NAME).delete()
+        except (OperationalError, ProgrammingError): # pragma: no cover - DB not ready
+            return
+        return
+
+    _mode = mode_file.read_text().strip().lower() or DEFAULT_AUTO_UPGRADE_MODE
+    interval_minutes = AUTO_UPGRADE_INTERVAL_MINUTES.get(
+        _mode, AUTO_UPGRADE_FALLBACK_INTERVAL
+    )
+
+    try:
+        schedule, _ = IntervalSchedule.objects.get_or_create(
+            every=interval_minutes, period=IntervalSchedule.MINUTES
+        )
+        PeriodicTask.objects.update_or_create(
+            name=AUTO_UPGRADE_TASK_NAME,
+            defaults={
+                "interval": schedule,
+                "task": AUTO_UPGRADE_TASK_PATH,
+            },
+        )
+    except (OperationalError, ProgrammingError): # pragma: no cover - DB not ready
+        return
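The helper reads the desired upgrade mode from locks/auto_upgrade.lck and maps it to a django-celery-beat interval (5 minutes for "latest", 60 for "stable", 720 for the default "version" mode), and it removes the auto-upgrade-check task when the lock file is absent. A rough usage sketch follows, assuming Django settings are configured and the django_celery_beat tables are migrated; the /opt/arthexis install root is hypothetical.

# Hypothetical invocation sketch; BASE is an assumed install root.
from pathlib import Path

from core.auto_upgrade import ensure_auto_upgrade_periodic_task

BASE = Path("/opt/arthexis")
(BASE / "locks").mkdir(parents=True, exist_ok=True)

# "latest" opts in to the fastest cadence: a 5-minute check interval.
(BASE / "locks" / "auto_upgrade.lck").write_text("latest")
ensure_auto_upgrade_periodic_task(base_dir=BASE)

# With the lock file removed, the same call deletes the periodic task.
(BASE / "locks" / "auto_upgrade.lck").unlink()
ensure_auto_upgrade_periodic_task(base_dir=BASE)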