arthexis 0.1.13__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arthexis might be problematic.

Files changed (107)
  1. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/METADATA +222 -221
  2. arthexis-0.1.14.dist-info/RECORD +109 -0
  3. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/licenses/LICENSE +674 -674
  4. config/__init__.py +5 -5
  5. config/active_app.py +15 -15
  6. config/asgi.py +43 -43
  7. config/auth_app.py +7 -7
  8. config/celery.py +32 -32
  9. config/context_processors.py +67 -69
  10. config/horologia_app.py +7 -7
  11. config/loadenv.py +11 -11
  12. config/logging.py +59 -48
  13. config/middleware.py +25 -25
  14. config/offline.py +49 -49
  15. config/settings.py +691 -682
  16. config/settings_helpers.py +109 -109
  17. config/urls.py +171 -166
  18. config/wsgi.py +17 -17
  19. core/admin.py +3771 -2809
  20. core/admin_history.py +50 -50
  21. core/admindocs.py +151 -151
  22. core/apps.py +356 -272
  23. core/auto_upgrade.py +57 -57
  24. core/backends.py +265 -236
  25. core/changelog.py +342 -0
  26. core/entity.py +133 -133
  27. core/environment.py +61 -61
  28. core/fields.py +168 -168
  29. core/form_fields.py +75 -75
  30. core/github_helper.py +188 -25
  31. core/github_issues.py +178 -172
  32. core/github_repos.py +72 -0
  33. core/lcd_screen.py +78 -78
  34. core/liveupdate.py +25 -25
  35. core/log_paths.py +100 -100
  36. core/mailer.py +85 -85
  37. core/middleware.py +91 -91
  38. core/models.py +3609 -2795
  39. core/notifications.py +105 -105
  40. core/public_wifi.py +267 -227
  41. core/reference_utils.py +108 -108
  42. core/release.py +721 -368
  43. core/rfid_import_export.py +113 -0
  44. core/sigil_builder.py +149 -149
  45. core/sigil_context.py +20 -20
  46. core/sigil_resolver.py +315 -315
  47. core/system.py +752 -493
  48. core/tasks.py +408 -394
  49. core/temp_passwords.py +181 -181
  50. core/test_system_info.py +186 -139
  51. core/tests.py +2095 -1521
  52. core/tests_liveupdate.py +17 -17
  53. core/urls.py +11 -11
  54. core/user_data.py +641 -633
  55. core/views.py +2175 -1417
  56. core/widgets.py +213 -94
  57. core/workgroup_urls.py +17 -17
  58. core/workgroup_views.py +94 -94
  59. nodes/admin.py +1720 -1161
  60. nodes/apps.py +87 -85
  61. nodes/backends.py +160 -160
  62. nodes/dns.py +203 -203
  63. nodes/feature_checks.py +133 -133
  64. nodes/lcd.py +165 -165
  65. nodes/models.py +1737 -1597
  66. nodes/reports.py +411 -411
  67. nodes/rfid_sync.py +195 -0
  68. nodes/signals.py +18 -0
  69. nodes/tasks.py +46 -46
  70. nodes/tests.py +3810 -3116
  71. nodes/urls.py +15 -14
  72. nodes/utils.py +121 -105
  73. nodes/views.py +683 -619
  74. ocpp/admin.py +948 -948
  75. ocpp/apps.py +25 -25
  76. ocpp/consumers.py +1565 -1459
  77. ocpp/evcs.py +844 -844
  78. ocpp/evcs_discovery.py +158 -158
  79. ocpp/models.py +917 -917
  80. ocpp/reference_utils.py +42 -42
  81. ocpp/routing.py +11 -11
  82. ocpp/simulator.py +745 -745
  83. ocpp/status_display.py +26 -26
  84. ocpp/store.py +601 -541
  85. ocpp/tasks.py +31 -31
  86. ocpp/test_export_import.py +130 -130
  87. ocpp/test_rfid.py +913 -702
  88. ocpp/tests.py +4445 -4094
  89. ocpp/transactions_io.py +189 -189
  90. ocpp/urls.py +50 -50
  91. ocpp/views.py +1479 -1251
  92. pages/admin.py +708 -539
  93. pages/apps.py +10 -10
  94. pages/checks.py +40 -40
  95. pages/context_processors.py +127 -119
  96. pages/defaults.py +13 -13
  97. pages/forms.py +198 -198
  98. pages/middleware.py +205 -153
  99. pages/models.py +607 -426
  100. pages/tests.py +2612 -2200
  101. pages/urls.py +25 -25
  102. pages/utils.py +12 -12
  103. pages/views.py +1165 -1128
  104. arthexis-0.1.13.dist-info/RECORD +0 -105
  105. nodes/actions.py +0 -70
  106. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/WHEEL +0 -0
  107. {arthexis-0.1.13.dist-info → arthexis-0.1.14.dist-info}/top_level.txt +0 -0
core/apps.py CHANGED
@@ -1,272 +1,356 @@
- import logging
-
- from django.apps import AppConfig
- from django.utils.translation import gettext_lazy as _
-
-
- logger = logging.getLogger(__name__)
-
-
- class CoreConfig(AppConfig):
-     default_auto_field = "django.db.models.BigAutoField"
-     name = "core"
-     verbose_name = _("2. Business")
-
-     def ready(self):  # pragma: no cover - called by Django
-         from contextlib import suppress
-         from functools import wraps
-         import hashlib
-         import time
-         import traceback
-         from pathlib import Path
-
-         from django.conf import settings
-         from django.core.exceptions import ObjectDoesNotExist
-         from django.contrib.auth import get_user_model
-         from django.db.models.signals import post_migrate
-         from django.core.signals import got_request_exception
-
-         from core.github_helper import report_exception_to_github
-         from .entity import Entity
-         from .user_data import (
-             patch_admin_user_datum,
-             patch_admin_user_data_views,
-         )
-         from .system import patch_admin_system_view
-         from .environment import patch_admin_environment_view
-         from .sigil_builder import (
-             patch_admin_sigil_builder_view,
-             generate_model_sigils,
-         )
-         from .admin_history import patch_admin_history
-
-         from django_otp.plugins.otp_totp.models import TOTPDevice as OTP_TOTPDevice
-
-         if not hasattr(
-             OTP_TOTPDevice._read_str_from_settings, "_core_totp_issuer_patch"
-         ):
-             original_read_str = OTP_TOTPDevice._read_str_from_settings
-
-             def _core_totp_read_str(self, key):
-                 if key == "OTP_TOTP_ISSUER":
-                     try:
-                         settings_obj = self.custom_settings
-                     except ObjectDoesNotExist:
-                         settings_obj = None
-                     if settings_obj and settings_obj.issuer:
-                         return settings_obj.issuer
-                 return original_read_str(self, key)
-
-             _core_totp_read_str._core_totp_issuer_patch = True
-             OTP_TOTPDevice._read_str_from_settings = _core_totp_read_str
-
-         def create_default_arthexis(**kwargs):
-             User = get_user_model()
-             if not User.all_objects.exists():
-                 User.all_objects.create_superuser(
-                     pk=1,
-                     username="arthexis",
-                     email="arthexis@gmail.com",
-                     password="arthexis",
-                 )
-
-         post_migrate.connect(create_default_arthexis, sender=self)
-         post_migrate.connect(generate_model_sigils, sender=self)
-         patch_admin_user_datum()
-         patch_admin_user_data_views()
-         patch_admin_system_view()
-         patch_admin_environment_view()
-         patch_admin_sigil_builder_view()
-         patch_admin_history()
-
-         from django.core.serializers import base as serializer_base
-
-         if not hasattr(
-             serializer_base.DeserializedObject.save, "_entity_fixture_patch"
-         ):
-             original_save = serializer_base.DeserializedObject.save
-
-             @wraps(original_save)
-             def patched_save(self, save_m2m=True, using=None, **kwargs):
-                 obj = self.object
-                 if isinstance(obj, Entity):
-                     manager = getattr(
-                         type(obj), "all_objects", type(obj)._default_manager
-                     )
-                     if using:
-                         manager = manager.db_manager(using)
-                     for fields in obj._unique_field_groups():
-                         lookup = {}
-                         for field in fields:
-                             value = getattr(obj, field.attname)
-                             if value is None:
-                                 lookup = {}
-                                 break
-                             lookup[field.attname] = value
-                         if not lookup:
-                             continue
-                         existing = (
-                             manager.filter(**lookup)
-                             .only("pk", "is_seed_data", "is_user_data")
-                             .first()
-                         )
-                         if existing is not None:
-                             obj.pk = existing.pk
-                             obj.is_seed_data = existing.is_seed_data
-                             obj.is_user_data = existing.is_user_data
-                             obj._state.adding = False
-                             if using:
-                                 obj._state.db = using
-                             break
-                 return original_save(self, save_m2m=save_m2m, using=using, **kwargs)
-
-             patched_save._entity_fixture_patch = True
-             serializer_base.DeserializedObject.save = patched_save
-
-         lock = Path(settings.BASE_DIR) / "locks" / "celery.lck"
-
-         from django.db.backends.signals import connection_created
-
-         if lock.exists():
-             from .auto_upgrade import ensure_auto_upgrade_periodic_task
-             from django.db import DEFAULT_DB_ALIAS, connections
-
-             def ensure_email_collector_task(**kwargs):
-                 try:  # pragma: no cover - optional dependency
-                     from django_celery_beat.models import (
-                         IntervalSchedule,
-                         PeriodicTask,
-                     )
-                     from django.db.utils import OperationalError, ProgrammingError
-                 except Exception:  # pragma: no cover - tables or module not ready
-                     return
-
-                 try:
-                     schedule, _ = IntervalSchedule.objects.get_or_create(
-                         every=1, period=IntervalSchedule.HOURS
-                     )
-                     PeriodicTask.objects.get_or_create(
-                         name="poll_email_collectors",
-                         defaults={
-                             "interval": schedule,
-                             "task": "core.tasks.poll_email_collectors",
-                         },
-                     )
-                 except (OperationalError, ProgrammingError):
-                     pass
-
-             post_migrate.connect(ensure_email_collector_task, sender=self)
-             post_migrate.connect(ensure_auto_upgrade_periodic_task, sender=self)
-
-             auto_upgrade_dispatch_uid = "core.apps.ensure_auto_upgrade_periodic_task"
-
-             def ensure_auto_upgrade_on_connection(**kwargs):
-                 connection = kwargs.get("connection")
-                 if connection is not None and connection.alias != "default":
-                     return
-
-                 try:
-                     ensure_auto_upgrade_periodic_task()
-                 finally:
-                     connection_created.disconnect(
-                         receiver=ensure_auto_upgrade_on_connection,
-                         dispatch_uid=auto_upgrade_dispatch_uid,
-                     )
-
-             connection_created.connect(
-                 ensure_auto_upgrade_on_connection,
-                 dispatch_uid=auto_upgrade_dispatch_uid,
-                 weak=False,
-             )
-
-             default_connection = connections[DEFAULT_DB_ALIAS]
-             if default_connection.connection is not None:
-                 ensure_auto_upgrade_on_connection(connection=default_connection)
-
-         def enable_sqlite_wal(**kwargs):
-             connection = kwargs.get("connection")
-             if connection.vendor == "sqlite":
-                 cursor = connection.cursor()
-                 cursor.execute("PRAGMA journal_mode=WAL;")
-                 cursor.execute("PRAGMA busy_timeout=60000;")
-                 cursor.close()
-
-         connection_created.connect(enable_sqlite_wal)
-
-         def queue_github_issue(sender, request=None, **kwargs):
-             if not getattr(settings, "GITHUB_ISSUE_REPORTING_ENABLED", True):
-                 return
-             if request is None:
-                 return
-
-             exception = kwargs.get("exception")
-             if exception is None:
-                 return
-
-             try:
-                 tb_exc = traceback.TracebackException.from_exception(exception)
-                 stack = tb_exc.stack
-                 top_frame = stack[-1] if stack else None
-                 fingerprint_parts = [
-                     exception.__class__.__module__,
-                     exception.__class__.__name__,
-                 ]
-                 if top_frame:
-                     fingerprint_parts.extend(
-                         [
-                             top_frame.filename,
-                             str(top_frame.lineno),
-                             top_frame.name,
-                         ]
-                     )
-                 fingerprint = hashlib.sha256(
-                     "|".join(fingerprint_parts).encode("utf-8")
-                 ).hexdigest()
-
-                 cooldown = getattr(settings, "GITHUB_ISSUE_REPORTING_COOLDOWN", 3600)
-                 lock_dir = Path(settings.BASE_DIR) / "locks" / "github-issues"
-                 fingerprint_path = None
-                 now = time.time()
-
-                 with suppress(OSError):
-                     lock_dir.mkdir(parents=True, exist_ok=True)
-                     fingerprint_path = lock_dir / fingerprint
-                     if fingerprint_path.exists():
-                         age = now - fingerprint_path.stat().st_mtime
-                         if age < cooldown:
-                             return
-
-                 if fingerprint_path is not None:
-                     with suppress(OSError):
-                         fingerprint_path.write_text(str(now))
-
-                 user_repr = None
-                 user = getattr(request, "user", None)
-                 if user is not None:
-                     try:
-                         if getattr(user, "is_authenticated", False):
-                             user_repr = user.get_username()
-                         else:
-                             user_repr = "anonymous"
-                     except Exception:  # pragma: no cover - defensive
-                         user_repr = str(user)
-
-                 payload = {
-                     "path": getattr(request, "path", None),
-                     "method": getattr(request, "method", None),
-                     "user": user_repr,
-                     "active_app": getattr(request, "active_app", None),
-                     "fingerprint": fingerprint,
-                     "exception_class": f"{exception.__class__.__module__}.{exception.__class__.__name__}",
-                     "traceback": "".join(tb_exc.format()),
-                 }
-
-                 report_exception_to_github.delay(payload)
-             except Exception:  # pragma: no cover - defensive
-                 logger.exception("Failed to queue GitHub issue from request exception")
-
-         got_request_exception.connect(
-             queue_github_issue,
-             dispatch_uid="core.github_issue_reporter",
-             weak=False,
-         )
+ import logging
+
+ from django.apps import AppConfig
+ from django.utils.translation import gettext_lazy as _
+
+
+ logger = logging.getLogger(__name__)
+
+
+ class CoreConfig(AppConfig):
+     default_auto_field = "django.db.models.BigAutoField"
+     name = "core"
+     verbose_name = _("2. Business")
+
+     def ready(self):  # pragma: no cover - called by Django
+         from contextlib import suppress
+         from functools import wraps
+         import hashlib
+         import time
+         import traceback
+         from pathlib import Path
+
+         from django.conf import settings
+         from django.core.exceptions import ObjectDoesNotExist
+         from django.contrib.auth import get_user_model
+         from django.db.models.signals import post_migrate
+         from django.core.signals import got_request_exception
+
+         from core.github_helper import report_exception_to_github
+         from .entity import Entity
+         from .user_data import (
+             patch_admin_user_datum,
+             patch_admin_user_data_views,
+         )
+         from .system import patch_admin_system_view
+         from .environment import patch_admin_environment_view
+         from .sigil_builder import (
+             patch_admin_sigil_builder_view,
+             generate_model_sigils,
+         )
+         from .admin_history import patch_admin_history
+
+         from django_otp.plugins.otp_totp.models import TOTPDevice as OTP_TOTPDevice
+
+         if not hasattr(
+             OTP_TOTPDevice._read_str_from_settings, "_core_totp_issuer_patch"
+         ):
+             original_read_str = OTP_TOTPDevice._read_str_from_settings
+
+             def _core_totp_read_str(self, key):
+                 if key == "OTP_TOTP_ISSUER":
+                     try:
+                         settings_obj = self.custom_settings
+                     except ObjectDoesNotExist:
+                         settings_obj = None
+                     if settings_obj and settings_obj.issuer:
+                         return settings_obj.issuer
+                 return original_read_str(self, key)
+
+             _core_totp_read_str._core_totp_issuer_patch = True
+             OTP_TOTPDevice._read_str_from_settings = _core_totp_read_str
+
+         if not getattr(OTP_TOTPDevice, "_core_user_datum_patch", False):
+             from .models import TOTPDeviceSettings
+
+             def _totp_should_persist(settings_obj):
+                 return bool(
+                     settings_obj
+                     and (
+                         settings_obj.issuer
+                         or settings_obj.is_seed_data
+                         or settings_obj.is_user_data
+                     )
+                 )
+
+             def _totp_save_or_delete(settings_obj):
+                 if settings_obj is None:
+                     return
+                 if _totp_should_persist(settings_obj):
+                     if settings_obj.pk:
+                         settings_obj.save(
+                             update_fields=["issuer", "is_seed_data", "is_user_data"]
+                         )
+                     else:
+                         settings_obj.save()
+                 elif settings_obj.pk:
+                     settings_obj.delete()
+
+             def _totp_get_flag(instance, attr):
+                 cache_key = f"_{attr}"
+                 if cache_key in instance.__dict__:
+                     return instance.__dict__[cache_key]
+                 try:
+                     settings_obj = instance.custom_settings
+                 except ObjectDoesNotExist:
+                     value = False
+                 else:
+                     value = bool(getattr(settings_obj, attr, False))
+                 instance.__dict__[cache_key] = value
+                 return value
+
+             def _totp_set_flag(instance, attr, value):
+                 cache_key = f"_{attr}"
+                 value = bool(value)
+                 try:
+                     settings_obj = instance.custom_settings
+                 except ObjectDoesNotExist:
+                     if not value:
+                         instance.__dict__[cache_key] = False
+                         return
+                     settings_obj = TOTPDeviceSettings(device=instance)
+                 setattr(settings_obj, attr, value)
+                 _totp_save_or_delete(settings_obj)
+                 instance.__dict__[cache_key] = value
+
+             def _totp_get_user_data(instance):
+                 return _totp_get_flag(instance, "is_user_data")
+
+             def _totp_set_user_data(instance, value):
+                 _totp_set_flag(instance, "is_user_data", value)
+
+             def _totp_get_seed_data(instance):
+                 return _totp_get_flag(instance, "is_seed_data")
+
+             def _totp_set_seed_data(instance, value):
+                 _totp_set_flag(instance, "is_seed_data", value)
+
+             OTP_TOTPDevice.is_user_data = property(
+                 _totp_get_user_data, _totp_set_user_data
+             )
+             OTP_TOTPDevice.is_seed_data = property(
+                 _totp_get_seed_data, _totp_set_seed_data
+             )
+             if not hasattr(OTP_TOTPDevice, "all_objects"):
+                 OTP_TOTPDevice.all_objects = OTP_TOTPDevice._default_manager
+             OTP_TOTPDevice.supports_user_datum = True
+             OTP_TOTPDevice.supports_seed_datum = True
+             OTP_TOTPDevice._core_user_datum_patch = True
+
+         def create_default_arthexis(**kwargs):
+             User = get_user_model()
+             if not User.all_objects.exists():
+                 User.all_objects.create_superuser(
+                     pk=1,
+                     username="arthexis",
+                     email="arthexis@gmail.com",
+                     password="arthexis",
+                 )
+
+         post_migrate.connect(create_default_arthexis, sender=self)
+         post_migrate.connect(generate_model_sigils, sender=self)
+         patch_admin_user_datum()
+         patch_admin_user_data_views()
+         patch_admin_system_view()
+         patch_admin_environment_view()
+         patch_admin_sigil_builder_view()
+         patch_admin_history()
+
+         from django.core.serializers import base as serializer_base
+
+         if not hasattr(
+             serializer_base.DeserializedObject.save, "_entity_fixture_patch"
+         ):
+             original_save = serializer_base.DeserializedObject.save
+
+             @wraps(original_save)
+             def patched_save(self, save_m2m=True, using=None, **kwargs):
+                 obj = self.object
+                 if isinstance(obj, Entity):
+                     manager = getattr(
+                         type(obj), "all_objects", type(obj)._default_manager
+                     )
+                     if using:
+                         manager = manager.db_manager(using)
+                     for fields in obj._unique_field_groups():
+                         lookup = {}
+                         for field in fields:
+                             value = getattr(obj, field.attname)
+                             if value is None:
+                                 lookup = {}
+                                 break
+                             lookup[field.attname] = value
+                         if not lookup:
+                             continue
+                         existing = (
+                             manager.filter(**lookup)
+                             .only("pk", "is_seed_data", "is_user_data")
+                             .first()
+                         )
+                         if existing is not None:
+                             obj.pk = existing.pk
+                             obj.is_seed_data = existing.is_seed_data
+                             obj.is_user_data = existing.is_user_data
+                             obj._state.adding = False
+                             if using:
+                                 obj._state.db = using
+                             break
+                 return original_save(self, save_m2m=save_m2m, using=using, **kwargs)
+
+             patched_save._entity_fixture_patch = True
+             serializer_base.DeserializedObject.save = patched_save
+
+         lock = Path(settings.BASE_DIR) / "locks" / "celery.lck"
+
+         from django.db.backends.signals import connection_created
+
+         if lock.exists():
+             from .auto_upgrade import ensure_auto_upgrade_periodic_task
+             from django.db import DEFAULT_DB_ALIAS, connections
+
+             def ensure_email_collector_task(**kwargs):
+                 try:  # pragma: no cover - optional dependency
+                     from django_celery_beat.models import (
+                         IntervalSchedule,
+                         PeriodicTask,
+                     )
+                     from django.db.utils import OperationalError, ProgrammingError
+                 except Exception:  # pragma: no cover - tables or module not ready
+                     return
+
+                 try:
+                     schedule, _ = IntervalSchedule.objects.get_or_create(
+                         every=1, period=IntervalSchedule.HOURS
+                     )
+                     PeriodicTask.objects.get_or_create(
+                         name="poll_email_collectors",
+                         defaults={
+                             "interval": schedule,
+                             "task": "core.tasks.poll_email_collectors",
+                         },
+                     )
+                 except (OperationalError, ProgrammingError):
+                     pass
+
+             post_migrate.connect(ensure_email_collector_task, sender=self)
+             post_migrate.connect(ensure_auto_upgrade_periodic_task, sender=self)
+
+             auto_upgrade_dispatch_uid = "core.apps.ensure_auto_upgrade_periodic_task"
+
+             def ensure_auto_upgrade_on_connection(**kwargs):
+                 connection = kwargs.get("connection")
+                 if connection is not None and connection.alias != "default":
+                     return
+
+                 try:
+                     ensure_auto_upgrade_periodic_task()
+                 finally:
+                     connection_created.disconnect(
+                         receiver=ensure_auto_upgrade_on_connection,
+                         dispatch_uid=auto_upgrade_dispatch_uid,
+                     )
+
+             connection_created.connect(
+                 ensure_auto_upgrade_on_connection,
+                 dispatch_uid=auto_upgrade_dispatch_uid,
+                 weak=False,
+             )
+
+             default_connection = connections[DEFAULT_DB_ALIAS]
+             if default_connection.connection is not None:
+                 ensure_auto_upgrade_on_connection(connection=default_connection)
+
+         def enable_sqlite_wal(**kwargs):
+             connection = kwargs.get("connection")
+             if connection.vendor == "sqlite":
+                 cursor = connection.cursor()
+                 cursor.execute("PRAGMA journal_mode=WAL;")
+                 cursor.execute("PRAGMA busy_timeout=60000;")
+                 cursor.close()
+
+         connection_created.connect(enable_sqlite_wal)
+
+         def queue_github_issue(sender, request=None, **kwargs):
+             if not getattr(settings, "GITHUB_ISSUE_REPORTING_ENABLED", True):
+                 return
+             if request is None:
+                 return
+
+             exception = kwargs.get("exception")
+             if exception is None:
+                 return
+
+             try:
+                 tb_exc = traceback.TracebackException.from_exception(exception)
+                 stack = tb_exc.stack
+                 top_frame = stack[-1] if stack else None
+                 fingerprint_parts = [
+                     exception.__class__.__module__,
+                     exception.__class__.__name__,
+                 ]
+                 if top_frame:
+                     fingerprint_parts.extend(
+                         [
+                             top_frame.filename,
+                             str(top_frame.lineno),
+                             top_frame.name,
+                         ]
+                     )
+                 fingerprint = hashlib.sha256(
+                     "|".join(fingerprint_parts).encode("utf-8")
+                 ).hexdigest()
+
+                 cooldown = getattr(settings, "GITHUB_ISSUE_REPORTING_COOLDOWN", 3600)
+                 lock_dir = Path(settings.BASE_DIR) / "locks" / "github-issues"
+                 fingerprint_path = None
+                 now = time.time()
+
+                 with suppress(OSError):
+                     lock_dir.mkdir(parents=True, exist_ok=True)
+                     fingerprint_path = lock_dir / fingerprint
+                     if fingerprint_path.exists():
+                         age = now - fingerprint_path.stat().st_mtime
+                         if age < cooldown:
+                             return
+
+                 if fingerprint_path is not None:
+                     with suppress(OSError):
+                         fingerprint_path.write_text(str(now))
+
+                 user_repr = None
+                 user = getattr(request, "user", None)
+                 if user is not None:
+                     try:
+                         if getattr(user, "is_authenticated", False):
+                             user_repr = user.get_username()
+                         else:
+                             user_repr = "anonymous"
+                     except Exception:  # pragma: no cover - defensive
+                         user_repr = str(user)
+
+                 payload = {
+                     "path": getattr(request, "path", None),
+                     "method": getattr(request, "method", None),
+                     "user": user_repr,
+                     "active_app": getattr(request, "active_app", None),
+                     "fingerprint": fingerprint,
+                     "exception_class": f"{exception.__class__.__module__}.{exception.__class__.__name__}",
+                     "traceback": "".join(tb_exc.format()),
+                 }
+
+                 report_exception_to_github.delay(payload)
+             except Exception:  # pragma: no cover - defensive
+                 logger.exception("Failed to queue GitHub issue from request exception")
+
+         got_request_exception.connect(
+             queue_github_issue,
+             dispatch_uid="core.github_issue_reporter",
+             weak=False,
+         )
+
+         try:
+             from .mcp.auto_start import schedule_auto_start
+
+             schedule_auto_start()
+         except Exception:  # pragma: no cover - defensive
+             logger.exception("Failed to schedule MCP auto-start")
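The main behavioural change to core/apps.py in 0.1.14 is the new user/seed-data patch applied to django-otp's TOTPDevice (plus the MCP auto-start hook at the end of ready()). The following is a minimal sketch of how the patched flags behave once CoreConfig.ready() has run; it assumes a configured Django environment with the arthexis apps migrated, and the user name and device name are illustrative only.

# Minimal sketch (assumptions: Django settings configured, migrations applied,
# and CoreConfig.ready() has already patched TOTPDevice as shown in the diff).
from django.contrib.auth import get_user_model
from django_otp.plugins.otp_totp.models import TOTPDevice

from core.models import TOTPDeviceSettings  # model referenced by the patch

user = get_user_model().objects.create_user("demo")  # illustrative user
device = TOTPDevice.objects.create(user=user, name="default")

# With no TOTPDeviceSettings row, the patched property falls back to False.
assert device.is_user_data is False

# Assigning the flag creates a TOTPDeviceSettings row linked to the device
# via _totp_set_flag/_totp_save_or_delete, then caches the value on the instance.
device.is_user_data = True
assert TOTPDeviceSettings.objects.filter(device=device, is_user_data=True).exists()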