arthexis-0.1.9-py3-none-any.whl → arthexis-0.1.26-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arthexis might be problematic.
- arthexis-0.1.26.dist-info/METADATA +272 -0
- arthexis-0.1.26.dist-info/RECORD +111 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/licenses/LICENSE +674 -674
- config/__init__.py +5 -5
- config/active_app.py +15 -15
- config/asgi.py +29 -29
- config/auth_app.py +7 -7
- config/celery.py +32 -25
- config/context_processors.py +67 -68
- config/horologia_app.py +7 -7
- config/loadenv.py +11 -11
- config/logging.py +59 -48
- config/middleware.py +71 -25
- config/offline.py +49 -49
- config/settings.py +676 -492
- config/settings_helpers.py +109 -0
- config/urls.py +228 -159
- config/wsgi.py +17 -17
- core/admin.py +4052 -2066
- core/admin_history.py +50 -50
- core/admindocs.py +192 -151
- core/apps.py +350 -223
- core/auto_upgrade.py +72 -0
- core/backends.py +311 -124
- core/changelog.py +403 -0
- core/entity.py +149 -133
- core/environment.py +60 -43
- core/fields.py +168 -75
- core/form_fields.py +75 -0
- core/github_helper.py +188 -25
- core/github_issues.py +183 -172
- core/github_repos.py +72 -0
- core/lcd_screen.py +78 -78
- core/liveupdate.py +25 -25
- core/log_paths.py +114 -100
- core/mailer.py +89 -83
- core/middleware.py +91 -91
- core/models.py +5041 -2195
- core/notifications.py +105 -105
- core/public_wifi.py +267 -227
- core/reference_utils.py +107 -0
- core/release.py +940 -346
- core/rfid_import_export.py +113 -0
- core/sigil_builder.py +149 -131
- core/sigil_context.py +20 -20
- core/sigil_resolver.py +250 -284
- core/system.py +1425 -230
- core/tasks.py +538 -199
- core/temp_passwords.py +181 -0
- core/test_system_info.py +202 -43
- core/tests.py +2673 -1069
- core/tests_liveupdate.py +17 -17
- core/urls.py +11 -11
- core/user_data.py +681 -495
- core/views.py +2484 -789
- core/widgets.py +213 -51
- nodes/admin.py +2236 -445
- nodes/apps.py +98 -70
- nodes/backends.py +160 -53
- nodes/dns.py +203 -0
- nodes/feature_checks.py +133 -0
- nodes/lcd.py +165 -165
- nodes/models.py +2375 -870
- nodes/reports.py +411 -0
- nodes/rfid_sync.py +210 -0
- nodes/signals.py +18 -0
- nodes/tasks.py +141 -46
- nodes/tests.py +5045 -1489
- nodes/urls.py +29 -13
- nodes/utils.py +172 -73
- nodes/views.py +1768 -304
- ocpp/admin.py +1775 -481
- ocpp/apps.py +25 -25
- ocpp/consumers.py +1843 -630
- ocpp/evcs.py +844 -928
- ocpp/evcs_discovery.py +158 -0
- ocpp/models.py +1417 -640
- ocpp/network.py +398 -0
- ocpp/reference_utils.py +42 -0
- ocpp/routing.py +11 -9
- ocpp/simulator.py +745 -368
- ocpp/status_display.py +26 -0
- ocpp/store.py +603 -403
- ocpp/tasks.py +479 -31
- ocpp/test_export_import.py +131 -130
- ocpp/test_rfid.py +1072 -540
- ocpp/tests.py +5494 -2296
- ocpp/transactions_io.py +197 -165
- ocpp/urls.py +50 -50
- ocpp/views.py +2024 -912
- pages/admin.py +1123 -396
- pages/apps.py +45 -10
- pages/checks.py +40 -40
- pages/context_processors.py +151 -85
- pages/defaults.py +13 -0
- pages/forms.py +221 -0
- pages/middleware.py +213 -153
- pages/models.py +720 -252
- pages/module_defaults.py +156 -0
- pages/site_config.py +137 -0
- pages/tasks.py +74 -0
- pages/tests.py +4009 -1389
- pages/urls.py +38 -20
- pages/utils.py +93 -12
- pages/views.py +1736 -762
- arthexis-0.1.9.dist-info/METADATA +0 -168
- arthexis-0.1.9.dist-info/RECORD +0 -92
- core/workgroup_urls.py +0 -17
- core/workgroup_views.py +0 -94
- nodes/actions.py +0 -70
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/WHEEL +0 -0
- {arthexis-0.1.9.dist-info → arthexis-0.1.26.dist-info}/top_level.txt +0 -0
core/views.py
CHANGED
@@ -1,789 +1,2484 @@
- [789 deleted lines: the previous core/views.py. The diff viewer rendered only line markers for the removed side; apart from stray fragments of its import block (`import`, `from django.`, `from .`, a `def`, and a few closing brackets), the deleted content is not recoverable from this page.]
import base64
import binascii
import json
import logging
import os
import shutil
import uuid
from datetime import datetime, timedelta, timezone as datetime_timezone

import requests
from django.conf import settings
from django.contrib.admin.sites import site as admin_site
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth import authenticate, login
from django.contrib import messages
from django.contrib.sites.models import Site
from django.http import Http404, JsonResponse, HttpResponse
from django.shortcuts import get_object_or_404, redirect, render, resolve_url
from django.template.response import TemplateResponse
from django.utils import timezone
from django.utils.html import strip_tags
from django.utils.translation import gettext as _
from django.urls import NoReverseMatch, reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET, require_POST
from django.utils.http import url_has_allowed_host_and_scheme
from pathlib import Path
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
import errno
import subprocess
from typing import Optional, Sequence

from django.template.loader import get_template
from django.test import signals

from utils import revision
from nodes.utils import save_screenshot
from utils.api import api_login_required

logger = logging.getLogger(__name__)

PYPI_REQUEST_TIMEOUT = 10

from . import changelog as changelog_utils
from . import temp_passwords
from .models import OdooProfile, Product, EnergyAccount, PackageRelease, Todo
from .models import RFID


@staff_member_required
def odoo_products(request):
    """Return available products from the user's Odoo instance."""

    profile = getattr(request.user, "odoo_profile", None)
    if not profile or not profile.is_verified:
        raise Http404
    try:
        products = profile.execute(
            "product.product",
            "search_read",
            fields=["name"],
            limit=50,
        )
    except Exception:
        logger.exception(
            "Failed to fetch Odoo products via API for user %s (profile_id=%s, host=%s, database=%s)",
            getattr(request.user, "pk", None),
            getattr(profile, "pk", None),
            getattr(profile, "host", None),
            getattr(profile, "database", None),
        )
        return JsonResponse({"detail": "Unable to fetch products"}, status=502)
    items = [{"id": p.get("id"), "name": p.get("name", "")} for p in products]
    return JsonResponse(items, safe=False)

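The view above degrades cleanly: Http404 when no verified profile exists, 502 when the Odoo RPC fails. A minimal sketch of exercising it with Django's test client (the URL path and `staff_user` are assumptions; the actual route is wired in core/urls.py):

# Sketch only -- not part of core/views.py. Assumes a staff account with a
# verified OdooProfile and that the view is routed at /odoo/products/.
from django.test import Client

client = Client()
client.force_login(staff_user)  # staff_user: an existing staff User object
resp = client.get("/odoo/products/")
if resp.status_code == 200:
    items = resp.json()  # e.g. [{"id": 7, "name": "Some product"}, ...]
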
@staff_member_required
def odoo_quote_report(request):
    """Display a consolidated quote report from the user's Odoo instance."""

    profile = getattr(request.user, "odoo_profile", None)
    context = {
        "title": _("Quote Report"),
        "profile": profile,
        "error": None,
        "template_stats": [],
        "quotes": [],
        "recent_products": [],
        "installed_modules": [],
        "profile_url": "",
    }

    profile_admin = admin_site._registry.get(OdooProfile)
    if profile_admin is not None:
        try:
            context["profile_url"] = profile_admin.get_my_profile_url(request)
        except Exception:  # pragma: no cover - defensive fallback
            context["profile_url"] = ""

    if not profile or not profile.is_verified:
        context["error"] = _(
            "Configure and verify your Odoo employee credentials before generating the report."
        )
        return TemplateResponse(
            request, "admin/core/odoo_quote_report.html", context
        )

    def _parse_datetime(value):
        if not value:
            return None
        if isinstance(value, datetime):
            dt = value
        else:
            text = str(value)
            try:
                dt = datetime.fromisoformat(text)
            except ValueError:
                text_iso = text.replace(" ", "T")
                try:
                    dt = datetime.fromisoformat(text_iso)
                except ValueError:
                    for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
                        try:
                            dt = datetime.strptime(text, fmt)
                            break
                        except ValueError:
                            continue
                    else:
                        return None
        if timezone.is_naive(dt):
            tzinfo = getattr(timezone, "utc", datetime_timezone.utc)
            dt = timezone.make_aware(dt, tzinfo)
        return dt

    try:
        templates = profile.execute(
            "sale.order.template",
            "search_read",
            fields=["name"],
            order="name asc",
        )
        template_usage = profile.execute(
            "sale.order",
            "read_group",
            [[("sale_order_template_id", "!=", False)]],
            ["sale_order_template_id"],
            lazy=False,
        )

        usage_map = {}
        for entry in template_usage:
            template_info = entry.get("sale_order_template_id")
            if not template_info:
                continue
            template_id = template_info[0]
            usage_map[template_id] = entry.get(
                "sale_order_template_id_count", 0
            )

        context["template_stats"] = [
            {
                "id": template.get("id"),
                "name": template.get("name", ""),
                "quote_count": usage_map.get(template.get("id"), 0),
            }
            for template in templates
        ]

        ninety_days_ago = timezone.now() - timedelta(days=90)
        quotes = profile.execute(
            "sale.order",
            "search_read",
            [
                [
                    ("create_date", ">=", ninety_days_ago.strftime("%Y-%m-%d %H:%M:%S")),
                    ("state", "!=", "cancel"),
                    ("quote_sent", "=", False),
                ]
            ],
            fields=[
                "name",
                "amount_total",
                "partner_id",
                "activity_type_id",
                "activity_summary",
                "tag_ids",
                "create_date",
                "currency_id",
            ],
            order="create_date desc",
        )

        tag_ids = set()
        currency_ids = set()
        for quote in quotes:
            tag_ids.update(quote.get("tag_ids") or [])
            currency_info = quote.get("currency_id")
            if (
                isinstance(currency_info, (list, tuple))
                and len(currency_info) >= 1
                and currency_info[0]
            ):
                currency_ids.add(currency_info[0])

        tag_map: dict[int, str] = {}
        if tag_ids:
            tag_records = profile.execute(
                "sale.order.tag",
                "read",
                list(tag_ids),
                fields=["name"],
            )
            for tag in tag_records:
                tag_id = tag.get("id")
                if tag_id is not None:
                    tag_map[tag_id] = tag.get("name", "")

        currency_map: dict[int, dict[str, str]] = {}
        if currency_ids:
            currency_records = profile.execute(
                "res.currency",
                "read",
                list(currency_ids),
                fields=["name", "symbol"],
            )
            for currency in currency_records:
                currency_id = currency.get("id")
                if currency_id is not None:
                    currency_map[currency_id] = {
                        "name": currency.get("name", ""),
                        "symbol": currency.get("symbol", ""),
                    }

        prepared_quotes = []
        for quote in quotes:
            partner = quote.get("partner_id")
            customer = ""
            if isinstance(partner, (list, tuple)) and len(partner) >= 2:
                customer = partner[1]

            activity_type = quote.get("activity_type_id")
            activity_name = ""
            if isinstance(activity_type, (list, tuple)) and len(activity_type) >= 2:
                activity_name = activity_type[1]

            activity_summary = quote.get("activity_summary") or ""
            activity_value = activity_summary or activity_name

            quote_tags = [
                tag_map.get(tag_id, str(tag_id))
                for tag_id in quote.get("tag_ids") or []
            ]

            currency_info = quote.get("currency_id")
            currency_label = ""
            if isinstance(currency_info, (list, tuple)) and currency_info:
                currency_id = currency_info[0]
                currency_details = currency_map.get(currency_id, {})
                currency_label = (
                    currency_details.get("symbol")
                    or currency_details.get("name")
                    or (currency_info[1] if len(currency_info) >= 2 else "")
                )

            amount_total = quote.get("amount_total") or 0
            if currency_label:
                total_display = f"{currency_label}{amount_total:,.2f}"
            else:
                total_display = f"{amount_total:,.2f}"

            prepared_quotes.append(
                {
                    "name": quote.get("name", ""),
                    "customer": customer,
                    "activity": activity_value,
                    "tags": quote_tags,
                    "create_date": _parse_datetime(quote.get("create_date")),
                    "total": amount_total,
                    "total_display": total_display,
                }
            )

        context["quotes"] = prepared_quotes

        products = profile.execute(
            "product.product",
            "search_read",
            fields=["name", "default_code", "write_date", "create_date"],
            limit=10,
            order="write_date desc, create_date desc",
        )
        context["recent_products"] = [
            {
                "name": product.get("name", ""),
                "default_code": product.get("default_code", ""),
                "create_date": _parse_datetime(product.get("create_date")),
                "write_date": _parse_datetime(product.get("write_date")),
            }
            for product in products
        ]

        modules = profile.execute(
            "ir.module.module",
            "search_read",
            [[("state", "=", "installed")]],
            fields=["name", "shortdesc", "latest_version", "author"],
            order="name asc",
        )
        context["installed_modules"] = [
            {
                "name": module.get("name", ""),
                "shortdesc": module.get("shortdesc", ""),
                "latest_version": module.get("latest_version", ""),
                "author": module.get("author", ""),
            }
            for module in modules
        ]

    except Exception:
        logger.exception(
            "Failed to build Odoo quote report for user %s (profile_id=%s)",
            getattr(request.user, "pk", None),
            getattr(profile, "pk", None),
        )
        context["error"] = _("Unable to generate the quote report from Odoo.")
        return TemplateResponse(
            request,
            "admin/core/odoo_quote_report.html",
            context,
            status=502,
        )

    return TemplateResponse(request, "admin/core/odoo_quote_report.html", context)

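The nested `_parse_datetime` helper above tries `datetime.fromisoformat`, then a space-to-`T` rewrite, then two explicit `strptime` formats before giving up. The same fallback chain, restated as a standalone sketch without the Django timezone handling:

# Sketch only -- mirrors the parsing order of _parse_datetime above.
from datetime import datetime

def parse_odoo_datetime(value):
    if isinstance(value, datetime):
        return value
    text = str(value)
    for candidate in (text, text.replace(" ", "T")):
        try:
            return datetime.fromisoformat(candidate)
        except ValueError:
            pass
    for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"):
        try:
            return datetime.strptime(text, fmt)
        except ValueError:
            continue
    return None

assert parse_odoo_datetime("2024-05-01 12:30:00") == datetime(2024, 5, 1, 12, 30)
assert parse_odoo_datetime("not a date") is None
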
@staff_member_required
@require_GET
def request_temp_password(request):
    """Generate a temporary password for the authenticated staff member."""

    user = request.user
    username = user.get_username()
    password = temp_passwords.generate_password()
    entry = temp_passwords.store_temp_password(
        username,
        password,
        allow_change=True,
    )
    context = {
        **admin_site.each_context(request),
        "title": _("Temporary password"),
        "username": username,
        "password": password,
        "expires_at": timezone.localtime(entry.expires_at),
        "allow_change": entry.allow_change,
        "return_url": reverse("admin:password_change"),
    }
    return TemplateResponse(
        request,
        "admin/core/request_temp_password.html",
        context,
    )

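`request_temp_password` leans entirely on the package's internal `temp_passwords` module: generate, store with `allow_change=True`, then render the plaintext once alongside its expiry. A sketch of that sequence in isolation, assuming only the calls visible in the view above:

# Sketch only -- the temp_passwords API exactly as the view calls it.
from core import temp_passwords

password = temp_passwords.generate_password()
entry = temp_passwords.store_temp_password("alice", password, allow_change=True)
# The view renders: the username, the plaintext password (shown once),
# entry.expires_at (localized), and entry.allow_change.
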
@staff_member_required
@require_GET
def version_info(request):
    """Return the running application version and Git revision."""

    version = ""
    version_path = Path(settings.BASE_DIR) / "VERSION"
    if version_path.exists():
        version = version_path.read_text(encoding="utf-8").strip()
    return JsonResponse(
        {
            "version": version,
            "revision": revision.get_revision(),
        }
    )

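`version_info` reads the VERSION file under `settings.BASE_DIR` and pairs it with the Git revision, so a running deployment can be identified remotely. A hypothetical client call (host and path are assumptions, not taken from the package):

# Sketch only -- querying the endpoint from outside.
import requests

resp = requests.get("http://localhost:8000/version/", timeout=5)
data = resp.json()
# e.g. {"version": "0.1.26", "revision": "..."} -- revision is whatever
# utils.revision.get_revision() reports, likely a commit identifier.
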
from . import release as release_utils
from .log_paths import select_log_dir


TODO_FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures"


DIRTY_COMMIT_DEFAULT_MESSAGE = "chore: commit pending changes"


DIRTY_STATUS_LABELS = {
    "A": _("Added"),
    "C": _("Copied"),
    "D": _("Deleted"),
    "M": _("Modified"),
    "R": _("Renamed"),
    "U": _("Updated"),
    "??": _("Untracked"),
}


def _append_log(path: Path, message: str) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("a", encoding="utf-8") as fh:
        fh.write(message + "\n")


def _release_log_name(package_name: str, version: str) -> str:
    return f"pr.{package_name}.v{version}.log"

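Given the f-string above, a publish run of this package at 0.1.26 would log to `pr.arthexis.v0.1.26.log`:

assert _release_log_name("arthexis", "0.1.26") == "pr.arthexis.v0.1.26.log"
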
def _ensure_log_directory(path: Path) -> tuple[bool, OSError | None]:
    """Return whether ``path`` is writable along with the triggering error."""

    try:
        path.mkdir(parents=True, exist_ok=True)
    except OSError as exc:
        return False, exc

    probe = path / f".permcheck_{uuid.uuid4().hex}"
    try:
        with probe.open("w", encoding="utf-8") as fh:
            fh.write("")
    except OSError as exc:
        return False, exc
    else:
        try:
            probe.unlink()
        except OSError:
            pass
        return True, None

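`_ensure_log_directory` proves writability by creating and deleting a uniquely named probe file rather than trusting permission bits, which also catches read-only mounts and quota errors. A condensed standalone version of the same check:

# Sketch only -- same probe-file idea as _ensure_log_directory above.
import uuid
from pathlib import Path

def dir_is_writable(path: Path) -> bool:
    try:
        path.mkdir(parents=True, exist_ok=True)
        probe = path / f".permcheck_{uuid.uuid4().hex}"
        probe.write_text("", encoding="utf-8")
        probe.unlink(missing_ok=True)
        return True
    except OSError:
        return False
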
def _resolve_release_log_dir(preferred: Path) -> tuple[Path, str | None]:
    """Return a writable log directory for the release publish flow."""

    writable, error = _ensure_log_directory(preferred)
    if writable:
        return preferred, None

    logger.warning(
        "Release log directory %s is not writable: %s", preferred, error
    )

    env_override = os.environ.pop("ARTHEXIS_LOG_DIR", None)
    fallback = select_log_dir(Path(settings.BASE_DIR))
    if env_override is not None:
        if Path(env_override) == fallback:
            os.environ["ARTHEXIS_LOG_DIR"] = env_override
        else:
            os.environ["ARTHEXIS_LOG_DIR"] = str(fallback)

    if fallback == preferred:
        if error:
            raise error
        raise PermissionError(f"Release log directory {preferred} is not writable")

    fallback_writable, fallback_error = _ensure_log_directory(fallback)
    if not fallback_writable:
        raise fallback_error or PermissionError(
            f"Release log directory {fallback} is not writable"
        )

    settings.LOG_DIR = fallback
    warning = (
        f"Release log directory {preferred} is not writable; using {fallback}"
    )
    logger.warning(warning)
    return fallback, warning


def _sync_with_origin_main(log_path: Path) -> None:
    """Ensure the current branch is rebased onto ``origin/main``."""

    if not _has_remote("origin"):
        _append_log(log_path, "No git remote configured; skipping sync with origin/main")
        return

    try:
        subprocess.run(["git", "fetch", "origin", "main"], check=True)
        _append_log(log_path, "Fetched latest changes from origin/main")
        subprocess.run(["git", "rebase", "origin/main"], check=True)
        _append_log(log_path, "Rebased current branch onto origin/main")
    except subprocess.CalledProcessError as exc:
        subprocess.run(["git", "rebase", "--abort"], check=False)
        _append_log(log_path, "Rebase onto origin/main failed; aborted rebase")

        stdout = (exc.stdout or "").strip()
        stderr = (exc.stderr or "").strip()
        if stdout:
            _append_log(log_path, "git output:\n" + stdout)
        if stderr:
            _append_log(log_path, "git errors:\n" + stderr)

        status = subprocess.run(
            ["git", "status"], capture_output=True, text=True, check=False
        )
        status_output = (status.stdout or "").strip()
        status_errors = (status.stderr or "").strip()
        if status_output:
            _append_log(log_path, "git status:\n" + status_output)
        if status_errors:
            _append_log(log_path, "git status errors:\n" + status_errors)

        branch = _current_branch() or "(detached HEAD)"
        instructions = [
            "Manual intervention required to finish syncing with origin/main.",
            "Ensure you are on the branch you intend to publish (normally `main`; currently "
            f"{branch}).",
            "Then run these commands from the repository root:",
            "  git fetch origin main",
            "  git rebase origin/main",
            "Resolve any conflicts (use `git status` to review files) and continue the rebase.",
        ]

        if branch != "main" and branch != "(detached HEAD)":
            instructions.append(
                "If this branch should mirror main, push the rebased changes with "
                f"`git push origin {branch}:main`."
            )
        else:
            instructions.append("Push the rebased branch with `git push origin main`.")

        instructions.append(
            "If push authentication fails, verify your git remote permissions and SSH keys "
            "for origin/main before retrying the publish flow."
        )
        _append_log(log_path, "\n".join(instructions))

        raise Exception("Rebase onto main failed") from exc


def _clean_repo() -> None:
    """Return the git repository to a clean state."""
    subprocess.run(["git", "reset", "--hard"], check=False)
    subprocess.run(["git", "clean", "-fd"], check=False)


def _format_path(path: Path) -> str:
    try:
        return str(path.resolve().relative_to(Path.cwd()))
    except ValueError:
        return str(path)


def _git_stdout(args: Sequence[str]) -> str:
    proc = subprocess.run(args, check=True, capture_output=True, text=True)
    return (proc.stdout or "").strip()


def _has_remote(remote: str) -> bool:
    proc = subprocess.run(
        ["git", "remote"],
        check=True,
        capture_output=True,
        text=True,
    )
    remotes = [line.strip() for line in proc.stdout.splitlines() if line.strip()]
    return remote in remotes


def _current_branch() -> str | None:
    branch = _git_stdout(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    if branch == "HEAD":
        return None
    return branch


def _has_upstream(branch: str) -> bool:
    proc = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", f"{branch}@{{upstream}}"],
        capture_output=True,
        text=True,
        check=False,
    )
    return proc.returncode == 0


def _collect_dirty_files() -> list[dict[str, str]]:
    proc = subprocess.run(
        ["git", "status", "--porcelain"],
        capture_output=True,
        text=True,
        check=True,
    )
    dirty: list[dict[str, str]] = []
    for line in proc.stdout.splitlines():
        if not line.strip():
            continue
        status_code = line[:2]
        status = status_code.strip() or status_code
        path = line[3:]
        dirty.append(
            {
                "path": path,
                "status": status,
                "status_label": DIRTY_STATUS_LABELS.get(status, status),
            }
        )
    return dirty


def _format_subprocess_error(exc: subprocess.CalledProcessError) -> str:
    return (exc.stderr or exc.stdout or str(exc)).strip() or str(exc)


def _git_authentication_missing(exc: subprocess.CalledProcessError) -> bool:
    message = (exc.stderr or exc.stdout or "").strip().lower()
    if not message:
        return False
    auth_markers = [
        "could not read username",
        "authentication failed",
        "fatal: authentication failed",
        "terminal prompts disabled",
    ]
    return any(marker in message for marker in auth_markers)

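`_push_release_changes` below picks one of three push invocations: a detached HEAD pushes `HEAD` explicitly, a branch with a tracking ref uses a bare `git push`, and a new branch sets its upstream. The decision logic, restated as a self-contained sketch:

# Sketch only -- mirrors the branch handling in _push_release_changes below.
def choose_push_cmd(branch, has_upstream):
    if branch is None:  # detached HEAD
        return ["git", "push", "origin", "HEAD"]
    if has_upstream:    # branch already tracks a remote
        return ["git", "push"]
    return ["git", "push", "--set-upstream", "origin", branch]

assert choose_push_cmd(None, False) == ["git", "push", "origin", "HEAD"]
assert choose_push_cmd("main", True) == ["git", "push"]
assert choose_push_cmd("feature", False)[-2:] == ["origin", "feature"]
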
def _push_release_changes(log_path: Path) -> bool:
    """Push release commits to ``origin`` and log the outcome."""

    if not _has_remote("origin"):
        _append_log(
            log_path, "No git remote configured; skipping push of release changes"
        )
        return False

    try:
        branch = _current_branch()
        if branch is None:
            push_cmd = ["git", "push", "origin", "HEAD"]
        elif _has_upstream(branch):
            push_cmd = ["git", "push"]
        else:
            push_cmd = ["git", "push", "--set-upstream", "origin", branch]
        subprocess.run(push_cmd, check=True, capture_output=True, text=True)
    except subprocess.CalledProcessError as exc:
        details = _format_subprocess_error(exc)
        if _git_authentication_missing(exc):
            _append_log(
                log_path,
                "Authentication is required to push release changes to origin; skipping push",
            )
            if details:
                _append_log(log_path, details)
            return False
        _append_log(
            log_path, f"Failed to push release changes to origin: {details}"
        )
        raise Exception("Failed to push release changes") from exc

    _append_log(log_path, "Pushed release changes to origin")
    return True


def _ensure_origin_main_unchanged(log_path: Path) -> None:
    """Verify that ``origin/main`` has not advanced during the release."""

    if not _has_remote("origin"):
        _append_log(
            log_path, "No git remote configured; skipping origin/main verification"
        )
        return

    try:
        subprocess.run(["git", "fetch", "origin", "main"], check=True)
        _append_log(log_path, "Fetched latest changes from origin/main")
        origin_main = _git_stdout(["git", "rev-parse", "origin/main"])
        merge_base = _git_stdout(["git", "merge-base", "HEAD", "origin/main"])
    except subprocess.CalledProcessError as exc:
        details = (getattr(exc, "stderr", "") or getattr(exc, "stdout", "") or str(exc)).strip()
        if details:
            _append_log(log_path, f"Failed to verify origin/main status: {details}")
        else:  # pragma: no cover - defensive fallback
            _append_log(log_path, "Failed to verify origin/main status")
        raise Exception("Unable to verify origin/main status") from exc

    if origin_main != merge_base:
        _append_log(log_path, "origin/main advanced during release; restart required")
        raise Exception("origin/main changed during release; restart required")

    _append_log(log_path, "origin/main unchanged since last sync")


def _next_patch_version(version: str) -> str:
    from packaging.version import InvalidVersion, Version

    cleaned = version.rstrip("+")
    try:
        parsed = Version(cleaned)
    except InvalidVersion:
        parts = cleaned.split(".") if cleaned else []
        for index in range(len(parts) - 1, -1, -1):
            segment = parts[index]
            if segment.isdigit():
                parts[index] = str(int(segment) + 1)
                return ".".join(parts)
        return cleaned or version
    return f"{parsed.major}.{parsed.minor}.{parsed.micro + 1}"

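Worked examples for `_next_patch_version` above: the PEP 440 path bumps the micro component, a trailing `+` (local dirty marker) is stripped first, and the fallback bumps the rightmost all-digit segment of an unparseable version string:

assert _next_patch_version("0.1.25") == "0.1.26"  # normal PEP 440 bump
assert _next_patch_version("0.1.9+") == "0.1.10"  # "+" stripped, then bumped
assert _next_patch_version("1.2.x") == "1.3.x"    # fallback: rightmost digits
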
def _should_use_python_changelog(exc: OSError) -> bool:
    winerror = getattr(exc, "winerror", None)
    if winerror in {193}:
        return True
    return exc.errno in {errno.ENOEXEC, errno.EACCES, errno.ENOENT}


def _generate_changelog_with_python(log_path: Path) -> None:
    _append_log(log_path, "Falling back to Python changelog generator")
    changelog_path = Path("CHANGELOG.rst")
    previous = changelog_path.read_text(encoding="utf-8") if changelog_path.exists() else None
    range_spec = changelog_utils.determine_range_spec(previous_text=previous)
    sections = changelog_utils.collect_sections(range_spec=range_spec, previous_text=previous)
    content = changelog_utils.render_changelog(sections)
    if not content.endswith("\n"):
        content += "\n"
    changelog_path.write_text(content, encoding="utf-8")
    _append_log(log_path, "Regenerated CHANGELOG.rst using Python fallback")


def _todo_blocks_publish(todo: Todo, release: PackageRelease) -> bool:
    """Return ``True`` when ``todo`` should block the release workflow."""

    request = (todo.request or "").strip()
    release_name = (release.package.name or "").strip()
    if not request or not release_name:
        return True

    prefix = f"create release {release_name.lower()} "
    if not request.lower().startswith(prefix):
        return True

    release_version = (release.version or "").strip()
    generated_version = (todo.generated_for_version or "").strip()
    if not release_version or release_version != generated_version:
        return True

    generated_revision = (todo.generated_for_revision or "").strip()
    release_revision = (release.revision or "").strip()
    if generated_revision and release_revision and generated_revision != release_revision:
        return True

    if not todo.is_seed_data:
        return True

    return False

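The version arithmetic in `_sync_release_with_revision` below enforces that a release is at least one patch ahead of the repository's VERSION file. With VERSION at 0.1.25, for example, anything below 0.1.26 is rewritten to exactly 0.1.26:

# Sketch only -- the bump comparison used in the function below.
from packaging.version import Version

repo_version = Version("0.1.25")  # as read from the VERSION file
bumped = Version(f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}")
assert str(bumped) == "0.1.26"
assert Version("0.1.25") < bumped  # a stale release.version gets replaced
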
def _sync_release_with_revision(release: PackageRelease) -> tuple[bool, str]:
    """Ensure ``release`` matches the repository revision and version.

    Returns a tuple ``(updated, previous_version)`` where ``updated`` is
    ``True`` when any field changed and ``previous_version`` is the version
    before synchronization.
    """

    from packaging.version import InvalidVersion, Version

    previous_version = release.version
    updated_fields: set[str] = set()

    repo_version: Version | None = None
    version_path = Path("VERSION")
    if version_path.exists():
        try:
            raw_version = version_path.read_text(encoding="utf-8").strip()
            cleaned_version = raw_version.rstrip("+") or "0.0.0"
            repo_version = Version(cleaned_version)
        except InvalidVersion:
            repo_version = None

    try:
        release_version = Version(release.version)
    except InvalidVersion:
        release_version = None

    if repo_version is not None:
        bumped_repo_version = Version(
            f"{repo_version.major}.{repo_version.minor}.{repo_version.micro + 1}"
        )
        if release_version is None or release_version < bumped_repo_version:
            release.version = str(bumped_repo_version)
            release_version = bumped_repo_version
            updated_fields.add("version")

    current_revision = revision.get_revision()
    if current_revision and current_revision != release.revision:
        release.revision = current_revision
        updated_fields.add("revision")

    if updated_fields:
        release.save(update_fields=list(updated_fields))
        PackageRelease.dump_fixture()

    package_updated = False
    if release.package_id and not release.package.is_active:
        release.package.is_active = True
        release.package.save(update_fields=["is_active"])
        package_updated = True

    version_updated = False
    if release.version:
        current = ""
        if version_path.exists():
            current = version_path.read_text(encoding="utf-8").strip()
        if current != release.version:
            version_path.write_text(f"{release.version}\n", encoding="utf-8")
            version_updated = True

    return bool(updated_fields or version_updated or package_updated), previous_version


def _changelog_notes(version: str) -> str:
    path = Path("CHANGELOG.rst")
    if not path.exists():
        return ""
    notes = changelog_utils.extract_release_notes(
        path.read_text(encoding="utf-8"), version
    )
    return notes.strip()


class PendingTodos(Exception):
    """Raised when TODO items require acknowledgment before proceeding."""


class ApprovalRequired(Exception):
    """Raised when release manager approval is required before continuing."""


class DirtyRepository(Exception):
    """Raised when the Git workspace has uncommitted changes."""


def _format_condition_failure(todo: Todo, result) -> str:
    """Return a localized error message for a failed TODO condition."""

    if result.error and result.resolved:
        detail = _("%(condition)s (error: %(error)s)") % {
            "condition": result.resolved,
            "error": result.error,
        }
    elif result.error:
        detail = _("Error: %(error)s") % {"error": result.error}
    elif result.resolved:
        detail = result.resolved
    else:
        detail = _("Condition evaluated to False")
    return _("Condition failed for %(todo)s: %(detail)s") % {
        "todo": todo.request,
        "detail": detail,
    }


def _get_return_url(request) -> str:
    """Return a safe URL to redirect back to after completing a TODO."""

    candidates = [request.GET.get("next"), request.POST.get("next")]
    referer = request.META.get("HTTP_REFERER")
    if referer:
        candidates.append(referer)

    for candidate in candidates:
        if not candidate:
            continue
        if url_has_allowed_host_and_scheme(
            candidate,
            allowed_hosts={request.get_host()},
            require_https=request.is_secure(),
        ):
            return candidate
    return resolve_url("admin:index")

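`_get_return_url` above is the standard open-redirect guard: each candidate (query parameter, then Referer) passes through Django's `url_has_allowed_host_and_scheme`, and only same-host URLs survive. In isolation:

# Sketch only -- the validation applied to each redirect candidate above.
from django.utils.http import url_has_allowed_host_and_scheme

host = "example.com"  # stands in for request.get_host()
assert url_has_allowed_host_and_scheme("/admin/core/todo/", allowed_hosts={host})
assert not url_has_allowed_host_and_scheme(
    "https://evil.test/steal", allowed_hosts={host}
)
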
878
|
+
def _refresh_changelog_once(ctx, log_path: Path) -> None:
|
|
879
|
+
"""Regenerate the changelog a single time per release run."""
|
|
880
|
+
|
|
881
|
+
if ctx.get("changelog_refreshed"):
|
|
882
|
+
return
|
|
883
|
+
|
|
884
|
+
_append_log(log_path, "Refreshing changelog before TODO review")
|
|
885
|
+
try:
|
|
886
|
+
subprocess.run(["scripts/generate-changelog.sh"], check=True)
|
|
887
|
+
except OSError as exc:
|
|
888
|
+
if _should_use_python_changelog(exc):
|
|
889
|
+
_append_log(
|
|
890
|
+
log_path,
|
|
891
|
+
f"scripts/generate-changelog.sh failed: {exc}",
|
|
892
|
+
)
|
|
893
|
+
_generate_changelog_with_python(log_path)
|
|
894
|
+
else: # pragma: no cover - unexpected OSError
|
|
895
|
+
raise
|
|
896
|
+
else:
|
|
897
|
+
_append_log(
|
|
898
|
+
log_path,
|
|
899
|
+
"Regenerated CHANGELOG.rst using scripts/generate-changelog.sh",
|
|
900
|
+
)
|
|
901
|
+
|
|
902
|
+
staged_paths: list[str] = []
|
|
903
|
+
changelog_path = Path("CHANGELOG.rst")
|
|
904
|
+
if changelog_path.exists():
|
|
905
|
+
staged_paths.append(str(changelog_path))
|
|
906
|
+
|
|
907
|
+
release_fixtures = sorted(Path("core/fixtures").glob("releases__*.json"))
|
|
908
|
+
staged_paths.extend(str(path) for path in release_fixtures)
|
|
909
|
+
|
|
910
|
+
if staged_paths:
|
|
911
|
+
subprocess.run(["git", "add", *staged_paths], check=True)
|
|
912
|
+
|
|
913
|
+
diff = subprocess.run(
|
|
914
|
+
["git", "diff", "--cached", "--name-only"],
|
|
915
|
+
check=True,
|
|
916
|
+
capture_output=True,
|
|
917
|
+
text=True,
|
|
918
|
+
)
|
|
919
|
+
changed_paths = [line.strip() for line in diff.stdout.splitlines() if line.strip()]
|
|
920
|
+
|
|
921
|
+
if changed_paths:
|
|
922
|
+
changelog_dirty = "CHANGELOG.rst" in changed_paths
|
|
923
|
+
fixtures_dirty = any(path.startswith("core/fixtures/") for path in changed_paths)
|
|
924
|
+
if changelog_dirty and fixtures_dirty:
|
|
925
|
+
message = "chore: sync release fixtures and changelog"
|
|
926
|
+
elif changelog_dirty:
|
|
927
|
+
message = "docs: refresh changelog"
|
|
928
|
+
else:
|
|
929
|
+
message = "chore: update release fixtures"
|
|
930
|
+
subprocess.run(["git", "commit", "-m", message], check=True)
|
|
931
|
+
_append_log(log_path, f"Committed changelog refresh ({message})")
|
|
932
|
+
else:
|
|
933
|
+
_append_log(log_path, "Changelog already up to date")
|
|
934
|
+
|
|
935
|
+
ctx["changelog_refreshed"] = True
|
|
936
|
+
|
|
937
|
+
|
|
938
|
+
def _step_check_todos(release, ctx, log_path: Path, *, user=None) -> None:
|
|
939
|
+
_refresh_changelog_once(ctx, log_path)
|
|
940
|
+
|
|
941
|
+
pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
|
|
942
|
+
pending_values = list(
|
|
943
|
+
pending_qs.values("id", "request", "url", "request_details")
|
|
944
|
+
)
|
|
945
|
+
if not pending_values:
|
|
946
|
+
ctx["todos_ack"] = True
|
|
947
|
+
|
|
948
|
+
if not ctx.get("todos_ack"):
|
|
949
|
+
if not ctx.get("todos_block_logged"):
|
|
950
|
+
_append_log(
|
|
951
|
+
log_path,
|
|
952
|
+
"Release checklist requires acknowledgment before continuing. "
|
|
953
|
+
"Review outstanding TODO items and confirm the checklist; "
|
|
954
|
+
"publishing will resume automatically afterward.",
|
|
955
|
+
)
|
|
956
|
+
ctx["todos_block_logged"] = True
|
|
957
|
+
ctx["todos"] = pending_values
|
|
958
|
+
ctx["todos_required"] = True
|
|
959
|
+
raise PendingTodos()
|
|
960
|
+
todos = list(Todo.objects.filter(is_deleted=False))
|
|
961
|
+
for todo in todos:
|
|
962
|
+
todo.delete()
|
|
963
|
+
removed = []
|
|
964
|
+
for path in TODO_FIXTURE_DIR.glob("todos__*.json"):
|
|
965
|
+
removed.append(str(path))
|
|
966
|
+
path.unlink()
|
|
967
|
+
if removed:
|
|
968
|
+
subprocess.run(["git", "add", *removed], check=False)
|
|
969
|
+
subprocess.run(
|
|
970
|
+
["git", "commit", "-m", "chore: remove TODO fixtures"],
|
|
971
|
+
check=False,
|
|
972
|
+
)
|
|
973
|
+
ctx.pop("todos", None)
|
|
974
|
+
ctx.pop("todos_required", None)
|
|
975
|
+
ctx["todos_ack"] = True
|
|
976
|
+
|
|
977
|
+
|
|
978
|
+
def _step_check_version(release, ctx, log_path: Path, *, user=None) -> None:
|
|
979
|
+
from . import release as release_utils
|
|
980
|
+
from packaging.version import InvalidVersion, Version
|
|
981
|
+
|
|
982
|
+
sync_error: Optional[Exception] = None
|
|
983
|
+
retry_sync = False
|
|
984
|
+
try:
|
|
985
|
+
_sync_with_origin_main(log_path)
|
|
986
|
+
except Exception as exc:
|
|
987
|
+
sync_error = exc
|
|
988
|
+
|
|
989
|
+
if not release_utils._git_clean():
|
|
990
|
+
dirty_entries = _collect_dirty_files()
|
|
991
|
+
files = [entry["path"] for entry in dirty_entries]
|
|
992
|
+
fixture_files = [
|
|
993
|
+
f
|
|
994
|
+
for f in files
|
|
995
|
+
if "fixtures" in Path(f).parts and Path(f).suffix == ".json"
|
|
996
|
+
]
|
|
997
|
+
changelog_dirty = "CHANGELOG.rst" in files
|
|
998
|
+
version_dirty = "VERSION" in files
|
|
999
|
+
allowed_dirty_files = set(fixture_files)
|
|
1000
|
+
if changelog_dirty:
|
|
1001
|
+
allowed_dirty_files.add("CHANGELOG.rst")
|
|
1002
|
+
if version_dirty:
|
|
1003
|
+
allowed_dirty_files.add("VERSION")
|
|
1004
|
+
|
|
1005
|
+
if files and len(allowed_dirty_files) == len(files):
|
|
1006
|
+
summary = []
|
|
1007
|
+
for f in fixture_files:
|
|
1008
|
+
path = Path(f)
|
|
1009
|
+
try:
|
|
1010
|
+
data = json.loads(path.read_text(encoding="utf-8"))
|
|
1011
|
+
except Exception:
|
|
1012
|
+
count = 0
|
|
1013
|
+
models: list[str] = []
|
|
1014
|
+
else:
|
|
1015
|
+
if isinstance(data, list):
|
|
1016
|
+
count = len(data)
|
|
1017
|
+
models = sorted(
|
|
1018
|
+
{
|
|
1019
|
+
obj.get("model", "")
|
|
1020
|
+
for obj in data
|
|
1021
|
+
if isinstance(obj, dict)
|
|
1022
|
+
}
|
|
1023
|
+
)
|
|
1024
|
+
elif isinstance(data, dict):
|
|
1025
|
+
count = 1
|
|
1026
|
+
models = [data.get("model", "")]
|
|
1027
|
+
else: # pragma: no cover - unexpected structure
|
|
1028
|
+
count = 0
|
|
1029
|
+
models = []
|
|
1030
|
+
summary.append({"path": f, "count": count, "models": models})
|
|
1031
|
+
|
|
1032
|
+
ctx["fixtures"] = summary
|
|
1033
|
+
commit_paths = [*fixture_files]
|
|
1034
|
+
if changelog_dirty:
|
|
1035
|
+
commit_paths.append("CHANGELOG.rst")
|
|
1036
|
+
if version_dirty:
|
|
1037
|
+
commit_paths.append("VERSION")
|
|
1038
|
+
|
|
1039
|
+
log_fragments = []
|
|
1040
|
+
if fixture_files:
|
|
1041
|
+
log_fragments.append(
|
|
1042
|
+
"fixtures " + ", ".join(fixture_files)
|
|
1043
|
+
)
|
|
1044
|
+
if changelog_dirty:
|
|
1045
|
+
log_fragments.append("CHANGELOG.rst")
|
|
1046
|
+
if version_dirty:
|
|
1047
|
+
log_fragments.append("VERSION")
|
|
1048
|
+
details = ", ".join(log_fragments) if log_fragments else "changes"
|
|
1049
|
+
_append_log(
|
|
1050
|
+
log_path,
|
|
1051
|
+
f"Committing release prep changes: {details}",
|
|
1052
|
+
)
|
|
1053
|
+
subprocess.run(["git", "add", *commit_paths], check=True)
|
|
1054
|
+
|
|
1055
|
+
if changelog_dirty and version_dirty and fixture_files:
|
|
1056
|
+
commit_message = "chore: sync release metadata"
|
|
1057
|
+
elif changelog_dirty and version_dirty:
|
|
1058
|
+
commit_message = "chore: update version and changelog"
|
|
1059
|
+
elif version_dirty and fixture_files:
|
|
1060
|
+
commit_message = "chore: update version and fixtures"
|
|
1061
|
+
elif changelog_dirty and fixture_files:
|
|
1062
|
+
commit_message = "chore: sync release fixtures and changelog"
|
|
1063
|
+
elif version_dirty:
|
|
1064
|
+
commit_message = "chore: update version"
|
|
1065
|
+
elif changelog_dirty:
|
|
1066
|
+
commit_message = "docs: refresh changelog"
|
|
1067
|
+
else:
|
|
1068
|
+
commit_message = "chore: update fixtures"
|
|
1069
|
+
|
|
1070
|
+
subprocess.run(["git", "commit", "-m", commit_message], check=True)
|
|
1071
|
+
_append_log(
|
|
1072
|
+
log_path,
|
|
1073
|
+
f"Release prep changes committed ({commit_message})",
|
|
1074
|
+
)
|
|
1075
|
+
ctx.pop("dirty_files", None)
|
|
1076
|
+
ctx.pop("dirty_commit_error", None)
|
|
1077
|
+
retry_sync = True
|
|
1078
|
+
else:
|
|
1079
|
+
ctx["dirty_files"] = dirty_entries
|
|
1080
|
+
ctx.setdefault("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE)
|
|
1081
|
+
ctx.pop("fixtures", None)
|
|
1082
|
+
ctx.pop("dirty_commit_error", None)
|
|
1083
|
+
if dirty_entries:
|
|
1084
|
+
details = ", ".join(entry["path"] for entry in dirty_entries)
|
|
1085
|
+
else:
|
|
1086
|
+
details = ""
|
|
1087
|
+
message = "Git repository has uncommitted changes"
|
|
1088
|
+
if details:
|
|
1089
|
+
message += f": {details}"
|
|
1090
|
+
if ctx.get("dirty_log_message") != message:
|
|
1091
|
+
_append_log(log_path, message)
|
|
1092
|
+
ctx["dirty_log_message"] = message
|
|
1093
|
+
raise DirtyRepository()
    else:
        ctx.pop("dirty_files", None)
        ctx.pop("dirty_commit_error", None)
        ctx.pop("dirty_log_message", None)

    if retry_sync and sync_error is not None:
        try:
            _sync_with_origin_main(log_path)
        except Exception as exc:
            sync_error = exc
        else:
            sync_error = None

    if sync_error is not None:
        raise sync_error

    version_path = Path("VERSION")
    if version_path.exists():
        current = version_path.read_text(encoding="utf-8").strip()
        if current:
            current_clean = current.rstrip("+") or "0.0.0"
            if Version(release.version) < Version(current_clean):
                raise Exception(
                    f"Version {release.version} is older than existing {current}"
                )

    _append_log(log_path, f"Checking if version {release.version} exists on PyPI")
    if release_utils.network_available():
        try:
            resp = requests.get(
                f"https://pypi.org/pypi/{release.package.name}/json",
                timeout=PYPI_REQUEST_TIMEOUT,
            )
            if resp.ok:
                data = resp.json()
                releases = data.get("releases", {})
                try:
                    target_version = Version(release.version)
                except InvalidVersion:
                    target_version = None

                for candidate, files in releases.items():
                    same_version = candidate == release.version
                    if target_version is not None and not same_version:
                        try:
                            same_version = Version(candidate) == target_version
                        except InvalidVersion:
                            same_version = False
                    if not same_version:
                        continue

                    has_available_files = any(
                        isinstance(file_data, dict)
                        and not file_data.get("yanked", False)
                        for file_data in files or []
                    )
                    if has_available_files:
                        raise Exception(
                            f"Version {release.version} already on PyPI"
                        )
        except Exception as exc:
            # network errors should be logged but not crash
            if "already on PyPI" in str(exc):
                raise
            _append_log(log_path, f"PyPI check failed: {exc}")
        else:
            _append_log(
                log_path,
                f"Version {release.version} not published on PyPI",
            )
    else:
        _append_log(log_path, "Network unavailable, skipping PyPI check")
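The availability check above leans on the shape of PyPI's public JSON API, where "releases" maps each version string to a list of file entries carrying a "yanked" flag. A standalone sketch of the same test (package and version are illustrative):

import requests

resp = requests.get("https://pypi.org/pypi/arthexis/json", timeout=10)
releases = resp.json().get("releases", {})
files = releases.get("0.1.26", [])
# A version only blocks the release when at least one of its files
# is still downloadable, i.e. not yanked.
published = any(
    isinstance(file_data, dict) and not file_data.get("yanked", False)
    for file_data in files
)
print(published)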


def _step_handle_migrations(release, ctx, log_path: Path, *, user=None) -> None:
    _append_log(log_path, "Freeze, squash and approve migrations")
    _append_log(log_path, "Migration review acknowledged (manual step)")


def _step_changelog_docs(release, ctx, log_path: Path, *, user=None) -> None:
    _append_log(log_path, "Compose CHANGELOG and documentation")
    _append_log(log_path, "CHANGELOG and documentation review recorded")


def _step_pre_release_actions(release, ctx, log_path: Path, *, user=None) -> None:
    _append_log(log_path, "Execute pre-release actions")
    if ctx.get("dry_run"):
        _append_log(log_path, "Dry run: skipping pre-release actions")
        return
    _sync_with_origin_main(log_path)
    try:
        subprocess.run(["scripts/generate-changelog.sh"], check=True)
    except OSError as exc:
        if _should_use_python_changelog(exc):
            _append_log(
                log_path,
                f"scripts/generate-changelog.sh failed: {exc}",
            )
            _generate_changelog_with_python(log_path)
        else:  # pragma: no cover - unexpected OSError
            raise
    else:
        _append_log(
            log_path, "Regenerated CHANGELOG.rst using scripts/generate-changelog.sh"
        )
    notes = _changelog_notes(release.version)
    staged_release_fixtures: list[Path] = []
    if notes != release.changelog:
        release.changelog = notes
        release.save(update_fields=["changelog"])
        PackageRelease.dump_fixture()
        _append_log(log_path, f"Recorded changelog notes for v{release.version}")
        release_fixture_paths = sorted(
            Path("core/fixtures").glob("releases__*.json")
        )
        if release_fixture_paths:
            subprocess.run(
                ["git", "add", *[str(path) for path in release_fixture_paths]],
                check=True,
            )
            staged_release_fixtures = release_fixture_paths
            formatted = ", ".join(_format_path(path) for path in release_fixture_paths)
            _append_log(
                log_path,
                "Staged release fixtures " + formatted,
            )
    subprocess.run(["git", "add", "CHANGELOG.rst"], check=True)
    _append_log(log_path, "Staged CHANGELOG.rst for commit")
    version_path = Path("VERSION")
    previous_version_text = ""
    if version_path.exists():
        previous_version_text = version_path.read_text(encoding="utf-8").strip()
    repo_version_before_sync = getattr(
        release, "_repo_version_before_sync", previous_version_text
    )
    version_path.write_text(f"{release.version}\n", encoding="utf-8")
    _append_log(log_path, f"Updated VERSION file to {release.version}")
    subprocess.run(["git", "add", "VERSION"], check=True)
    _append_log(log_path, "Staged VERSION for commit")
    diff = subprocess.run(["git", "diff", "--cached", "--quiet"], check=False)
    if diff.returncode != 0:
        subprocess.run(
            ["git", "commit", "-m", f"pre-release commit {release.version}"],
            check=True,
        )
        _append_log(log_path, f"Committed VERSION update for {release.version}")
    else:
        _append_log(
            log_path, "No changes detected for VERSION or CHANGELOG; skipping commit"
        )
        subprocess.run(["git", "reset", "HEAD", "CHANGELOG.rst"], check=False)
        _append_log(log_path, "Unstaged CHANGELOG.rst")
        subprocess.run(["git", "reset", "HEAD", "VERSION"], check=False)
        _append_log(log_path, "Unstaged VERSION file")
        for path in staged_release_fixtures:
            subprocess.run(["git", "reset", "HEAD", str(path)], check=False)
            _append_log(log_path, f"Unstaged release fixture {_format_path(path)}")
    _append_log(log_path, "Pre-release actions complete")


def _step_run_tests(release, ctx, log_path: Path, *, user=None) -> None:
    _append_log(log_path, "Complete test suite with --all flag")
    _append_log(log_path, "Test suite completion acknowledged")


def _step_promote_build(release, ctx, log_path: Path, *, user=None) -> None:
    from . import release as release_utils

    _append_log(log_path, "Generating build files")
    if ctx.get("dry_run"):
        _append_log(log_path, "Dry run: skipping build promotion")
        return
    try:
        _ensure_origin_main_unchanged(log_path)
        release_utils.promote(
            package=release.to_package(),
            version=release.version,
            creds=release.to_credentials(user=user),
        )
        _append_log(
            log_path,
            f"Generated release artifacts for v{release.version}",
        )
        from glob import glob

        paths = ["VERSION", *glob("core/fixtures/releases__*.json")]
        diff = subprocess.run(
            ["git", "status", "--porcelain", *paths],
            capture_output=True,
            text=True,
        )
        if diff.stdout.strip():
            subprocess.run(["git", "add", *paths], check=True)
            _append_log(log_path, "Staged release metadata updates")
            subprocess.run(
                [
                    "git",
                    "commit",
                    "-m",
                    f"chore: update release metadata for v{release.version}",
                ],
                check=True,
            )
            _append_log(
                log_path,
                f"Committed release metadata for v{release.version}",
            )
            _push_release_changes(log_path)
        PackageRelease.dump_fixture()
        _append_log(log_path, "Updated release fixtures")
    except Exception:
        _clean_repo()
        raise
    target_name = _release_log_name(release.package.name, release.version)
    new_log = log_path.with_name(target_name)
    if log_path != new_log:
        if new_log.exists():
            new_log.unlink()
        log_path.rename(new_log)
    else:
        new_log = log_path
    ctx["log"] = new_log.name
    _append_log(new_log, "Build complete")


def _step_release_manager_approval(
    release, ctx, log_path: Path, *, user=None
) -> None:
    if release.to_credentials(user=user) is None:
        ctx.pop("release_approval", None)
        if not ctx.get("approval_credentials_missing"):
            _append_log(log_path, "Release manager publishing credentials missing")
            ctx["approval_credentials_missing"] = True
        ctx["awaiting_approval"] = True
        raise ApprovalRequired()

    missing_before = ctx.pop("approval_credentials_missing", None)
    if missing_before:
        ctx.pop("awaiting_approval", None)
    decision = ctx.get("release_approval")
    if decision == "approved":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager approved release")
        return
    if decision == "rejected":
        ctx.pop("release_approval", None)
        ctx.pop("awaiting_approval", None)
        ctx.pop("approval_credentials_missing", None)
        _append_log(log_path, "Release manager rejected release")
        raise RuntimeError(
            _("Release manager rejected the release. Restart required."),
        )
    if not ctx.get("awaiting_approval"):
        ctx["awaiting_approval"] = True
        _append_log(log_path, "Awaiting release manager approval")
    else:
        ctx["awaiting_approval"] = True
    raise ApprovalRequired()


def _step_publish(release, ctx, log_path: Path, *, user=None) -> None:
    from . import release as release_utils

    if ctx.get("dry_run"):
        test_repository_url = os.environ.get(
            "PYPI_TEST_REPOSITORY_URL", "https://test.pypi.org/legacy/"
        )
        test_creds = release.to_credentials(user=user)
        if not (test_creds and test_creds.has_auth()):
            test_creds = release_utils.Credentials(
                token=os.environ.get("PYPI_TEST_API_TOKEN"),
                username=os.environ.get("PYPI_TEST_USERNAME"),
                password=os.environ.get("PYPI_TEST_PASSWORD"),
            )
            if not test_creds.has_auth():
                test_creds = None
        target = release_utils.RepositoryTarget(
            name="Test PyPI",
            repository_url=(test_repository_url or None),
            credentials=test_creds,
            verify_availability=False,
        )
        label = target.repository_url or target.name
        dist_path = Path("dist")
        if not dist_path.exists():
            _append_log(log_path, "Dry run: building distribution artifacts")
            package = release.to_package()
            version_path = (
                Path(package.version_path)
                if package.version_path
                else Path("VERSION")
            )
            original_version = (
                version_path.read_text(encoding="utf-8")
                if version_path.exists()
                else None
            )
            pyproject_path = Path("pyproject.toml")
            original_pyproject = (
                pyproject_path.read_text(encoding="utf-8")
                if pyproject_path.exists()
                else None
            )
            try:
                release_utils.build(
                    package=package,
                    version=release.version,
                    creds=release.to_credentials(user=user),
                    dist=True,
                    tests=False,
                    twine=False,
                    git=False,
                    tag=False,
                    stash=True,
                )
            except release_utils.ReleaseError as exc:
                _append_log(
                    log_path,
                    f"Dry run: failed to prepare distribution artifacts ({exc})",
                )
                raise
            finally:
                if original_version is None:
                    if version_path.exists():
                        version_path.unlink()
                else:
                    version_path.write_text(original_version, encoding="utf-8")
                if original_pyproject is None:
                    if pyproject_path.exists():
                        pyproject_path.unlink()
                else:
                    pyproject_path.write_text(original_pyproject, encoding="utf-8")
        _append_log(log_path, f"Dry run: uploading distribution to {label}")
        release_utils.publish(
            package=release.to_package(),
            version=release.version,
            creds=target.credentials or release.to_credentials(user=user),
            repositories=[target],
        )
        _append_log(log_path, "Dry run: skipped release metadata updates")
        return

    targets = release.build_publish_targets(user=user)
    repo_labels = []
    for target in targets:
        label = target.name
        if target.repository_url:
            label = f"{label} ({target.repository_url})"
        repo_labels.append(label)
    if repo_labels:
        _append_log(
            log_path,
            "Uploading distribution" if len(repo_labels) == 1 else "Uploading distribution to: " + ", ".join(repo_labels),
        )
    else:
        _append_log(log_path, "Uploading distribution")
    publish_warning: release_utils.PostPublishWarning | None = None
    try:
        release_utils.publish(
            package=release.to_package(),
            version=release.version,
            creds=release.to_credentials(user=user),
            repositories=targets,
        )
    except release_utils.PostPublishWarning as warning:
        publish_warning = warning

    if publish_warning is not None:
        message = str(publish_warning)
        followups = _dedupe_preserve_order(publish_warning.followups)
        warning_entries = ctx.setdefault("warnings", [])
        if not any(entry.get("message") == message for entry in warning_entries):
            entry: dict[str, object] = {"message": message}
            if followups:
                entry["followups"] = followups
            warning_entries.append(entry)
        _append_log(log_path, message)
        for note in followups:
            _append_log(log_path, f"Follow-up: {note}")
    release.pypi_url = (
        f"https://pypi.org/project/{release.package.name}/{release.version}/"
    )
    github_url = ""
    for target in targets[1:]:
        if target.repository_url and "github.com" in target.repository_url:
            github_url = release.github_package_url() or ""
            break
    if github_url:
        release.github_url = github_url
    else:
        release.github_url = ""
    release.release_on = timezone.now()
    release.save(update_fields=["pypi_url", "github_url", "release_on"])
    PackageRelease.dump_fixture()
    _append_log(log_path, f"Recorded PyPI URL: {release.pypi_url}")
    if release.github_url:
        _append_log(log_path, f"Recorded GitHub URL: {release.github_url}")
    fixture_paths = [
        str(path) for path in Path("core/fixtures").glob("releases__*.json")
    ]
    if fixture_paths:
        status = subprocess.run(
            ["git", "status", "--porcelain", "--", *fixture_paths],
            capture_output=True,
            text=True,
            check=True,
        )
        if status.stdout.strip():
            subprocess.run(["git", "add", *fixture_paths], check=True)
            _append_log(log_path, "Staged publish metadata updates")
            commit_message = f"chore: record publish metadata for v{release.version}"
            subprocess.run(["git", "commit", "-m", commit_message], check=True)
            _append_log(
                log_path, f"Committed publish metadata for v{release.version}"
            )
            _push_release_changes(log_path)
        else:
            _append_log(
                log_path,
                "No release metadata updates detected after publish; skipping commit",
            )
    _append_log(log_path, "Upload complete")


FIXTURE_REVIEW_STEP_NAME = "Freeze, squash and approve migrations"


PUBLISH_STEPS = [
    ("Check version number availability", _step_check_version),
    ("Confirm release TODO completion", _step_check_todos),
    (FIXTURE_REVIEW_STEP_NAME, _step_handle_migrations),
    ("Compose CHANGELOG and documentation", _step_changelog_docs),
    ("Execute pre-release actions", _step_pre_release_actions),
    ("Build release artifacts", _step_promote_build),
    ("Complete test suite with --all flag", _step_run_tests),
    ("Get Release Manager Approval", _step_release_manager_approval),
    ("Upload final build to PyPI", _step_publish),
]
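Each entry pairs a display label with a step callable of signature (release, ctx, log_path, *, user=None). The release_progress view further down drives this list one step per request; a minimal sketch of that dispatch loop, simplified from the view (session and lock-file persistence omitted):

def run_next_step(release, ctx, log_path, user=None):
    step = ctx.get("step", 0)
    if step >= len(PUBLISH_STEPS):
        return True  # pipeline already finished
    name, func = PUBLISH_STEPS[step]
    try:
        func(release, ctx, log_path, user=user)
    except (PendingTodos, ApprovalRequired, DirtyRepository):
        # Control-flow exceptions: the step re-runs once the blocker clears.
        return False
    ctx["step"] = step + 1
    return ctx["step"] >= len(PUBLISH_STEPS)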


@csrf_exempt
def rfid_login(request):
    """Authenticate a user using an RFID."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    try:
        data = json.loads(request.body.decode())
    except json.JSONDecodeError:
        data = request.POST

    rfid = data.get("rfid")
    if not rfid:
        return JsonResponse({"detail": "rfid required"}, status=400)

    user = authenticate(request, rfid=rfid)
    if user is None:
        return JsonResponse({"detail": "invalid RFID"}, status=401)

    login(request, user)
    return JsonResponse({"id": user.id, "username": user.username})
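The view accepts either a JSON body or form data. A client sketch, assuming the view is routed at /api/rfid-login/ (the URL pattern lives in core/urls.py, not shown here) and an illustrative tag value:

import requests

resp = requests.post(
    "http://localhost:8000/api/rfid-login/",  # hypothetical route
    json={"rfid": "A1B2C3D4"},  # illustrative tag
)
if resp.ok:
    print(resp.json())  # {"id": ..., "username": ...}
else:
    print(resp.status_code, resp.json()["detail"])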


@api_login_required
def product_list(request):
    """Return a JSON list of products."""

    products = list(
        Product.objects.values("id", "name", "description", "renewal_period")
    )
    return JsonResponse({"products": products})


@csrf_exempt
@api_login_required
def add_live_subscription(request):
    """Create a live subscription for an energy account from POSTed JSON."""

    if request.method != "POST":
        return JsonResponse({"detail": "POST required"}, status=400)

    try:
        data = json.loads(request.body.decode())
    except json.JSONDecodeError:
        data = request.POST

    account_id = data.get("account_id")
    product_id = data.get("product_id")

    if not account_id or not product_id:
        return JsonResponse(
            {"detail": "account_id and product_id required"}, status=400
        )

    try:
        product = Product.objects.get(id=product_id)
    except Product.DoesNotExist:
        return JsonResponse({"detail": "invalid product"}, status=404)

    try:
        account = EnergyAccount.objects.get(id=account_id)
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    start_date = timezone.now().date()
    account.live_subscription_product = product
    account.live_subscription_start_date = start_date
    account.live_subscription_next_renewal = start_date + timedelta(
        days=product.renewal_period
    )
    account.save()

    return JsonResponse({"id": account.id})
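The first renewal is scheduled renewal_period days after the start date. A client sketch under the same hypothetical routing, assuming an already-authenticated session (IDs illustrative):

import requests

session = requests.Session()
# ... authenticate first, e.g. through rfid_login above ...
resp = session.post(
    "http://localhost:8000/api/live-subscriptions/add/",  # hypothetical route
    json={"account_id": 1, "product_id": 2},
)
print(resp.json())  # {"id": 1} on success; {"detail": ...} with 400/404 otherwise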


@api_login_required
def live_subscription_list(request):
    """Return live subscriptions for the given account_id."""

    account_id = request.GET.get("account_id")
    if not account_id:
        return JsonResponse({"detail": "account_id required"}, status=400)

    try:
        account = EnergyAccount.objects.select_related("live_subscription_product").get(
            id=account_id
        )
    except EnergyAccount.DoesNotExist:
        return JsonResponse({"detail": "invalid account"}, status=404)

    subs = []
    product = account.live_subscription_product
    if product:
        next_renewal = account.live_subscription_next_renewal
        if not next_renewal and account.live_subscription_start_date:
            next_renewal = account.live_subscription_start_date + timedelta(
                days=product.renewal_period
            )

        subs.append(
            {
                "id": account.id,
                "product__name": product.name,
                "next_renewal": next_renewal,
            }
        )

    return JsonResponse({"live_subscriptions": subs})
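Note the fallback: when no next_renewal is stored, the view derives it from the start date and the product's renewal period. A matching GET sketch (hypothetical route, illustrative ID):

import requests

session = requests.Session()
# ... authenticated session assumed ...
resp = session.get(
    "http://localhost:8000/api/live-subscriptions/",  # hypothetical route
    params={"account_id": 1},
)
print(resp.json())
# {"live_subscriptions": [{"id": 1, "product__name": "...", "next_renewal": "..."}]}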


@csrf_exempt
@api_login_required
def rfid_batch(request):
    """Export or import RFID tags in batch."""

    if request.method == "GET":
        color = request.GET.get("color", RFID.BLACK).upper()
        released = request.GET.get("released")
        if released is not None:
            released = released.lower()
        qs = RFID.objects.all()
        if color != "ALL":
            qs = qs.filter(color=color)
        if released in ("true", "false"):
            qs = qs.filter(released=(released == "true"))
        tags = [
            {
                "rfid": t.rfid,
                "custom_label": t.custom_label,
                "energy_accounts": list(t.energy_accounts.values_list("id", flat=True)),
                "external_command": t.external_command,
                "post_auth_command": t.post_auth_command,
                "allowed": t.allowed,
                "color": t.color,
                "released": t.released,
            }
            for t in qs.order_by("rfid")
        ]
        return JsonResponse({"rfids": tags})

    if request.method == "POST":
        try:
            data = json.loads(request.body.decode())
        except json.JSONDecodeError:
            return JsonResponse({"detail": "invalid JSON"}, status=400)

        tags = data.get("rfids") if isinstance(data, dict) else data
        if not isinstance(tags, list):
            return JsonResponse({"detail": "rfids list required"}, status=400)

        count = 0
        for row in tags:
            rfid = (row.get("rfid") or "").strip()
            if not rfid:
                continue
            allowed = row.get("allowed", True)
            energy_accounts = row.get("energy_accounts") or []
            color = (row.get("color") or RFID.BLACK).strip().upper() or RFID.BLACK
            released = row.get("released", False)
            if isinstance(released, str):
                released = released.lower() == "true"
            custom_label = (row.get("custom_label") or "").strip()
            external_command = row.get("external_command")
            if not isinstance(external_command, str):
                external_command = ""
            else:
                external_command = external_command.strip()
            post_auth_command = row.get("post_auth_command")
            if not isinstance(post_auth_command, str):
                post_auth_command = ""
            else:
                post_auth_command = post_auth_command.strip()

            tag, _ = RFID.update_or_create_from_code(
                rfid,
                {
                    "allowed": allowed,
                    "color": color,
                    "released": released,
                    "custom_label": custom_label,
                    "external_command": external_command,
                    "post_auth_command": post_auth_command,
                },
            )
            if energy_accounts:
                tag.energy_accounts.set(
                    EnergyAccount.objects.filter(id__in=energy_accounts)
                )
            else:
                tag.energy_accounts.clear()
            count += 1

        return JsonResponse({"imported": count})

    return JsonResponse({"detail": "GET or POST required"}, status=400)
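GET exports rows in the same shape POST consumes, so the endpoint round-trips; imports are keyed on the rfid code via update_or_create_from_code. A sketch, assuming a hypothetical route /api/rfid-batch/:

import requests

session = requests.Session()
# ... authenticated session assumed ...
base = "http://localhost:8000/api/rfid-batch/"  # hypothetical route

# Export every tag regardless of color.
exported = session.get(base, params={"color": "ALL"}).json()

# Re-import the same rows; the upsert makes this effectively idempotent.
resp = session.post(base, json={"rfids": exported["rfids"]})
print(resp.json())  # {"imported": <number of rows with a non-empty rfid>}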


@staff_member_required
def release_progress(request, pk: int, action: str):
    release = get_object_or_404(PackageRelease, pk=pk)
    if action != "publish":
        raise Http404("Unknown action")
    session_key = f"release_publish_{pk}"
    lock_path = Path("locks") / f"release_publish_{pk}.json"
    restart_path = Path("locks") / f"release_publish_{pk}.restarts"
    log_dir, log_dir_warning = _resolve_release_log_dir(Path(settings.LOG_DIR))
    log_dir_warning_message = log_dir_warning

    version_path = Path("VERSION")
    repo_version_before_sync = ""
    if version_path.exists():
        repo_version_before_sync = version_path.read_text(encoding="utf-8").strip()
    setattr(release, "_repo_version_before_sync", repo_version_before_sync)

    if not release.is_current:
        if release.is_published:
            raise Http404("Release is not current")
        updated, previous_version = _sync_release_with_revision(release)
        if updated:
            request.session.pop(session_key, None)
            if lock_path.exists():
                lock_path.unlink()
            if restart_path.exists():
                restart_path.unlink()
            pattern = f"pr.{release.package.name}.v{previous_version}*.log"
            for log_file in log_dir.glob(pattern):
                log_file.unlink()
        if not release.is_current:
            raise Http404("Release is not current")

    if request.GET.get("restart"):
        count = 0
        if restart_path.exists():
            try:
                count = int(restart_path.read_text(encoding="utf-8"))
            except Exception:
                count = 0
        restart_path.parent.mkdir(parents=True, exist_ok=True)
        restart_path.write_text(str(count + 1), encoding="utf-8")
        _clean_repo()
        release.pypi_url = ""
        release.release_on = None
        release.save(update_fields=["pypi_url", "release_on"])
        request.session.pop(session_key, None)
        if lock_path.exists():
            lock_path.unlink()
        pattern = f"pr.{release.package.name}.v{release.version}*.log"
        for f in log_dir.glob(pattern):
            f.unlink()
        return redirect(request.path)
    ctx = request.session.get(session_key)
    if ctx is None and lock_path.exists():
        try:
            ctx = json.loads(lock_path.read_text(encoding="utf-8"))
        except Exception:
            ctx = {"step": 0}
    if ctx is None:
        ctx = {"step": 0}
        if restart_path.exists():
            restart_path.unlink()
        if log_dir_warning_message:
            ctx["log_dir_warning_message"] = log_dir_warning_message
    else:
        log_dir_warning_message = ctx.get("log_dir_warning_message")

    if "changelog_report_url" not in ctx:
        try:
            ctx["changelog_report_url"] = reverse("admin:system-changelog-report")
        except NoReverseMatch:
            ctx["changelog_report_url"] = ""

    steps = PUBLISH_STEPS
    total_steps = len(steps)
    step_count = ctx.get("step", 0)
    started_flag = bool(ctx.get("started"))
    paused_flag = bool(ctx.get("paused"))
    error_flag = bool(ctx.get("error"))
    done_flag = step_count >= total_steps and not error_flag
    start_enabled = (not started_flag or paused_flag) and not done_flag and not error_flag

    ctx["dry_run"] = bool(ctx.get("dry_run"))

    if request.GET.get("set_dry_run") is not None:
        if start_enabled:
            ctx["dry_run"] = bool(request.GET.get("dry_run"))
        request.session[session_key] = ctx
        return redirect(request.path)

    manager = release.release_manager or release.package.release_manager
    credentials_ready = bool(release.to_credentials(user=request.user))
    if credentials_ready and ctx.get("approval_credentials_missing"):
        ctx.pop("approval_credentials_missing", None)

    ack_todos_requested = bool(request.GET.get("ack_todos"))

    if request.GET.get("start"):
        if start_enabled:
            ctx["dry_run"] = bool(request.GET.get("dry_run"))
            ctx["started"] = True
            ctx["paused"] = False
    if (
        ctx.get("awaiting_approval")
        and not ctx.get("approval_credentials_missing")
        and credentials_ready
    ):
        if request.GET.get("approve"):
            ctx["release_approval"] = "approved"
        if request.GET.get("reject"):
            ctx["release_approval"] = "rejected"
    resume_requested = bool(request.GET.get("resume"))

    if request.GET.get("pause") and ctx.get("started"):
        ctx["paused"] = True

    if resume_requested:
        if not ctx.get("started"):
            ctx["started"] = True
        if ctx.get("paused"):
            ctx["paused"] = False
    restart_count = 0
    if restart_path.exists():
        try:
            restart_count = int(restart_path.read_text(encoding="utf-8"))
        except Exception:
            restart_count = 0
    step_count = ctx.get("step", 0)
    step_param = request.GET.get("step")
    if resume_requested and step_param is None:
        step_param = str(step_count)

    pending_qs = Todo.objects.filter(is_deleted=False, done_on__isnull=True)
    pending_items = list(pending_qs)
    blocking_todos = [
        todo for todo in pending_items if _todo_blocks_publish(todo, release)
    ]
    if not blocking_todos:
        ctx["todos_ack"] = True
        ctx["todos_ack_auto"] = True
    elif ack_todos_requested:
        failures = []
        for todo in blocking_todos:
            result = todo.check_on_done_condition()
            if not result.passed:
                failures.append((todo, result))
        if failures:
            ctx["todos_ack"] = False
            ctx.pop("todos_ack_auto", None)
            for todo, result in failures:
                messages.error(request, _format_condition_failure(todo, result))
        else:
            ctx["todos_ack"] = True
            ctx.pop("todos_ack_auto", None)
    else:
        if ctx.pop("todos_ack_auto", None):
            ctx["todos_ack"] = False
        else:
            ctx.setdefault("todos_ack", False)

    if ctx.get("todos_ack"):
        ctx.pop("todos_block_logged", None)
        ctx.pop("todos", None)
        ctx.pop("todos_required", None)
    else:
        ctx["todos"] = [
            {
                "id": todo.pk,
                "request": todo.request,
                "url": todo.url,
                "request_details": todo.request_details,
            }
            for todo in blocking_todos
        ]
        ctx["todos_required"] = True

    log_name = _release_log_name(release.package.name, release.version)
    if ctx.get("log") != log_name:
        ctx = {
            "step": 0,
            "log": log_name,
            "started": ctx.get("started", False),
        }
        step_count = 0
        if not blocking_todos:
            ctx["todos_ack"] = True
    log_path = log_dir / log_name
    ctx.setdefault("log", log_name)
    ctx.setdefault("paused", False)
    ctx.setdefault("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE)

    dirty_action = request.GET.get("dirty_action")
    if dirty_action and ctx.get("dirty_files"):
        if dirty_action == "discard":
            _clean_repo()
            remaining = _collect_dirty_files()
            if remaining:
                ctx["dirty_files"] = remaining
                ctx.pop("dirty_commit_error", None)
            else:
                ctx.pop("dirty_files", None)
                ctx.pop("dirty_commit_error", None)
                ctx.pop("dirty_log_message", None)
                _append_log(log_path, "Discarded local changes before publish")
        elif dirty_action == "commit":
            message = request.GET.get("dirty_message", "").strip()
            if not message:
                message = ctx.get("dirty_commit_message") or DIRTY_COMMIT_DEFAULT_MESSAGE
            ctx["dirty_commit_message"] = message
            try:
                subprocess.run(["git", "add", "--all"], check=True)
                subprocess.run(["git", "commit", "-m", message], check=True)
            except subprocess.CalledProcessError as exc:
                ctx["dirty_commit_error"] = _format_subprocess_error(exc)
            else:
                ctx.pop("dirty_commit_error", None)
                remaining = _collect_dirty_files()
                if remaining:
                    ctx["dirty_files"] = remaining
                else:
                    ctx.pop("dirty_files", None)
                    ctx.pop("dirty_log_message", None)
                _append_log(
                    log_path,
                    _("Committed pending changes: %(message)s")
                    % {"message": message},
                )

    if (
        ctx.get("started")
        and step_count == 0
        and (step_param is None or step_param == "0")
    ):
        if log_path.exists():
            log_path.unlink()
            ctx.pop("log_dir_warning_logged", None)

    if log_dir_warning_message and not ctx.get("log_dir_warning_logged"):
        _append_log(log_path, log_dir_warning_message)
        ctx["log_dir_warning_logged"] = True

    fixtures_step_index = next(
        (
            index
            for index, (name, _) in enumerate(steps)
            if name == FIXTURE_REVIEW_STEP_NAME
        ),
        None,
    )
    error = ctx.get("error")

    if (
        ctx.get("started")
        and not ctx.get("paused")
        and step_param is not None
        and not error
        and step_count < len(steps)
    ):
        to_run = int(step_param)
        if to_run == step_count:
            name, func = steps[to_run]
            try:
                func(release, ctx, log_path, user=request.user)
            except PendingTodos:
                pass
            except ApprovalRequired:
                pass
            except DirtyRepository:
                pass
            except Exception as exc:  # pragma: no cover - best effort logging
                _append_log(log_path, f"{name} failed: {exc}")
                ctx["error"] = str(exc)
                request.session[session_key] = ctx
                lock_path.parent.mkdir(parents=True, exist_ok=True)
                lock_path.write_text(json.dumps(ctx), encoding="utf-8")
            else:
                step_count += 1
                ctx["step"] = step_count
                request.session[session_key] = ctx
                lock_path.parent.mkdir(parents=True, exist_ok=True)
                lock_path.write_text(json.dumps(ctx), encoding="utf-8")

    done = step_count >= len(steps) and not ctx.get("error")

    show_log = ctx.get("started") or step_count > 0 or done or ctx.get("error")
    if show_log and log_path.exists():
        log_content = log_path.read_text(encoding="utf-8")
    else:
        log_content = ""
    next_step = (
        step_count
        if ctx.get("started")
        and not ctx.get("paused")
        and not done
        and not ctx.get("error")
        else None
    )
    has_pending_todos = bool(
        ctx.get("todos_required") and not ctx.get("todos_ack")
    )
    if has_pending_todos:
        next_step = None
    dirty_files = ctx.get("dirty_files")
    if dirty_files:
        next_step = None
    awaiting_approval = bool(ctx.get("awaiting_approval"))
    approval_credentials_missing = bool(ctx.get("approval_credentials_missing"))
    if awaiting_approval:
        next_step = None
    if approval_credentials_missing:
        next_step = None
    paused = ctx.get("paused", False)

    step_names = [s[0] for s in steps]
    approval_credentials_ready = credentials_ready
    credentials_blocking = approval_credentials_missing or (
        awaiting_approval and not approval_credentials_ready
    )
    step_states = []
    for index, name in enumerate(step_names):
        if index < step_count:
            status = "complete"
            icon = "✅"
            label = _("Completed")
        elif error and index == step_count:
            status = "error"
            icon = "❌"
            label = _("Failed")
        elif paused and ctx.get("started") and index == step_count and not done:
            status = "paused"
            icon = "⏸️"
            label = _("Paused")
        elif (
            has_pending_todos
            and ctx.get("started")
            and index == step_count
            and not done
        ):
            status = "blocked"
            icon = "📝"
            label = _("Awaiting checklist")
        elif (
            credentials_blocking
            and ctx.get("started")
            and index == step_count
            and not done
        ):
            status = "missing-credentials"
            icon = "🔐"
            label = _("Credentials required")
        elif (
            awaiting_approval
            and approval_credentials_ready
            and ctx.get("started")
            and index == step_count
            and not done
        ):
            status = "awaiting-approval"
            icon = "🤝"
            label = _("Awaiting approval")
        elif ctx.get("started") and index == step_count and not done:
            status = "active"
            icon = "⏳"
            label = _("In progress")
        else:
            status = "pending"
            icon = "⬜"
            label = _("Pending")
        step_states.append(
            {
                "index": index + 1,
                "name": name,
                "status": status,
                "icon": icon,
                "label": label,
            }
        )

    is_running = ctx.get("started") and not paused and not done and not ctx.get("error")
    resume_available = (
        ctx.get("started")
        and not paused
        and not done
        and not ctx.get("error")
        and step_count < len(steps)
        and next_step is None
    )
    can_resume = ctx.get("started") and paused and not done and not ctx.get("error")
    release_manager_owner = manager.owner_display() if manager else ""
    try:
        current_user_admin_url = reverse(
            "admin:teams_user_change", args=[request.user.pk]
        )
    except NoReverseMatch:
        current_user_admin_url = reverse(
            "admin:core_user_change", args=[request.user.pk]
        )

    fixtures_summary = ctx.get("fixtures")
    if (
        fixtures_summary
        and fixtures_step_index is not None
        and step_count > fixtures_step_index
    ):
        fixtures_summary = None

    todos_display = ctx.get("todos") if has_pending_todos else None

    dry_run_active = bool(ctx.get("dry_run"))
    dry_run_toggle_enabled = not is_running and not done and not ctx.get("error")

    context = {
        "release": release,
        "action": "publish",
        "steps": step_names,
        "current_step": step_count,
        "next_step": next_step,
        "done": done,
        "error": ctx.get("error"),
        "log_content": log_content,
        "log_path": str(log_path),
        "cert_log": ctx.get("cert_log"),
        "fixtures": fixtures_summary,
        "todos": todos_display,
        "changelog_report_url": ctx.get("changelog_report_url", ""),
        "dirty_files": dirty_files,
        "dirty_commit_message": ctx.get("dirty_commit_message", DIRTY_COMMIT_DEFAULT_MESSAGE),
        "dirty_commit_error": ctx.get("dirty_commit_error"),
        "restart_count": restart_count,
        "started": ctx.get("started", False),
        "paused": paused,
        "show_log": show_log,
        "step_states": step_states,
        "has_pending_todos": has_pending_todos,
        "awaiting_approval": awaiting_approval,
        "approval_credentials_missing": approval_credentials_missing,
        "approval_credentials_ready": approval_credentials_ready,
        "release_manager_owner": release_manager_owner,
        "has_release_manager": bool(manager),
        "current_user_admin_url": current_user_admin_url,
        "is_running": is_running,
        "resume_available": resume_available,
        "can_resume": can_resume,
        "dry_run": dry_run_active,
        "dry_run_toggle_enabled": dry_run_toggle_enabled,
        "warnings": ctx.get("warnings", []),
    }
    request.session[session_key] = ctx
    if done or ctx.get("error"):
        if lock_path.exists():
            lock_path.unlink()
    else:
        lock_path.parent.mkdir(parents=True, exist_ok=True)
        lock_path.write_text(json.dumps(ctx), encoding="utf-8")
    template = get_template("core/release_progress.html")
    content = template.render(context, request)
    signals.template_rendered.send(
        sender=template.__class__,
        template=template,
        context=context,
        using=getattr(getattr(template, "engine", None), "name", None),
    )
    response = HttpResponse(content)
    response.context = context
    response.templates = [template]
    return response


def _dedupe_preserve_order(values):
    seen = set()
    result = []
    for value in values:
        if value in seen:
            continue
        seen.add(value)
        result.append(value)
    return result
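First occurrence wins, so relative order is preserved:

>>> _dedupe_preserve_order(["b", "a", "b", "c", "a"])
['b', 'a', 'c']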


def _parse_todo_auth_directives(query: str):
    directives = {
        "require_logout": False,
        "users": [],
        "permissions": [],
        "notes": [],
    }
    if not query:
        return "", directives

    remaining = []
    for key, value in parse_qsl(query, keep_blank_values=True):
        if key != "_todo_auth":
            remaining.append((key, value))
            continue
        token = (value or "").strip()
        if not token:
            continue
        kind, _, payload = token.partition(":")
        kind = kind.strip().lower()
        payload = payload.strip()
        if kind in {"logout", "anonymous", "anon"}:
            directives["require_logout"] = True
        elif kind in {"user", "username"} and payload:
            directives["users"].append(payload)
        elif kind in {"perm", "permission"} and payload:
            directives["permissions"].append(payload)
        else:
            directives["notes"].append(token)

    sanitized_query = urlencode(remaining, doseq=True)
    return sanitized_query, directives
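Every _todo_auth parameter carries a kind:payload token and is stripped from the returned query. For example (values illustrative):

query = "tab=2&_todo_auth=user:alice&_todo_auth=perm:core.view_todo&_todo_auth=logout"
sanitized, directives = _parse_todo_auth_directives(query)
# sanitized  == "tab=2"
# directives == {"require_logout": True, "users": ["alice"],
#                "permissions": ["core.view_todo"], "notes": []}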


def _todo_iframe_url(request, todo: Todo):
    """Return a safe iframe URL and auth context for ``todo``."""

    fallback = reverse("admin:core_todo_change", args=[todo.pk])
    raw_url = (todo.url or "").strip()

    auth_context = {
        "require_logout": False,
        "users": [],
        "permissions": [],
        "notes": [],
    }

    def _final_context(target_url: str):
        return {
            "target_url": target_url or fallback,
            "require_logout": auth_context["require_logout"],
            "users": _dedupe_preserve_order(auth_context["users"]),
            "permissions": _dedupe_preserve_order(auth_context["permissions"]),
            "notes": _dedupe_preserve_order(auth_context["notes"]),
            "has_requirements": bool(
                auth_context["require_logout"]
                or auth_context["users"]
                or auth_context["permissions"]
                or auth_context["notes"]
            ),
        }

    if not raw_url:
        return fallback, _final_context(fallback)

    focus_path = reverse("todo-focus", args=[todo.pk])
    focus_norm = focus_path.strip("/").lower()

    def _is_focus_target(target: str) -> bool:
        if not target:
            return False
        parsed_target = urlsplit(target)
        path = parsed_target.path
        if not path and not parsed_target.scheme and not parsed_target.netloc:
            path = target.split("?", 1)[0].split("#", 1)[0]
        normalized = path.strip("/").lower()
        return normalized == focus_norm if normalized else False

    if _is_focus_target(raw_url):
        return fallback, _final_context(fallback)

    parsed = urlsplit(raw_url)

    def _merge_directives(parsed_result):
        sanitized_query, directives = _parse_todo_auth_directives(parsed_result.query)
        if directives["require_logout"]:
            auth_context["require_logout"] = True
        auth_context["users"].extend(directives["users"])
        auth_context["permissions"].extend(directives["permissions"])
        auth_context["notes"].extend(directives["notes"])
        return parsed_result._replace(query=sanitized_query)

    if not parsed.scheme and not parsed.netloc:
        sanitized = _merge_directives(parsed)
        path = sanitized.path or "/"
        if not path.startswith("/"):
            path = f"/{path}"
        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
        if _is_focus_target(relative_url):
            return fallback, _final_context(fallback)
        return relative_url or fallback, _final_context(relative_url)

    if parsed.scheme and parsed.scheme.lower() not in {"http", "https"}:
        return fallback, _final_context(fallback)

    request_host = request.get_host().strip().lower()
    host_without_port = request_host.split(":", 1)[0]
    allowed_hosts = {
        request_host,
        host_without_port,
        "localhost",
        "127.0.0.1",
        "0.0.0.0",
        "::1",
    }

    site_domain = ""
    try:
        site_domain = Site.objects.get_current().domain.strip().lower()
    except Site.DoesNotExist:
        site_domain = ""
    if site_domain:
        allowed_hosts.add(site_domain)
        allowed_hosts.add(site_domain.split(":", 1)[0])

    for host in getattr(settings, "ALLOWED_HOSTS", []):
        if not isinstance(host, str):
            continue
        normalized = host.strip().lower()
        if not normalized or normalized.startswith("*"):
            continue
        allowed_hosts.add(normalized)
        allowed_hosts.add(normalized.split(":", 1)[0])

    hostname = (parsed.hostname or "").strip().lower()
    netloc = parsed.netloc.strip().lower()
    if hostname in allowed_hosts or netloc in allowed_hosts:
        sanitized = _merge_directives(parsed)
        path = sanitized.path or "/"
        if not path.startswith("/"):
            path = f"/{path}"
        relative_url = urlunsplit(("", "", path, sanitized.query, sanitized.fragment))
        if _is_focus_target(relative_url):
            return fallback, _final_context(fallback)
        return relative_url or fallback, _final_context(relative_url)

    return fallback, _final_context(fallback)


@staff_member_required
def todo_focus(request, pk: int):
    todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
    if todo.done_on:
        return redirect(_get_return_url(request))

    iframe_url, focus_auth = _todo_iframe_url(request, todo)
    focus_target_url = focus_auth.get("target_url", iframe_url) if focus_auth else iframe_url
    context = {
        "todo": todo,
        "iframe_url": iframe_url,
        "focus_target_url": focus_target_url,
        "focus_auth": focus_auth,
        "next_url": _get_return_url(request),
        "done_url": reverse("todo-done", args=[todo.pk]),
        "delete_url": reverse("todo-delete", args=[todo.pk]),
        "snapshot_url": reverse("todo-snapshot", args=[todo.pk]),
    }
    return render(request, "core/todo_focus.html", context)


@staff_member_required
@require_POST
def todo_done(request, pk: int):
    redirect_to = _get_return_url(request)
    try:
        todo = Todo.objects.get(pk=pk, is_deleted=False, done_on__isnull=True)
    except Todo.DoesNotExist:
        return redirect(redirect_to)
    result = todo.check_on_done_condition()
    if not result.passed:
        messages.error(request, _format_condition_failure(todo, result))
        return redirect(redirect_to)
    todo.done_on = timezone.now()
    todo.populate_done_metadata(request.user)
    todo.save(
        update_fields=[
            "done_on",
            "done_node",
            "done_version",
            "done_revision",
            "done_username",
        ]
    )
    return redirect(redirect_to)


@staff_member_required
@require_POST
def todo_delete(request, pk: int):
    redirect_to = reverse("admin:index")
    try:
        todo = Todo.objects.get(pk=pk, is_deleted=False)
    except Todo.DoesNotExist:
        return redirect(redirect_to)
    todo.is_deleted = True
    todo.save(update_fields=["is_deleted"])
    return redirect(redirect_to)


@staff_member_required
@require_POST
def todo_snapshot(request, pk: int):
    todo = get_object_or_404(Todo, pk=pk, is_deleted=False)
    if todo.done_on:
        return JsonResponse({"detail": _("This TODO has already been completed.")}, status=400)

    try:
        payload = json.loads(request.body.decode("utf-8") or "{}")
    except json.JSONDecodeError:
        return JsonResponse({"detail": _("Invalid JSON payload.")}, status=400)

    image_data = payload.get("image", "") if isinstance(payload, dict) else ""
    if not isinstance(image_data, str) or not image_data.startswith("data:image/png;base64,"):
        return JsonResponse({"detail": _("A PNG data URL is required.")}, status=400)

    try:
        encoded = image_data.split(",", 1)[1]
    except IndexError:
        return JsonResponse({"detail": _("Screenshot data is incomplete.")}, status=400)

    try:
        image_bytes = base64.b64decode(encoded, validate=True)
    except (ValueError, binascii.Error):
        return JsonResponse({"detail": _("Unable to decode screenshot data.")}, status=400)

    if not image_bytes:
        return JsonResponse({"detail": _("Screenshot data is empty.")}, status=400)

    max_size = 5 * 1024 * 1024
    if len(image_bytes) > max_size:
        return JsonResponse({"detail": _("Screenshot is too large to store.")}, status=400)

    relative_path = Path("screenshots") / f"todo-{todo.pk}-{uuid.uuid4().hex}.png"
    full_path = settings.LOG_DIR / relative_path
    full_path.parent.mkdir(parents=True, exist_ok=True)
    with full_path.open("wb") as fh:
        fh.write(image_bytes)

    primary_text = strip_tags(todo.request or "").strip()
    details_text = strip_tags(todo.request_details or "").strip()
    alt_parts = [part for part in (primary_text, details_text) if part]
    if alt_parts:
        alt_text = " — ".join(alt_parts)
    else:
        alt_text = _("TODO %(id)s snapshot") % {"id": todo.pk}

    sample = save_screenshot(
        relative_path,
        method="TODO_QA",
        content=alt_text,
        user=request.user if request.user.is_authenticated else None,
    )

    if sample is None:
        try:
            full_path.unlink()
        except FileNotFoundError:
            pass
        return JsonResponse({"detail": _("Duplicate snapshot ignored.")})

    return JsonResponse({"detail": _("Snapshot saved."), "sample": str(sample.pk)})
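The endpoint expects a JSON body whose image field is a base64 PNG data URL capped at 5 MiB. A client sketch that fabricates a tiny PNG with Pillow (the snapshot path is hypothetical; CSRF token handling for the staff session is omitted):

import base64
import io

import requests
from PIL import Image  # used only to fabricate PNG bytes

buf = io.BytesIO()
Image.new("RGB", (4, 4), "white").save(buf, format="PNG")
data_url = "data:image/png;base64," + base64.b64encode(buf.getvalue()).decode()

session = requests.Session()
# ... staff-authenticated session with a valid CSRF token assumed ...
resp = session.post(
    "http://localhost:8000/todos/1/snapshot/",  # hypothetical route for todo-snapshot
    json={"image": data_url},
)
print(resp.json())  # {"detail": "Snapshot saved.", "sample": "..."} or an error detail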
|