euparliamentmonitor 0.9.6 → 0.9.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -1
- package/scripts/aggregator/analysis-aggregator.js +48 -0
- package/scripts/aggregator/article-metadata.js +69 -6
- package/scripts/aggregator/artifact-order.js +28 -5
- package/scripts/aggregator/reader-guide-constants.js +13 -1
- package/scripts/aggregator/reader-intelligence-guide.js +105 -0
- package/scripts/generators/news-indexes.d.ts +12 -0
- package/scripts/generators/news-indexes.js +91 -2
- package/scripts/generators/sitemap/rss.js +1 -0
- package/scripts/generators/sitemap/xml.js +1 -0
- package/scripts/minify-assets.js +238 -0
- package/scripts/optimize-css.js +79 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "euparliamentmonitor",
|
|
3
|
-
"version": "0.9.
|
|
3
|
+
"version": "0.9.7",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "European Parliament Intelligence Platform - Monitor political activity with systematic transparency",
|
|
6
6
|
"main": "scripts/index.js",
|
|
@@ -70,6 +70,8 @@
|
|
|
70
70
|
"generate-article:all": "node scripts/aggregator/article-generator.js --all",
|
|
71
71
|
"generate-news-indexes": "node scripts/generators/news-indexes.js",
|
|
72
72
|
"generate-sitemap": "node scripts/generators/sitemap.js",
|
|
73
|
+
"optimize-css": "node scripts/optimize-css.js",
|
|
74
|
+
"minify-assets": "node scripts/minify-assets.js",
|
|
73
75
|
"validate-ep-api": "npx tsx src/utils/validate-ep-api.ts",
|
|
74
76
|
"lint:prompts": "node scripts/lint-prompts.js",
|
|
75
77
|
"htmlhint": "sh -c 'htmlhint *.html; set -- news/*.html; if [ -e \"$1\" ]; then htmlhint \"$@\"; else echo \"No news/*.html files to lint\"; fi'",
|
|
@@ -154,6 +156,7 @@
|
|
|
154
156
|
"@vitest/ui": "4.1.6",
|
|
155
157
|
"chart.js": "4.5.1",
|
|
156
158
|
"chartjs-plugin-annotation": "3.1.0",
|
|
159
|
+
"clean-css": "^5.3.3",
|
|
157
160
|
"d3": "7.9.0",
|
|
158
161
|
"eslint": "10.3.0",
|
|
159
162
|
"eslint-config-prettier": "10.1.8",
|
|
@@ -161,6 +164,7 @@
|
|
|
161
164
|
"eslint-plugin-security": "4.0.0",
|
|
162
165
|
"eslint-plugin-sonarjs": "4.0.3",
|
|
163
166
|
"happy-dom": "20.9.0",
|
|
167
|
+
"html-minifier-terser": "^7.2.0",
|
|
164
168
|
"htmlhint": "1.9.2",
|
|
165
169
|
"husky": "9.1.7",
|
|
166
170
|
"jscpd": "4.1.1",
|
|
@@ -169,6 +173,8 @@
|
|
|
169
173
|
"mermaid": "11.15.0",
|
|
170
174
|
"papaparse": "5.5.3",
|
|
171
175
|
"prettier": "3.8.3",
|
|
176
|
+
"purgecss": "7.0.2",
|
|
177
|
+
"terser": "^5.47.1",
|
|
172
178
|
"ts-api-utils": "2.5.0",
|
|
173
179
|
"tsx": "4.21.0",
|
|
174
180
|
"typedoc": "0.28.19",
|
|
@@ -284,6 +284,10 @@ const READER_GUIDE_EN = {
|
|
|
284
284
|
need: 'Significance scoring',
|
|
285
285
|
value: 'why this story outranks or trails other same-day European Parliament signals',
|
|
286
286
|
},
|
|
287
|
+
'section-actors-forces': {
|
|
288
|
+
need: 'Actors and forces',
|
|
289
|
+
value: 'who is driving the story, what political forces line up behind them, and which institutional levers they can pull',
|
|
290
|
+
},
|
|
287
291
|
'section-coalitions-voting': {
|
|
288
292
|
need: 'Coalitions and voting',
|
|
289
293
|
value: 'political group alignment, voting evidence, and coalition pressure points',
|
|
@@ -304,6 +308,50 @@ const READER_GUIDE_EN = {
|
|
|
304
308
|
need: 'Risk assessment',
|
|
305
309
|
value: 'policy, institutional, coalition, communications, and implementation risk register',
|
|
306
310
|
},
|
|
311
|
+
'section-threat': {
|
|
312
|
+
need: 'Threat landscape',
|
|
313
|
+
value: 'hostile actors, attack vectors, consequence trees, and legislative-disruption pathways',
|
|
314
|
+
},
|
|
315
|
+
'section-forward-projection': {
|
|
316
|
+
need: 'What to watch',
|
|
317
|
+
value: 'dated trigger events, calendar dependencies, and legislative-pipeline forecasts',
|
|
318
|
+
},
|
|
319
|
+
'section-electoral-arc': {
|
|
320
|
+
need: 'Electoral arc and mandate',
|
|
321
|
+
value: 'where the story sits in the EP term, mandate fulfilment, seat projection, and presidency-trio context',
|
|
322
|
+
},
|
|
323
|
+
'section-pestle-context': {
|
|
324
|
+
need: 'PESTLE and structural context',
|
|
325
|
+
value: 'political, economic, social, technological, legal, and environmental forces plus the historical baseline',
|
|
326
|
+
},
|
|
327
|
+
'section-continuity': {
|
|
328
|
+
need: 'Cross-run continuity',
|
|
329
|
+
value: 'what changed since prior sessions and how confidence shifted between runs',
|
|
330
|
+
},
|
|
331
|
+
'section-deep-analysis': {
|
|
332
|
+
need: 'Deep analysis',
|
|
333
|
+
value: 'long-form Economist-style explanation for readers who want the full argument',
|
|
334
|
+
},
|
|
335
|
+
'section-documents': {
|
|
336
|
+
need: 'Document trail',
|
|
337
|
+
value: 'the document index and per-file analysis behind the public judgement',
|
|
338
|
+
},
|
|
339
|
+
'section-extended-intel': {
|
|
340
|
+
need: 'Extended intelligence',
|
|
341
|
+
value: "devil's-advocate critique, comparative parallels, historical precedents, and media framing",
|
|
342
|
+
},
|
|
343
|
+
'section-mcp-reliability': {
|
|
344
|
+
need: 'MCP data reliability',
|
|
345
|
+
value: 'which feeds were healthy, which were degraded, and how data limits bound conclusions',
|
|
346
|
+
},
|
|
347
|
+
'section-quality-reflection': {
|
|
348
|
+
need: 'Analytical quality and reflection',
|
|
349
|
+
value: 'self-assessment scores, methodology audit, structured analytic techniques, and known limitations',
|
|
350
|
+
},
|
|
351
|
+
'section-supplementary-intelligence': {
|
|
352
|
+
need: 'Supplementary intelligence',
|
|
353
|
+
value: 'additional markdown discovered in the run that has not yet been assigned to a canonical section',
|
|
354
|
+
},
|
|
307
355
|
};
|
|
308
356
|
/**
|
|
309
357
|
* Render the generated reader-intelligence guide that appears before the
|
|
@@ -455,6 +455,56 @@ export function stripInlineMarkdown(raw) {
|
|
|
455
455
|
.replace(/\s+/g, ' ')
|
|
456
456
|
.trim();
|
|
457
457
|
}
|
|
458
|
+
/** Connector / determiner words that read as broken copy when they are
|
|
459
|
+
* the final token before a truncation ellipsis. */
|
|
460
|
+
const TRAILING_STOP_WORDS = new Set([
|
|
461
|
+
'the',
|
|
462
|
+
'a',
|
|
463
|
+
'an',
|
|
464
|
+
'of',
|
|
465
|
+
'to',
|
|
466
|
+
'for',
|
|
467
|
+
'in',
|
|
468
|
+
'on',
|
|
469
|
+
'at',
|
|
470
|
+
'by',
|
|
471
|
+
'and',
|
|
472
|
+
'or',
|
|
473
|
+
'with',
|
|
474
|
+
'from',
|
|
475
|
+
]);
|
|
476
|
+
/** Trailing characters we always strip before appending our own ellipsis,
|
|
477
|
+
* so we never emit double-ellipsis or stray punctuation. */
|
|
478
|
+
const TRAILING_PUNCT = /[.,;:—\-…\s]/u;
|
|
479
|
+
/**
|
|
480
|
+
* Repeatedly strip trailing stop-words (separated by a single space) and
|
|
481
|
+
* trailing punctuation (including any pre-existing ellipsis). Implemented
|
|
482
|
+
* imperatively to avoid super-linear regex backtracking on the
|
|
483
|
+
* `(?:\s+stop-word)+$` pattern flagged by `security/detect-unsafe-regex`.
|
|
484
|
+
*
|
|
485
|
+
* @param input - Pre-clipped string to clean up
|
|
486
|
+
* @returns Cleaned string with no trailing stop-words or punctuation
|
|
487
|
+
*/
|
|
488
|
+
function stripTrailingStopWordsAndPunctuation(input) {
|
|
489
|
+
let result = input;
|
|
490
|
+
let changed = true;
|
|
491
|
+
while (changed) {
|
|
492
|
+
changed = false;
|
|
493
|
+
while (result.length > 0 && TRAILING_PUNCT.test(result.charAt(result.length - 1))) {
|
|
494
|
+
result = result.slice(0, -1);
|
|
495
|
+
changed = true;
|
|
496
|
+
}
|
|
497
|
+
const lastSpace = result.lastIndexOf(' ');
|
|
498
|
+
if (lastSpace >= 0) {
|
|
499
|
+
const tail = result.slice(lastSpace + 1).toLowerCase();
|
|
500
|
+
if (TRAILING_STOP_WORDS.has(tail)) {
|
|
501
|
+
result = result.slice(0, lastSpace);
|
|
502
|
+
changed = true;
|
|
503
|
+
}
|
|
504
|
+
}
|
|
505
|
+
}
|
|
506
|
+
return result;
|
|
507
|
+
}
|
|
458
508
|
/**
|
|
459
509
|
* Clamp a string to `DESCRIPTION_MAX_LENGTH` characters, appending
|
|
460
510
|
* an ellipsis when truncation actually happens. Does not break words if
|
|
@@ -467,10 +517,22 @@ export function stripInlineMarkdown(raw) {
|
|
|
467
517
|
export function truncateDescription(text) {
|
|
468
518
|
if (text.length <= DESCRIPTION_MAX_LENGTH)
|
|
469
519
|
return text;
|
|
470
|
-
const cut = text.slice(0, DESCRIPTION_MAX_LENGTH -
|
|
520
|
+
const cut = text.slice(0, DESCRIPTION_MAX_LENGTH - 1);
|
|
521
|
+
// Prefer the last full sentence terminator within the cut so we don't
|
|
522
|
+
// end on a dangling determiner ("…year. The"). Period/!/? followed by
|
|
523
|
+
// a space marks a clean boundary. Only honour the boundary when it
|
|
524
|
+
// sits past the soft minimum so we keep enough body text to be useful.
|
|
525
|
+
const sentenceEnd = Math.max(cut.lastIndexOf('. '), cut.lastIndexOf('! '), cut.lastIndexOf('? '));
|
|
526
|
+
if (sentenceEnd >= DESCRIPTION_MIN_LENGTH) {
|
|
527
|
+
return cut.slice(0, sentenceEnd + 1).replace(/\s+$/, '');
|
|
528
|
+
}
|
|
471
529
|
const lastSpace = cut.lastIndexOf(' ');
|
|
472
|
-
|
|
473
|
-
|
|
530
|
+
let safe = lastSpace > DESCRIPTION_MAX_LENGTH - 60 ? cut.slice(0, lastSpace) : cut;
|
|
531
|
+
// Drop dangling stop-words and trailing punctuation/ellipsis so we
|
|
532
|
+
// never emit broken copy ("…year. The" → "…year.") or double-ellipsis
|
|
533
|
+
// ("The……") when the upstream input already carried an ellipsis.
|
|
534
|
+
safe = stripTrailingStopWordsAndPunctuation(safe);
|
|
535
|
+
return `${safe}…`;
|
|
474
536
|
}
|
|
475
537
|
/**
|
|
476
538
|
* Clamp a title to `TITLE_MAX_LENGTH` characters in the same
|
|
@@ -482,10 +544,11 @@ export function truncateDescription(text) {
|
|
|
482
544
|
export function truncateTitle(text) {
|
|
483
545
|
if (text.length <= TITLE_MAX_LENGTH)
|
|
484
546
|
return text;
|
|
485
|
-
const cut = text.slice(0, TITLE_MAX_LENGTH -
|
|
547
|
+
const cut = text.slice(0, TITLE_MAX_LENGTH - 1);
|
|
486
548
|
const lastSpace = cut.lastIndexOf(' ');
|
|
487
|
-
|
|
488
|
-
|
|
549
|
+
let safe = lastSpace > TITLE_MAX_LENGTH - 40 ? cut.slice(0, lastSpace) : cut;
|
|
550
|
+
safe = stripTrailingStopWordsAndPunctuation(safe);
|
|
551
|
+
return `${safe}…`;
|
|
489
552
|
}
|
|
490
553
|
/**
|
|
491
554
|
* Return the first Markdown H1 (`# …`) in the supplied text, stripped of
|
|
@@ -29,7 +29,7 @@ export const ARTIFACT_SECTIONS = [
|
|
|
29
29
|
{
|
|
30
30
|
id: 'synthesis',
|
|
31
31
|
title: 'Synthesis Summary',
|
|
32
|
-
artifacts: ['intelligence/synthesis-summary.md'],
|
|
32
|
+
artifacts: ['intelligence/synthesis-summary.md', 'synthesis.md'],
|
|
33
33
|
},
|
|
34
34
|
{
|
|
35
35
|
id: 'significance',
|
|
@@ -40,6 +40,7 @@ export const ARTIFACT_SECTIONS = [
|
|
|
40
40
|
'classification/priority-matrix.md',
|
|
41
41
|
'classification/issue-classification.md',
|
|
42
42
|
'intelligence/significance-scoring.md',
|
|
43
|
+
'significance-assessment.md',
|
|
43
44
|
],
|
|
44
45
|
},
|
|
45
46
|
{
|
|
@@ -50,6 +51,9 @@ export const ARTIFACT_SECTIONS = [
|
|
|
50
51
|
'classification/forces-analysis.md',
|
|
51
52
|
'classification/impact-matrix.md',
|
|
52
53
|
'classification/stakeholder-classification.md',
|
|
54
|
+
'actor-mapping.md',
|
|
55
|
+
'political-forces.md',
|
|
56
|
+
'impact-assessment.md',
|
|
53
57
|
// Catch-all for any other classification/*.md not consumed above
|
|
54
58
|
// (keeps non-canonical artifact names out of the Supplementary bucket
|
|
55
59
|
// and inside their journalist-correct section).
|
|
@@ -68,7 +72,11 @@ export const ARTIFACT_SECTIONS = [
|
|
|
68
72
|
{
|
|
69
73
|
id: 'stakeholder-map',
|
|
70
74
|
title: 'Stakeholder Map',
|
|
71
|
-
artifacts: [
|
|
75
|
+
artifacts: [
|
|
76
|
+
'intelligence/stakeholder-map.md',
|
|
77
|
+
'existing/stakeholder-impact.md',
|
|
78
|
+
'stakeholder-perspectives.md',
|
|
79
|
+
],
|
|
72
80
|
},
|
|
73
81
|
{
|
|
74
82
|
id: 'economic-context',
|
|
@@ -87,6 +95,8 @@ export const ARTIFACT_SECTIONS = [
|
|
|
87
95
|
'risk-scoring/legislative-risk.md',
|
|
88
96
|
'risk-scoring/economic-risk.md',
|
|
89
97
|
'risk-scoring/institutional-risk.md',
|
|
98
|
+
'risk-matrix.md',
|
|
99
|
+
'quantitative-swot.md',
|
|
90
100
|
// Catch-all for any other risk-scoring/*.md (e.g. naming variants) so
|
|
91
101
|
// they render under Risk Assessment instead of Supplementary.
|
|
92
102
|
'risk-scoring/',
|
|
@@ -104,7 +114,11 @@ export const ARTIFACT_SECTIONS = [
|
|
|
104
114
|
{
|
|
105
115
|
id: 'scenarios',
|
|
106
116
|
title: 'Scenarios & Wildcards',
|
|
107
|
-
artifacts: [
|
|
117
|
+
artifacts: [
|
|
118
|
+
'intelligence/scenario-forecast.md',
|
|
119
|
+
'intelligence/wildcards-blackswans.md',
|
|
120
|
+
'scenario-forecast.md',
|
|
121
|
+
],
|
|
108
122
|
},
|
|
109
123
|
{
|
|
110
124
|
id: 'forward-projection',
|
|
@@ -113,6 +127,9 @@ export const ARTIFACT_SECTIONS = [
|
|
|
113
127
|
'intelligence/forward-projection.md',
|
|
114
128
|
'intelligence/legislative-pipeline-forecast.md',
|
|
115
129
|
'intelligence/parliamentary-calendar-projection.md',
|
|
130
|
+
'forward/forward-projection.md',
|
|
131
|
+
'forward/legislative-pipeline-forecast.md',
|
|
132
|
+
'forward/parliamentary-calendar-projection.md',
|
|
116
133
|
'extended/forward-indicators.md',
|
|
117
134
|
],
|
|
118
135
|
},
|
|
@@ -130,7 +147,11 @@ export const ARTIFACT_SECTIONS = [
|
|
|
130
147
|
{
|
|
131
148
|
id: 'pestle-context',
|
|
132
149
|
title: 'PESTLE & Context',
|
|
133
|
-
artifacts: [
|
|
150
|
+
artifacts: [
|
|
151
|
+
'intelligence/pestle-analysis.md',
|
|
152
|
+
'intelligence/historical-baseline.md',
|
|
153
|
+
'pestle-analysis.md',
|
|
154
|
+
],
|
|
134
155
|
},
|
|
135
156
|
{
|
|
136
157
|
id: 'continuity',
|
|
@@ -162,7 +183,7 @@ export const ARTIFACT_SECTIONS = [
|
|
|
162
183
|
{
|
|
163
184
|
id: 'extended-intel',
|
|
164
185
|
title: 'Extended Intelligence',
|
|
165
|
-
artifacts: ['extended/'],
|
|
186
|
+
artifacts: ['extended/', 'media-framing.md'],
|
|
166
187
|
},
|
|
167
188
|
{
|
|
168
189
|
id: 'mcp-reliability',
|
|
@@ -177,6 +198,8 @@ export const ARTIFACT_SECTIONS = [
|
|
|
177
198
|
'intelligence/reference-analysis-quality.md',
|
|
178
199
|
'intelligence/workflow-audit.md',
|
|
179
200
|
'intelligence/methodology-reflection.md',
|
|
201
|
+
'article-index.md',
|
|
202
|
+
'methodology-reflection.md',
|
|
180
203
|
],
|
|
181
204
|
},
|
|
182
205
|
];
|
|
@@ -14,10 +14,22 @@ export const READER_GUIDE_SECTION_IDS = [
|
|
|
14
14
|
'section-executive-brief',
|
|
15
15
|
'section-synthesis',
|
|
16
16
|
'section-significance',
|
|
17
|
+
'section-actors-forces',
|
|
17
18
|
'section-coalitions-voting',
|
|
18
19
|
'section-stakeholder-map',
|
|
19
20
|
'section-economic-context',
|
|
20
|
-
'section-scenarios',
|
|
21
21
|
'section-risk',
|
|
22
|
+
'section-threat',
|
|
23
|
+
'section-scenarios',
|
|
24
|
+
'section-forward-projection',
|
|
25
|
+
'section-electoral-arc',
|
|
26
|
+
'section-pestle-context',
|
|
27
|
+
'section-continuity',
|
|
28
|
+
'section-deep-analysis',
|
|
29
|
+
'section-documents',
|
|
30
|
+
'section-extended-intel',
|
|
31
|
+
'section-mcp-reliability',
|
|
32
|
+
'section-quality-reflection',
|
|
33
|
+
'section-supplementary-intelligence',
|
|
22
34
|
];
|
|
23
35
|
//# sourceMappingURL=reader-guide-constants.js.map
|
|
@@ -567,6 +567,74 @@ const READER_GUIDE_ROWS = {
|
|
|
567
567
|
zh: '本次运行如何与先前会话关联、变化了什么以及置信度在运行之间如何变化',
|
|
568
568
|
},
|
|
569
569
|
},
|
|
570
|
+
'section-deep-analysis': {
|
|
571
|
+
need: {
|
|
572
|
+
en: 'Deep analysis',
|
|
573
|
+
sv: 'Djupanalys',
|
|
574
|
+
da: 'Dybdegående analyse',
|
|
575
|
+
no: 'Dybdeanalyse',
|
|
576
|
+
fi: 'Syväanalyysi',
|
|
577
|
+
de: 'Tiefenanalyse',
|
|
578
|
+
fr: 'Analyse approfondie',
|
|
579
|
+
es: 'Análisis profundo',
|
|
580
|
+
nl: 'Diepteanalyse',
|
|
581
|
+
ar: 'تحليل معمق',
|
|
582
|
+
he: 'ניתוח עומק',
|
|
583
|
+
ja: '詳細分析',
|
|
584
|
+
ko: '심층 분석',
|
|
585
|
+
zh: '深度分析',
|
|
586
|
+
},
|
|
587
|
+
value: {
|
|
588
|
+
en: 'long-form Economist-style explanation for readers who want the full argument',
|
|
589
|
+
sv: 'lång Economist-liknande förklaring för läsare som vill ha hela argumentet',
|
|
590
|
+
da: 'lang Economist-lignende forklaring for læsere der ønsker hele argumentet',
|
|
591
|
+
no: 'lang Economist-lignende forklaring for lesere som ønsker hele argumentet',
|
|
592
|
+
fi: 'pitkä Economist-tyylinen selitys lukijoille, jotka haluavat koko perustelun',
|
|
593
|
+
de: 'lange, Economist-artige Erklärung für Leser, die das ganze Argument wollen',
|
|
594
|
+
fr: "explication longue de style Economist pour les lecteurs qui veulent l'argument complet",
|
|
595
|
+
es: 'explicación extensa de estilo Economist para lectores que quieren el argumento completo',
|
|
596
|
+
nl: 'lange uitleg in Economist-stijl voor lezers die het volledige argument willen',
|
|
597
|
+
ar: 'شرح مطول بأسلوب إيكونوميست للقراء الذين يريدون الحجة كاملة',
|
|
598
|
+
he: 'הסבר ארוך בסגנון האקונומיסט לקוראים שרוצים את הטיעון המלא',
|
|
599
|
+
ja: '全体の論旨を求める読者向けのエコノミスト風長文解説',
|
|
600
|
+
ko: '전체 논지를 원하는 독자를 위한 이코노미스트식 장문 설명',
|
|
601
|
+
zh: '为希望了解完整论证的读者提供的《经济学人》式长篇解释',
|
|
602
|
+
},
|
|
603
|
+
},
|
|
604
|
+
'section-documents': {
|
|
605
|
+
need: {
|
|
606
|
+
en: 'Document trail',
|
|
607
|
+
sv: 'Dokumentspår',
|
|
608
|
+
da: 'Dokumentspor',
|
|
609
|
+
no: 'Dokumentspor',
|
|
610
|
+
fi: 'Asiakirjapolku',
|
|
611
|
+
de: 'Dokumentenspur',
|
|
612
|
+
fr: 'Piste documentaire',
|
|
613
|
+
es: 'Rastro documental',
|
|
614
|
+
nl: 'Documentspoor',
|
|
615
|
+
ar: 'مسار الوثائق',
|
|
616
|
+
he: 'מסלול מסמכים',
|
|
617
|
+
ja: '文書トレイル',
|
|
618
|
+
ko: '문서 추적',
|
|
619
|
+
zh: '文件线索',
|
|
620
|
+
},
|
|
621
|
+
value: {
|
|
622
|
+
en: 'the document index and per-file analysis behind the public judgement',
|
|
623
|
+
sv: 'dokumentindexet och analysen per fil bakom den offentliga bedömningen',
|
|
624
|
+
da: 'dokumentindekset og analyse pr. fil bag den offentlige vurdering',
|
|
625
|
+
no: 'dokumentindeksen og analyse per fil bak den offentlige vurderingen',
|
|
626
|
+
fi: 'asiakirjahakemisto ja tiedostokohtainen analyysi julkisen arvion taustalla',
|
|
627
|
+
de: 'Dokumentenindex und Einzeldateianalyse hinter der öffentlichen Bewertung',
|
|
628
|
+
fr: "l'index des documents et l'analyse fichier par fichier derrière le jugement public",
|
|
629
|
+
es: 'el índice documental y el análisis por archivo detrás del juicio público',
|
|
630
|
+
nl: 'de documentenindex en analyse per bestand achter het publieke oordeel',
|
|
631
|
+
ar: 'فهرس الوثائق والتحليل لكل ملف خلف الحكم العام',
|
|
632
|
+
he: 'אינדקס המסמכים וניתוח לפי קובץ שמאחורי השיפוט הציבורי',
|
|
633
|
+
ja: '公開判断の背後にある文書索引とファイル別分析',
|
|
634
|
+
ko: '공개 판단 뒤에 있는 문서 색인과 파일별 분석',
|
|
635
|
+
zh: '公共判断背后的文件索引和逐文件分析',
|
|
636
|
+
},
|
|
637
|
+
},
|
|
570
638
|
'section-extended-intel': {
|
|
571
639
|
need: {
|
|
572
640
|
en: 'Extended intelligence',
|
|
@@ -669,6 +737,40 @@ const READER_GUIDE_ROWS = {
|
|
|
669
737
|
zh: '自我评估分数、方法论审计、使用的结构化分析技术和已知限制',
|
|
670
738
|
},
|
|
671
739
|
},
|
|
740
|
+
'section-supplementary-intelligence': {
|
|
741
|
+
need: {
|
|
742
|
+
en: 'Supplementary intelligence',
|
|
743
|
+
sv: 'Kompletterande underrättelse',
|
|
744
|
+
da: 'Supplerende efterretning',
|
|
745
|
+
no: 'Supplerende etterretning',
|
|
746
|
+
fi: 'Täydentävä tiedustelu',
|
|
747
|
+
de: 'Ergänzende Aufklärung',
|
|
748
|
+
fr: 'Renseignement supplémentaire',
|
|
749
|
+
es: 'Inteligencia suplementaria',
|
|
750
|
+
nl: 'Aanvullende inlichtingen',
|
|
751
|
+
ar: 'استخبارات تكميلية',
|
|
752
|
+
he: 'מודיעין משלים',
|
|
753
|
+
ja: '補足インテリジェンス',
|
|
754
|
+
ko: '보충 인텔리전스',
|
|
755
|
+
zh: '补充情报',
|
|
756
|
+
},
|
|
757
|
+
value: {
|
|
758
|
+
en: 'additional markdown discovered in the run that has not yet been assigned to a canonical section',
|
|
759
|
+
sv: 'ytterligare markdown som hittats i körningen och ännu inte tilldelats en kanonisk sektion',
|
|
760
|
+
da: 'yderligere markdown fundet i kørslen som endnu ikke er tildelt en kanonisk sektion',
|
|
761
|
+
no: 'ytterligere markdown funnet i kjøringen som ennå ikke er tilordnet en kanonisk seksjon',
|
|
762
|
+
fi: 'ajossa löydetty lisämarkdown, jota ei vielä ole liitetty kanoniseen osioon',
|
|
763
|
+
de: 'zusätzliches Markdown aus dem Lauf, das noch keinem kanonischen Abschnitt zugeordnet ist',
|
|
764
|
+
fr: "markdown supplémentaire découvert dans l'exécution et pas encore affecté à une section canonique",
|
|
765
|
+
es: 'markdown adicional descubierto en la ejecución que aún no se ha asignado a una sección canónica',
|
|
766
|
+
nl: 'extra markdown gevonden in de run dat nog niet aan een canonieke sectie is toegewezen',
|
|
767
|
+
ar: 'ملفات ماركداون إضافية اكتُشفت في التشغيل ولم تُسند بعد إلى قسم معياري',
|
|
768
|
+
he: 'מרקדאון נוסף שהתגלה בהרצה ועדיין לא שובץ למדור קנוני',
|
|
769
|
+
ja: '実行内で見つかったがまだ正規セクションに割り当てられていない追加Markdown',
|
|
770
|
+
ko: '실행에서 발견되었지만 아직 표준 섹션에 할당되지 않은 추가 마크다운',
|
|
771
|
+
zh: '运行中发现但尚未分配到规范章节的附加Markdown',
|
|
772
|
+
},
|
|
773
|
+
},
|
|
672
774
|
};
|
|
673
775
|
/* ─── Section icons ─────────────────────────────────────────────── */
|
|
674
776
|
/** Visual icons for each reader guide section to improve scannability. */
|
|
@@ -687,9 +789,12 @@ const SECTION_ICONS = {
|
|
|
687
789
|
'section-electoral-arc': '🗳️',
|
|
688
790
|
'section-pestle-context': '🌍',
|
|
689
791
|
'section-continuity': '🔁',
|
|
792
|
+
'section-deep-analysis': '🔬',
|
|
793
|
+
'section-documents': '📄',
|
|
690
794
|
'section-extended-intel': '🧠',
|
|
691
795
|
'section-mcp-reliability': '📡',
|
|
692
796
|
'section-quality-reflection': '🪞',
|
|
797
|
+
'section-supplementary-intelligence': '📎',
|
|
693
798
|
};
|
|
694
799
|
/**
|
|
695
800
|
* Look up the visual icon for a known article section.
|
|
@@ -8,6 +8,18 @@ import type { ParsedArticle } from '../types/index.js';
|
|
|
8
8
|
* @returns Filename string
|
|
9
9
|
*/
|
|
10
10
|
export declare function getIndexFilename(lang: string): string;
|
|
11
|
+
/**
|
|
12
|
+
* Backfill hreflang alternate links for all article HTML files.
|
|
13
|
+
*
|
|
14
|
+
* Handles three cases:
|
|
15
|
+
* 1. Articles with no hreflang links at all → inject the full block before `</head>`
|
|
16
|
+
* 2. Articles with relative hreflang URLs → replace with absolute URLs
|
|
17
|
+
* 3. Articles already correct → skip
|
|
18
|
+
*
|
|
19
|
+
* @param filenames - News article filenames
|
|
20
|
+
* @returns Number of HTML files updated
|
|
21
|
+
*/
|
|
22
|
+
export declare function backfillArticleHreflang(filenames: readonly string[]): number;
|
|
11
23
|
/**
|
|
12
24
|
* Generate index HTML for a language.
|
|
13
25
|
*
|
|
@@ -92,9 +92,9 @@ function renderCard(article, meta, categoryLabels) {
|
|
|
92
92
|
function buildHreflangTags() {
|
|
93
93
|
const links = ALL_LANGUAGES.map((code) => {
|
|
94
94
|
const href = getIndexFilename(code);
|
|
95
|
-
return `<link rel="alternate" hreflang="${code}" href="${href}">`;
|
|
95
|
+
return `<link rel="alternate" hreflang="${code}" href="${BASE_URL}/${href}">`;
|
|
96
96
|
});
|
|
97
|
-
links.push(
|
|
97
|
+
links.push(`<link rel="alternate" hreflang="x-default" href="${BASE_URL}/index.html">`);
|
|
98
98
|
return links.join('\n ');
|
|
99
99
|
}
|
|
100
100
|
/**
|
|
@@ -253,6 +253,91 @@ function applyArticleSeoBackfill(html, description, keywords) {
|
|
|
253
253
|
next = next.replace(/"description":"[^"]*"/u, `"description":"${jsonDescription}"`);
|
|
254
254
|
return next;
|
|
255
255
|
}
|
|
256
|
+
/**
|
|
257
|
+
* Build hreflang `<link rel="alternate">` tags for an article slug.
|
|
258
|
+
* Produces one tag per supported language plus an `x-default` pointing at
|
|
259
|
+
* the English variant, all using absolute URLs.
|
|
260
|
+
*
|
|
261
|
+
* @param articleSlug - Slug without language suffix (e.g. `2026-02-24-propositions`)
|
|
262
|
+
* @returns Newline-joined `<link>` tags
|
|
263
|
+
*/
|
|
264
|
+
function buildArticleHreflang(articleSlug) {
|
|
265
|
+
const entries = ALL_LANGUAGES.map((code) => ` <link rel="alternate" hreflang="${code}" href="${BASE_URL}/news/${articleSlug}-${code}.html">`);
|
|
266
|
+
entries.push(` <link rel="alternate" hreflang="x-default" href="${BASE_URL}/news/${articleSlug}-en.html">`);
|
|
267
|
+
return entries.join('\n');
|
|
268
|
+
}
|
|
269
|
+
/**
|
|
270
|
+
* Inject hreflang links into an article that has none.
|
|
271
|
+
*
|
|
272
|
+
* @param html - Article HTML content
|
|
273
|
+
* @param hreflangBlock - Pre-built hreflang link block
|
|
274
|
+
* @returns Updated HTML, or original if no change needed
|
|
275
|
+
*/
|
|
276
|
+
function injectHreflangLinks(html, hreflangBlock) {
|
|
277
|
+
return html.replace(/(<\/head>)/u, `${hreflangBlock}\n$1`);
|
|
278
|
+
}
|
|
279
|
+
/**
|
|
280
|
+
* Replace existing relative hreflang links with absolute URLs.
|
|
281
|
+
*
|
|
282
|
+
* @param html - Article HTML content
|
|
283
|
+
* @param hreflangBlock - Pre-built hreflang link block with absolute URLs
|
|
284
|
+
* @returns Updated HTML, or original if no change needed
|
|
285
|
+
*/
|
|
286
|
+
function fixRelativeHreflangLinks(html, hreflangBlock) {
|
|
287
|
+
const stripped = html.replace(/\s*<link\s+rel="alternate"\s+hreflang="[^"]*"\s+href="[^"]*">\n?/gu, '');
|
|
288
|
+
return stripped.replace(/(<\/head>)/u, `${hreflangBlock}\n$1`);
|
|
289
|
+
}
|
|
290
|
+
/**
|
|
291
|
+
* Backfill hreflang alternate links for all article HTML files.
|
|
292
|
+
*
|
|
293
|
+
* Handles three cases:
|
|
294
|
+
* 1. Articles with no hreflang links at all → inject the full block before `</head>`
|
|
295
|
+
* 2. Articles with relative hreflang URLs → replace with absolute URLs
|
|
296
|
+
* 3. Articles already correct → skip
|
|
297
|
+
*
|
|
298
|
+
* @param filenames - News article filenames
|
|
299
|
+
* @returns Number of HTML files updated
|
|
300
|
+
*/
|
|
301
|
+
export function backfillArticleHreflang(filenames) {
|
|
302
|
+
let updated = 0;
|
|
303
|
+
for (const filename of filenames) {
|
|
304
|
+
if (backfillOneArticleHreflang(filename))
|
|
305
|
+
updated++;
|
|
306
|
+
}
|
|
307
|
+
return updated;
|
|
308
|
+
}
|
|
309
|
+
/**
|
|
310
|
+
* Backfill hreflang for a single article file.
|
|
311
|
+
*
|
|
312
|
+
* @param filename - News article filename
|
|
313
|
+
* @returns True when the file was updated
|
|
314
|
+
*/
|
|
315
|
+
function backfillOneArticleHreflang(filename) {
|
|
316
|
+
const parsed = parseArticleFilename(filename);
|
|
317
|
+
if (!parsed)
|
|
318
|
+
return false;
|
|
319
|
+
const filepath = path.join(NEWS_DIR, filename);
|
|
320
|
+
const html = readArticleHtml(filepath);
|
|
321
|
+
if (!html)
|
|
322
|
+
return false;
|
|
323
|
+
const articleSlug = `${parsed.date}-${parsed.slug}`;
|
|
324
|
+
const hreflangBlock = buildArticleHreflang(articleSlug);
|
|
325
|
+
const hasHreflang = /<link\s+rel="alternate"\s+hreflang="/u.test(html);
|
|
326
|
+
let next;
|
|
327
|
+
if (!hasHreflang) {
|
|
328
|
+
next = injectHreflangLinks(html, hreflangBlock);
|
|
329
|
+
}
|
|
330
|
+
else {
|
|
331
|
+
const hasRelative = /<link\s+rel="alternate"\s+hreflang="[^"]*"\s+href="(?!https?:\/\/)/u.test(html);
|
|
332
|
+
if (!hasRelative)
|
|
333
|
+
return false;
|
|
334
|
+
next = fixRelativeHreflangLinks(html, hreflangBlock);
|
|
335
|
+
}
|
|
336
|
+
if (next === html)
|
|
337
|
+
return false;
|
|
338
|
+
atomicWrite(filepath, next);
|
|
339
|
+
return true;
|
|
340
|
+
}
|
|
256
341
|
/**
|
|
257
342
|
* Generate index HTML for a language.
|
|
258
343
|
*
|
|
@@ -522,6 +607,10 @@ function main() {
|
|
|
522
607
|
if (backfilled > 0) {
|
|
523
608
|
console.log(`🔎 Backfilled SEO metadata for ${backfilled} legacy article file(s)`);
|
|
524
609
|
}
|
|
610
|
+
const hreflangBackfilled = backfillArticleHreflang(articles);
|
|
611
|
+
if (hreflangBackfilled > 0) {
|
|
612
|
+
console.log(`🔗 Backfilled hreflang links for ${hreflangBackfilled} article file(s)`);
|
|
613
|
+
}
|
|
525
614
|
const grouped = groupArticlesByLanguage(articles, ALL_LANGUAGES);
|
|
526
615
|
const metaBuildTimerLabel = `⏱️ Built metadata map for ${articles.length} articles`;
|
|
527
616
|
console.time(metaBuildTimerLabel);
|
|
@@ -37,6 +37,7 @@ export function generateRssFeed(articleInfos, buildDate = new Date().toUTCString
|
|
|
37
37
|
<dc:language>${escapeXML(item.lang)}</dc:language>
|
|
38
38
|
</item>`)
|
|
39
39
|
.join('\n');
|
|
40
|
+
// REUSE-IgnoreStart
|
|
40
41
|
return `<?xml version="1.0" encoding="UTF-8"?>
|
|
41
42
|
<!-- SPDX-FileCopyrightText: 2024-2026 Hack23 AB -->
|
|
42
43
|
<!-- SPDX-License-Identifier: Apache-2.0 -->
|
|
@@ -86,6 +86,7 @@ export function generateSitemap(articles, docsFiles = []) {
|
|
|
86
86
|
...buildArticleUrls(articles),
|
|
87
87
|
...buildDocsUrls(docsFiles, today),
|
|
88
88
|
];
|
|
89
|
+
// REUSE-IgnoreStart
|
|
89
90
|
return `<?xml version="1.0" encoding="UTF-8"?>
|
|
90
91
|
<!-- SPDX-FileCopyrightText: 2024-2026 Hack23 AB -->
|
|
91
92
|
<!-- SPDX-License-Identifier: Apache-2.0 -->
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// SPDX-FileCopyrightText: 2024-2026 Hack23 AB
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
//
|
|
5
|
+
// Minifies HTML, CSS, and JS files in place using pure-Node packages so that
|
|
6
|
+
// the deploy pipeline never needs to pull a Docker image (compatible with the
|
|
7
|
+
// `egress-policy: block` harden-runner configuration in deploy-s3.yml).
|
|
8
|
+
//
|
|
9
|
+
// Run order in deploy-s3.yml:
|
|
10
|
+
// 1. prebuild — generate all pages
|
|
11
|
+
// 2. optimize-css — PurgeCSS drops unused selectors from styles.css
|
|
12
|
+
// 3. minify-assets (THIS SCRIPT) — compress the now-smaller CSS + HTML + JS
|
|
13
|
+
// 4. rm -rf node_modules — clean up before S3 sync
|
|
14
|
+
// 5. aws s3 sync passes — upload minified payload
|
|
15
|
+
//
|
|
16
|
+
// Must run BEFORE `rm -rf node_modules` because html-minifier-terser,
|
|
17
|
+
// clean-css, and terser are devDependencies.
|
|
18
|
+
//
|
|
19
|
+
// Scopes:
|
|
20
|
+
// CSS — styles.css only (the one deployed stylesheet)
|
|
21
|
+
// HTML — root *.html + news/*.html (all pages shipped to S3)
|
|
22
|
+
// JS — js/**/*.js excluding *.min.js (vendor files already minified upstream;
|
|
23
|
+
// re-minifying risks stripping required license banners)
|
|
24
|
+
//
|
|
25
|
+
// HTML files are processed with a concurrency cap (CONCURRENCY) to avoid
|
|
26
|
+
// overwhelming the event loop on the 4400+ news/*.html archive while still
|
|
27
|
+
// finishing in reasonable time.
|
|
28
|
+
//
|
|
29
|
+
// Exits non-zero if any file fails so the deploy halts before uploading a
|
|
30
|
+
// partially-minified payload.
|
|
31
|
+
|
|
32
|
+
import { readFileSync, writeFileSync, readdirSync } from 'node:fs';
|
|
33
|
+
import { resolve, join, dirname } from 'node:path';
|
|
34
|
+
import { fileURLToPath } from 'node:url';
|
|
35
|
+
import { minify as minifyHtml } from 'html-minifier-terser';
|
|
36
|
+
import CleanCSS from 'clean-css';
|
|
37
|
+
import { minify as minifyJs } from 'terser';
|
|
38
|
+
|
|
39
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
40
|
+
const repoRoot = resolve(__dirname, '..');
|
|
41
|
+
|
|
42
|
+
const CONCURRENCY = 32; // parallel HTML workers
|
|
43
|
+
|
|
44
|
+
// ─── helpers ────────────────────────────────────────────────────────────────
|
|
45
|
+
|
|
46
|
+
function fmt(before, after) {
|
|
47
|
+
const saved = before - after;
|
|
48
|
+
const pct = before > 0 ? ((saved / before) * 100).toFixed(1) : '0.0';
|
|
49
|
+
return `${before} → ${after} B (saved ${saved} B / ${pct}%)`;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/** Extract a printable message from any thrown value (Error or otherwise). */
function errorMessage(e) {
  if (e instanceof Error) {
    return e.message;
  }
  return String(e);
}
|
|
55
|
+
|
|
56
|
+
/**
 * Run async task factories with at most `limit` in-flight at once.
 *
 * Fix over the original: results are stored by task index rather than
 * `push`ed in completion order, so the returned array always aligns with
 * `tasks` even when tasks finish out of order. Also guards a non-positive
 * `limit`, which previously spawned zero workers and silently resolved
 * with an empty result array.
 *
 * @param {Array<() => Promise<any>>} tasks - zero-argument async factories
 * @param {number} limit - maximum number of concurrently running tasks
 * @returns {Promise<any[]>} resolved values, in the same order as `tasks`
 */
async function pool(tasks, limit) {
  const results = new Array(tasks.length);
  let next = 0;
  async function worker() {
    // `next++` is race-free here: JS is single-threaded between awaits.
    while (next < tasks.length) {
      const i = next++;
      results[i] = await tasks[i]();
    }
  }
  const workerCount = Math.min(Math.max(1, limit), tasks.length);
  await Promise.all(Array.from({ length: workerCount }, worker));
  return results;
}
|
|
70
|
+
|
|
71
|
+
// Running byte totals across every asset type (CSS + HTML + JS), plus a
// failure counter — any non-zero `errors` makes the script exit(1) at the
// end so the deploy halts before uploading a partially-minified payload.
let totalBefore = 0;
let totalAfter = 0;
let errors = 0;
|
|
74
|
+
|
|
75
|
+
// ─── CSS ────────────────────────────────────────────────────────────────────

// Minify the single deployed stylesheet in place with clean-css level-2
// optimizations. A clean-css error counts as a failure and leaves the
// original file untouched on disk.
const cssPath = resolve(repoRoot, 'styles.css');
{
  const source = readFileSync(cssPath, 'utf8');
  const bytesIn = Buffer.byteLength(source, 'utf8');
  const minified = new CleanCSS({ level: 2 }).minify(source);
  const hasErrors = Boolean(minified.errors && minified.errors.length);
  if (hasErrors) {
    console.error('❌ clean-css errors in styles.css:', minified.errors);
    errors++;
  } else {
    writeFileSync(cssPath, minified.styles);
    const bytesOut = Buffer.byteLength(minified.styles, 'utf8');
    totalBefore += bytesIn;
    totalAfter += bytesOut;
    console.log(`  styles.css ${fmt(bytesIn, bytesOut)}`);
  }
}
|
|
93
|
+
|
|
94
|
+
// ─── HTML ────────────────────────────────────────────────────────────────────
|
|
95
|
+
|
|
96
|
+
// html-minifier-terser options shared by every page.
// NOTE(review): removeOptionalTags is kept false — presumably to ship
// fully-formed documents (explicit </body>/</html> etc.); confirm before
// ever enabling it.
const htmlOpts = {
  collapseWhitespace: true,
  removeComments: true,
  removeOptionalTags: false,
  removeRedundantAttributes: true,
  removeScriptTypeAttributes: true,
  removeStyleLinkTypeAttributes: true,
  minifyCSS: true, // also minify inline <style> content
  minifyJS: true, // also minify inline <script> content
  useShortDoctype: true,
};
|
|
107
|
+
|
|
108
|
+
// Gather every page shipped to S3: repo-root *.html plus the news/ archive.
const rootHtml = [];
for (const name of readdirSync(repoRoot)) {
  if (name.endsWith('.html')) {
    rootHtml.push(resolve(repoRoot, name));
  }
}

const newsDir = resolve(repoRoot, 'news');
let newsHtml = [];
try {
  newsHtml = readdirSync(newsDir)
    .filter((name) => name.endsWith('.html'))
    .map((name) => join(newsDir, name));
} catch {
  // Best-effort: the news/ directory may be absent in some environments.
}

const allHtml = [...rootHtml, ...newsHtml];
|
|
124
|
+
let htmlBefore = 0;
let htmlAfter = 0;
let htmlErrors = 0;

// One task per page: read → minify → write back in place. A failure is
// logged and reported via ok:false so one bad page never kills the batch.
const htmlTasks = allHtml.map((filePath) => async () => {
  try {
    const source = readFileSync(filePath, 'utf8');
    const bytesIn = Buffer.byteLength(source, 'utf8');
    const output = await minifyHtml(source, htmlOpts);
    writeFileSync(filePath, output);
    const bytesOut = Buffer.byteLength(output, 'utf8');
    return { before: bytesIn, after: bytesOut, ok: true };
  } catch (e) {
    console.error(`❌ HTML minify failed for ${filePath}: ${errorMessage(e)}`);
    return { before: 0, after: 0, ok: false };
  }
});

const htmlResults = await pool(htmlTasks, CONCURRENCY);
for (const { ok, before, after } of htmlResults) {
  if (!ok) {
    htmlErrors++;
    continue;
  }
  htmlBefore += before;
  htmlAfter += after;
}
totalBefore += htmlBefore;
totalAfter += htmlAfter;
errors += htmlErrors;
console.log(
  `  HTML: ${allHtml.length - htmlErrors} files minified ${fmt(htmlBefore, htmlAfter)}`,
);
|
|
157
|
+
|
|
158
|
+
// ─── JS ─────────────────────────────────────────────────────────────────────
|
|
159
|
+
|
|
160
|
+
const jsDir = resolve(repoRoot, 'js');
let jsFiles = [];

/**
 * Recursively walk `dir`, recording every minifiable script in `jsFiles`.
 * Skips *.min.js — those vendor bundles arrive pre-minified, and running
 * terser on them again is wasteful and risks stripping required license
 * banners. A missing/unreadable directory is silently ignored (js/ may
 * not exist in test environments).
 */
function collectJs(dir) {
  let entries;
  try {
    entries = readdirSync(dir, { withFileTypes: true });
  } catch {
    return; // best-effort: directory absent or unreadable
  }
  for (const entry of entries) {
    const fullPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      collectJs(fullPath);
      continue;
    }
    const isPlainJs =
      entry.name.endsWith('.js') && !entry.name.endsWith('.min.js');
    if (isPlainJs) {
      jsFiles.push(fullPath);
    }
  }
}
collectJs(jsDir);
|
|
179
|
+
|
|
180
|
+
let jsBefore = 0;
let jsAfter = 0;
let jsErrors = 0;

// One task per script: read → terser → write back in place.
const jsTasks = jsFiles.map((filePath) => async () => {
  try {
    const source = readFileSync(filePath, 'utf8');
    const bytesIn = Buffer.byteLength(source, 'utf8');
    const result = await minifyJs(source, {
      compress: true,
      mangle: true,
      // 'some' keeps comments starting with ! or containing @license /
      // @preserve, so /*! ... */ banners (e.g. Chart.js, D3, MIT) survive.
      format: { comments: 'some' },
    });
    if (result.code) {
      writeFileSync(filePath, result.code);
      const bytesOut = Buffer.byteLength(result.code, 'utf8');
      return { before: bytesIn, after: bytesOut, ok: true };
    }
    // Terser succeeded but emitted nothing — leave the file untouched.
    console.warn(`⚠️ terser returned no code for ${filePath} — skipping`);
    return { before: bytesIn, after: bytesIn, ok: true };
  } catch (e) {
    console.error(`❌ JS minify failed for ${filePath}: ${errorMessage(e)}`);
    return { before: 0, after: 0, ok: false };
  }
});

const jsResults = await pool(jsTasks, CONCURRENCY);
for (const { ok, before, after } of jsResults) {
  if (!ok) {
    jsErrors++;
    continue;
  }
  jsBefore += before;
  jsAfter += after;
}
totalBefore += jsBefore;
totalAfter += jsAfter;
errors += jsErrors;
console.log(
  `  JS: ${jsFiles.length - jsErrors} files minified ${fmt(jsBefore, jsAfter)}`,
);
|
|
224
|
+
|
|
225
|
+
// ─── summary ────────────────────────────────────────────────────────────────

const savedTotal = totalBefore - totalAfter;
let pctTotal = '0.0';
if (totalBefore > 0) {
  pctTotal = ((savedTotal / totalBefore) * 100).toFixed(1);
}
console.log(
  `✅ Minification complete: ${totalBefore} → ${totalAfter} B ` +
    `(saved ${savedTotal} B / ${pctTotal}% across CSS + ${allHtml.length} HTML + ${jsFiles.length} JS)`,
);

// Any single failed file aborts the deploy rather than shipping a
// partially-minified payload.
if (errors > 0) {
  console.error(`❌ ${errors} file(s) failed to minify — aborting deploy.`);
  process.exit(1);
}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// SPDX-FileCopyrightText: 2024-2026 Hack23 AB
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
//
|
|
5
|
+
// Run PurgeCSS against the deployed CSS using the committed config in
|
|
6
|
+
// purgecss.config.cjs. Writes the purged result back to styles.css
|
|
7
|
+
// in place — keeps the existing filename so every HTML
|
|
8
|
+
// `<link rel="stylesheet" href="styles.css">` continues to resolve and
|
|
9
|
+
// no rewrite of generated pages is required.
|
|
10
|
+
//
|
|
11
|
+
// Invoked from `.github/workflows/deploy-s3.yml` after `npm run prebuild`
|
|
12
|
+
// (so all generated HTML pages exist) and before the
|
|
13
|
+
// `npm run minify-assets` step (which then minifies the now-smaller CSS).
|
|
14
|
+
//
|
|
15
|
+
// Logs before/after byte counts so the deploy log records the saving,
|
|
16
|
+
// and exits non-zero on failure so the deploy halts before publishing
|
|
17
|
+
// half-stripped CSS.
|
|
18
|
+
|
|
19
|
+
import { readFileSync, statSync, writeFileSync } from 'node:fs';
|
|
20
|
+
import { fileURLToPath } from 'node:url';
|
|
21
|
+
import { dirname, resolve } from 'node:path';
|
|
22
|
+
import { createRequire } from 'node:module';
|
|
23
|
+
import { PurgeCSS } from 'purgecss';
|
|
24
|
+
|
|
25
|
+
// createRequire lets this ES module load the CommonJS purgecss.config.cjs.
const require = createRequire(import.meta.url);
const __dirname = dirname(fileURLToPath(import.meta.url));
const repoRoot = resolve(__dirname, '..');

const configPath = resolve(repoRoot, 'purgecss.config.cjs');
const cssPath = resolve(repoRoot, 'styles.css');
// Committed PurgeCSS config (content globs, css targets, safelist, …).
const config = require(configPath);
|
|
32
|
+
|
|
33
|
+
// Snapshot the stylesheet before purging: the byte count feeds the savings
// log, and the full text lets us restore the file verbatim if the purged
// output later fails the sanity floor — reading the file AFTER the in-place
// write would only recover the (broken) purged content.
const originalCss = readFileSync(cssPath, 'utf8');
const beforeBytes = statSync(cssPath).size;
|
|
39
|
+
|
|
40
|
+
// Run PurgeCSS with only the recognised options from the committed config.
const purgeOptions = {
  content: config.content,
  css: config.css,
  safelist: config.safelist,
  variables: config.variables,
  keyframes: config.keyframes,
  fontFace: config.fontFace,
};
const result = await new PurgeCSS().purge(purgeOptions);

// Defensive: an empty result (or one without a css string) means nothing
// usable came back — halt the deploy instead of writing garbage.
if (!result.length || typeof result[0].css !== 'string') {
  console.error('❌ PurgeCSS returned no result for styles.css — aborting.');
  process.exit(1);
}
|
|
53
|
+
|
|
54
|
+
// Write the purged stylesheet back in place (same filename, so every
// existing <link rel="stylesheet" href="styles.css"> keeps resolving).
const purged = result[0].css;
writeFileSync(cssPath, purged);

const afterBytes = statSync(cssPath).size;
const savedBytes = beforeBytes - afterBytes;
// Guard the division: a zero-byte input would otherwise print "NaN%".
// (Matches the guard in fmt() in scripts/minify-assets.js.)
const savedPct =
  beforeBytes > 0 ? ((savedBytes / beforeBytes) * 100).toFixed(1) : '0.0';

console.log(
  `✅ styles.css purged: ${beforeBytes} → ${afterBytes} bytes ` +
    `(saved ${savedBytes} B / ${savedPct}%)`,
);
|
|
65
|
+
|
|
66
|
+
// Sanity floor: a purge result under 32 KiB almost certainly means the
// content globs missed pages (e.g. the news/ directory) and live selectors
// were stripped. Restore the original CSS and fail the deploy instead of
// shipping a near-empty stylesheet.
const FLOOR_BYTES = 32 * 1024;
const belowFloor = afterBytes < FLOOR_BYTES;
if (belowFloor) {
  console.error(
    `❌ styles.css after purge (${afterBytes} B) is below the ${FLOOR_BYTES} B sanity ` +
      'floor — restoring the pre-purge CSS and refusing to ship a likely-broken stylesheet.',
  );
  // Put back the snapshot captured BEFORE the purge so no later step (or
  // cached re-run) can ever observe the broken file.
  writeFileSync(cssPath, originalCss);
  process.exit(1);
}
|