ultimate-jekyll-manager 0.0.93 → 0.0.95
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/migrate.js +331 -0
- package/dist/commands/minify-html.js +19 -0
- package/dist/defaults/dist/pages/test/libraries/error.html +29 -0
- package/dist/gulp/tasks/minifyHtml.js +254 -70
- package/dist/gulp/tasks/translation.js +5 -0
- package/dist/gulp/tasks/utils/BU/minifyHtml.js +183 -0
- package/dist/gulp/tasks/utils/BU/minifyHtml.worker.js +72 -0
- package/dist/gulp/tasks/webpack.js +9 -3
- package/firebase-debug.log +32 -0
- package/package.json +2 -1
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
// Libraries
|
|
2
|
+
const Manager = new (require('../build.js'));
|
|
3
|
+
const logger = Manager.logger('migrate');
|
|
4
|
+
const { execute } = require('node-powertools');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const jetpack = require('fs-jetpack');
|
|
7
|
+
|
|
8
|
+
// Load package
|
|
9
|
+
const package = Manager.getPackage('main');
|
|
10
|
+
|
|
11
|
+
module.exports = async function () {
|
|
12
|
+
// Log
|
|
13
|
+
logger.log(`Starting migration to Ultimate Jekyll v${package.version}...`);
|
|
14
|
+
logger.log(`Current working directory: ${process.cwd()}`);
|
|
15
|
+
|
|
16
|
+
try {
|
|
17
|
+
// Run migration tasks
|
|
18
|
+
await migratePosts();
|
|
19
|
+
await migrateAssets();
|
|
20
|
+
await fixPostsLayout();
|
|
21
|
+
await fixPostFilenames();
|
|
22
|
+
|
|
23
|
+
// Log completion
|
|
24
|
+
logger.log(logger.format.green('✓ Migration complete!'));
|
|
25
|
+
} catch (e) {
|
|
26
|
+
logger.error('Migration failed:', e);
|
|
27
|
+
throw e;
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
async function migratePosts() {
|
|
32
|
+
const sourcePath = path.join(process.cwd(), '_posts');
|
|
33
|
+
const targetPath = path.join(process.cwd(), 'src', '_posts');
|
|
34
|
+
|
|
35
|
+
// Check if _posts exists in root
|
|
36
|
+
const sourceExists = jetpack.exists(sourcePath);
|
|
37
|
+
|
|
38
|
+
if (!sourceExists) {
|
|
39
|
+
logger.log('No _posts directory found in root - skipping posts migration');
|
|
40
|
+
return;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Check if target already exists
|
|
44
|
+
const targetExists = jetpack.exists(targetPath);
|
|
45
|
+
|
|
46
|
+
if (targetExists) {
|
|
47
|
+
logger.warn(`Target directory ${targetPath} already exists!`);
|
|
48
|
+
logger.log('Checking for conflicts...');
|
|
49
|
+
|
|
50
|
+
// Get list of files in both directories
|
|
51
|
+
const sourceFiles = jetpack.list(sourcePath) || [];
|
|
52
|
+
const targetFiles = jetpack.list(targetPath) || [];
|
|
53
|
+
|
|
54
|
+
// Find conflicts
|
|
55
|
+
const conflicts = sourceFiles.filter((file) => targetFiles.includes(file));
|
|
56
|
+
|
|
57
|
+
if (conflicts.length > 0) {
|
|
58
|
+
logger.warn(`Found ${conflicts.length} conflicting file(s):`);
|
|
59
|
+
conflicts.forEach((file) => logger.warn(` - ${file}`));
|
|
60
|
+
throw new Error('Cannot migrate _posts: conflicts detected. Please resolve manually.');
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
// Log the migration
|
|
65
|
+
logger.log(`Migrating _posts from root to src/...`);
|
|
66
|
+
logger.log(` Source: ${sourcePath}`);
|
|
67
|
+
logger.log(` Target: ${targetPath}`);
|
|
68
|
+
|
|
69
|
+
// Ensure target directory exists
|
|
70
|
+
jetpack.dir(path.dirname(targetPath));
|
|
71
|
+
|
|
72
|
+
// Move the directory
|
|
73
|
+
jetpack.move(sourcePath, targetPath);
|
|
74
|
+
|
|
75
|
+
// Verify the move
|
|
76
|
+
const moveSuccessful = jetpack.exists(targetPath)
|
|
77
|
+
&& !jetpack.exists(sourcePath);
|
|
78
|
+
|
|
79
|
+
if (moveSuccessful) {
|
|
80
|
+
logger.log(logger.format.green('✓ Successfully migrated _posts to src/_posts'));
|
|
81
|
+
} else {
|
|
82
|
+
throw new Error('Failed to migrate _posts directory');
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
async function migrateAssets() {
|
|
87
|
+
const sourcePath = path.join(process.cwd(), 'assets');
|
|
88
|
+
const targetPath = path.join(process.cwd(), 'src', 'assets');
|
|
89
|
+
|
|
90
|
+
// Check if assets exists in root
|
|
91
|
+
const sourceExists = jetpack.exists(sourcePath);
|
|
92
|
+
|
|
93
|
+
if (!sourceExists) {
|
|
94
|
+
logger.log('No assets directory found in root - skipping assets migration');
|
|
95
|
+
return;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
// Check if target already exists
|
|
99
|
+
const targetExists = jetpack.exists(targetPath);
|
|
100
|
+
|
|
101
|
+
if (targetExists) {
|
|
102
|
+
logger.warn(`Target directory ${targetPath} already exists!`);
|
|
103
|
+
logger.log('Merging assets directories...');
|
|
104
|
+
|
|
105
|
+
// Get list of files in both directories (recursively)
|
|
106
|
+
const sourceFiles = jetpack.find(sourcePath, { matching: '**/*' }) || [];
|
|
107
|
+
const targetFiles = jetpack.find(targetPath, { matching: '**/*' }) || [];
|
|
108
|
+
|
|
109
|
+
// Convert to relative paths for comparison
|
|
110
|
+
const sourceRelative = sourceFiles.map((f) => path.relative(sourcePath, f));
|
|
111
|
+
const targetRelative = targetFiles.map((f) => path.relative(targetPath, f));
|
|
112
|
+
|
|
113
|
+
// Find conflicts
|
|
114
|
+
const conflicts = sourceRelative.filter((file) => targetRelative.includes(file));
|
|
115
|
+
|
|
116
|
+
if (conflicts.length > 0) {
|
|
117
|
+
logger.warn(`Found ${conflicts.length} conflicting file(s):`);
|
|
118
|
+
conflicts.slice(0, 10).forEach((file) => logger.warn(` - ${file}`));
|
|
119
|
+
if (conflicts.length > 10) {
|
|
120
|
+
logger.warn(` ... and ${conflicts.length - 10} more`);
|
|
121
|
+
}
|
|
122
|
+
throw new Error('Cannot migrate assets: conflicts detected. Please resolve manually.');
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
// Move all files from source to target
|
|
126
|
+
logger.log('Moving files...');
|
|
127
|
+
sourceRelative.forEach((file) => {
|
|
128
|
+
const src = path.join(sourcePath, file);
|
|
129
|
+
const dest = path.join(targetPath, file);
|
|
130
|
+
|
|
131
|
+
// Only move files, not directories
|
|
132
|
+
if (jetpack.exists(src) === 'file') {
|
|
133
|
+
jetpack.move(src, dest);
|
|
134
|
+
}
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
// Remove the old assets directory
|
|
138
|
+
jetpack.remove(sourcePath);
|
|
139
|
+
|
|
140
|
+
logger.log(logger.format.green('✓ Successfully merged assets into src/assets'));
|
|
141
|
+
} else {
|
|
142
|
+
// Log the migration
|
|
143
|
+
logger.log(`Migrating assets from root to src/...`);
|
|
144
|
+
logger.log(` Source: ${sourcePath}`);
|
|
145
|
+
logger.log(` Target: ${targetPath}`);
|
|
146
|
+
|
|
147
|
+
// Ensure target directory parent exists
|
|
148
|
+
jetpack.dir(path.dirname(targetPath));
|
|
149
|
+
|
|
150
|
+
// Move the directory
|
|
151
|
+
jetpack.move(sourcePath, targetPath);
|
|
152
|
+
|
|
153
|
+
// Verify the move
|
|
154
|
+
const moveSuccessful = jetpack.exists(targetPath)
|
|
155
|
+
&& !jetpack.exists(sourcePath);
|
|
156
|
+
|
|
157
|
+
if (moveSuccessful) {
|
|
158
|
+
logger.log(logger.format.green('✓ Successfully migrated assets to src/assets'));
|
|
159
|
+
} else {
|
|
160
|
+
throw new Error('Failed to migrate assets directory');
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
async function fixPostsLayout() {
|
|
166
|
+
const postsPath = path.join(process.cwd(), 'src', '_posts');
|
|
167
|
+
|
|
168
|
+
// Check if posts directory exists
|
|
169
|
+
const postsExists = jetpack.exists(postsPath);
|
|
170
|
+
|
|
171
|
+
if (!postsExists) {
|
|
172
|
+
logger.log('No src/_posts directory found - skipping layout fix');
|
|
173
|
+
return;
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
// Get all post files
|
|
177
|
+
const postFiles = jetpack.find(postsPath, {
|
|
178
|
+
matching: ['*.md', '*.html', '*.markdown'],
|
|
179
|
+
}) || [];
|
|
180
|
+
|
|
181
|
+
if (postFiles.length === 0) {
|
|
182
|
+
logger.log('No post files found in src/_posts - skipping layout fix');
|
|
183
|
+
return;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
// Log
|
|
187
|
+
logger.log(`Fixing ${postFiles.length} post file(s)...`);
|
|
188
|
+
|
|
189
|
+
let updatedCount = 0;
|
|
190
|
+
|
|
191
|
+
// Process each post file
|
|
192
|
+
postFiles.forEach((file) => {
|
|
193
|
+
const content = jetpack.read(file);
|
|
194
|
+
|
|
195
|
+
if (!content) {
|
|
196
|
+
return;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
// Match frontmatter
|
|
200
|
+
const frontmatterRegex = /^---\r?\n([\s\S]*?)\r?\n---/;
|
|
201
|
+
const match = content.match(frontmatterRegex);
|
|
202
|
+
|
|
203
|
+
if (!match) {
|
|
204
|
+
logger.warn(` Skipping ${path.basename(file)} - no frontmatter found`);
|
|
205
|
+
return;
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
let frontmatter = match[1];
|
|
209
|
+
let restOfContent = content.slice(match[0].length);
|
|
210
|
+
let modified = false;
|
|
211
|
+
|
|
212
|
+
// 1. Fix layout if needed
|
|
213
|
+
if (!frontmatter.includes('layout: blueprint/blog/post')) {
|
|
214
|
+
frontmatter = frontmatter.replace(
|
|
215
|
+
/^layout:\s*.+$/m,
|
|
216
|
+
'layout: blueprint/blog/post'
|
|
217
|
+
);
|
|
218
|
+
modified = true;
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// 2. Change excerpt to description
|
|
222
|
+
if (frontmatter.includes('excerpt:')) {
|
|
223
|
+
frontmatter = frontmatter.replace(
|
|
224
|
+
/^excerpt:/m,
|
|
225
|
+
'description:'
|
|
226
|
+
);
|
|
227
|
+
modified = true;
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
// 3. Remove affiliate-search-term line
|
|
231
|
+
if (frontmatter.includes('affiliate-search-term:')) {
|
|
232
|
+
frontmatter = frontmatter.replace(
|
|
233
|
+
/^affiliate-search-term:.*\r?\n/m,
|
|
234
|
+
''
|
|
235
|
+
);
|
|
236
|
+
modified = true;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
// 4. Remove ad unit includes from content
|
|
240
|
+
const adUnitRegex = /{%\s*include\s+\/master\/modules\/adunits\/adsense-in-article\.html\s+index="[^"]*"\s*%}/g;
|
|
241
|
+
const cleanedContent = restOfContent.replace(adUnitRegex, '');
|
|
242
|
+
|
|
243
|
+
if (cleanedContent !== restOfContent) {
|
|
244
|
+
restOfContent = cleanedContent;
|
|
245
|
+
modified = true;
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
// Only write if modifications were made
|
|
249
|
+
if (!modified) {
|
|
250
|
+
return;
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
// Write back to file
|
|
254
|
+
const updatedContent = `---\n${frontmatter}\n---${restOfContent}`;
|
|
255
|
+
jetpack.write(file, updatedContent);
|
|
256
|
+
|
|
257
|
+
updatedCount++;
|
|
258
|
+
logger.log(` ✓ Updated ${path.basename(file)}`);
|
|
259
|
+
});
|
|
260
|
+
|
|
261
|
+
if (updatedCount > 0) {
|
|
262
|
+
logger.log(logger.format.green(`✓ Fixed ${updatedCount} post file(s)`));
|
|
263
|
+
} else {
|
|
264
|
+
logger.log('All posts are already up to date');
|
|
265
|
+
}
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
async function fixPostFilenames() {
|
|
269
|
+
const postsPath = path.join(process.cwd(), 'src', '_posts');
|
|
270
|
+
|
|
271
|
+
// Check if posts directory exists
|
|
272
|
+
const postsExists = jetpack.exists(postsPath);
|
|
273
|
+
|
|
274
|
+
if (!postsExists) {
|
|
275
|
+
logger.log('No src/_posts directory found - skipping filename fix');
|
|
276
|
+
return;
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
// Get all post files
|
|
280
|
+
const postFiles = jetpack.find(postsPath, {
|
|
281
|
+
matching: ['*.md', '*.html', '*.markdown'],
|
|
282
|
+
}) || [];
|
|
283
|
+
|
|
284
|
+
if (postFiles.length === 0) {
|
|
285
|
+
logger.log('No post files found in src/_posts - skipping filename fix');
|
|
286
|
+
return;
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
// Log
|
|
290
|
+
logger.log(`Checking post filenames for trailing and leading dashes...`);
|
|
291
|
+
|
|
292
|
+
let renamedCount = 0;
|
|
293
|
+
|
|
294
|
+
// Process each post file
|
|
295
|
+
postFiles.forEach((file) => {
|
|
296
|
+
const dir = path.dirname(file);
|
|
297
|
+
const basename = path.basename(file);
|
|
298
|
+
const ext = path.extname(basename);
|
|
299
|
+
const nameWithoutExt = basename.slice(0, -ext.length);
|
|
300
|
+
|
|
301
|
+
// Remove leading and trailing dashes only
|
|
302
|
+
const cleanedName = nameWithoutExt.replace(/^-+|-+$/g, '');
|
|
303
|
+
|
|
304
|
+
// Check if name changed
|
|
305
|
+
if (cleanedName === nameWithoutExt) {
|
|
306
|
+
return;
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
// Build new filename
|
|
310
|
+
const newFilename = `${cleanedName}${ext}`;
|
|
311
|
+
const newPath = path.join(dir, newFilename);
|
|
312
|
+
|
|
313
|
+
// Check if target already exists
|
|
314
|
+
if (jetpack.exists(newPath)) {
|
|
315
|
+
logger.warn(` Cannot rename ${basename} - ${newFilename} already exists`);
|
|
316
|
+
return;
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
// Rename the file
|
|
320
|
+
jetpack.move(file, newPath);
|
|
321
|
+
|
|
322
|
+
renamedCount++;
|
|
323
|
+
logger.log(` ✓ Renamed ${basename} → ${newFilename}`);
|
|
324
|
+
});
|
|
325
|
+
|
|
326
|
+
if (renamedCount > 0) {
|
|
327
|
+
logger.log(logger.format.green(`✓ Renamed ${renamedCount} post file(s)`));
|
|
328
|
+
} else {
|
|
329
|
+
logger.log('All post filenames are already clean');
|
|
330
|
+
}
|
|
331
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// Libraries
|
|
2
|
+
const Manager = new (require('../build.js'));
|
|
3
|
+
const logger = Manager.logger('minify');
|
|
4
|
+
const { execute } = require('node-powertools');
|
|
5
|
+
|
|
6
|
+
// Load package
|
|
7
|
+
const package = Manager.getPackage('main');
|
|
8
|
+
const project = Manager.getPackage('project');
|
|
9
|
+
|
|
10
|
+
module.exports = async function (options) {
|
|
11
|
+
// Log
|
|
12
|
+
logger.log(`Starting minify...`);
|
|
13
|
+
|
|
14
|
+
// Build environment variables with all options
|
|
15
|
+
const envVars = `UJ_MINIFY_HTML_FORCE=true`;
|
|
16
|
+
|
|
17
|
+
// Run the full build process with minify force enabled
|
|
18
|
+
await execute(`${envVars} bundle exec npm run gulp -- minifyHtml`, { log: true })
|
|
19
|
+
};
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
---
|
|
2
|
+
### ALL PAGES ###
|
|
3
|
+
layout: themes/[ site.theme.id ]/frontend/core/base
|
|
4
|
+
permalink: /test/libraries/error
|
|
5
|
+
|
|
6
|
+
### REGULAR PAGES ###
|
|
7
|
+
meta:
|
|
8
|
+
title: "Error Test Page"
|
|
9
|
+
description: "Test page for demonstrating error handling"
|
|
10
|
+
breadcrumb: "Error Test"
|
|
11
|
+
---
|
|
12
|
+
|
|
13
|
+
<div class="container py-5">
|
|
14
|
+
<div class="row">
|
|
15
|
+
<div class="col-lg-8 mx-auto">
|
|
16
|
+
<h1 class="h2 mb-4">Error Test Page</h1>
|
|
17
|
+
<p class="lead mb-5">This page demonstrates different error handling scenarios.</p>
|
|
18
|
+
|
|
19
|
+
<!-- Trigger Auth Signin Error -->
|
|
20
|
+
<button id="trigger-error" class="btn btn-danger">Trigger Auth Signin Error</button>
|
|
21
|
+
<script>
|
|
22
|
+
document.getElementById('trigger-error').addEventListener('click', function() {
|
|
23
|
+
Manager.webManager.auth().signInWithEmailAndPassword('invalid@example.com', 'wrongpassword')
|
|
24
|
+
});
|
|
25
|
+
</script>
|
|
26
|
+
|
|
27
|
+
</div>
|
|
28
|
+
</div>
|
|
29
|
+
</div>
|
|
@@ -2,7 +2,8 @@
|
|
|
2
2
|
const Manager = new (require('../../build.js'));
|
|
3
3
|
const logger = Manager.logger('minifyHtml');
|
|
4
4
|
const { src, dest, series } = require('gulp');
|
|
5
|
-
const { minify } = require('html
|
|
5
|
+
const { minify: minifyRust } = require('@minify-html/node');
|
|
6
|
+
const { minify: minifyJs } = require('terser');
|
|
6
7
|
const through2 = require('through2');
|
|
7
8
|
|
|
8
9
|
// Load package
|
|
@@ -19,6 +20,33 @@ const input = [
|
|
|
19
20
|
];
|
|
20
21
|
const output = '_site';
|
|
21
22
|
|
|
23
|
+
// Helper function to minify a single file's content using Rust-based minifier
|
|
24
|
+
async function minifyFileContent(htmlContent, options, filePath) {
|
|
25
|
+
// Extract and temporarily replace JSON-LD scripts
|
|
26
|
+
const { content: contentAfterJsonLd, extracted: jsonLdScripts } = extractJsonLdScripts(htmlContent);
|
|
27
|
+
|
|
28
|
+
// Extract and temporarily replace inline scripts (minified with Terser)
|
|
29
|
+
const { content: contentAfterScripts, extracted: inlineScripts } = await extractInlineScripts(contentAfterJsonLd, filePath);
|
|
30
|
+
|
|
31
|
+
// Extract and temporarily replace IE conditional comments
|
|
32
|
+
const { content: contentAfterComments, extracted: conditionalComments } = extractConditionalComments(contentAfterScripts);
|
|
33
|
+
|
|
34
|
+
// Minify the HTML content using Rust-based minifier (synchronous, much faster)
|
|
35
|
+
const minifiedBuffer = minifyRust(Buffer.from(contentAfterComments), options);
|
|
36
|
+
const minified = minifiedBuffer.toString();
|
|
37
|
+
|
|
38
|
+
// Restore the conditional comments
|
|
39
|
+
let finalHtml = restoreConditionalComments(minified, conditionalComments);
|
|
40
|
+
|
|
41
|
+
// Restore the inline scripts
|
|
42
|
+
finalHtml = restoreInlineScripts(finalHtml, inlineScripts);
|
|
43
|
+
|
|
44
|
+
// Restore the JSON-LD scripts
|
|
45
|
+
finalHtml = restoreJsonLdScripts(finalHtml, jsonLdScripts);
|
|
46
|
+
|
|
47
|
+
return finalHtml;
|
|
48
|
+
}
|
|
49
|
+
|
|
22
50
|
// Main task
|
|
23
51
|
function minifyHtmlTask(complete) {
|
|
24
52
|
// Check if we should minify
|
|
@@ -33,92 +61,248 @@ function minifyHtmlTask(complete) {
|
|
|
33
61
|
logger.log('Starting...');
|
|
34
62
|
Manager.logMemory(logger, 'Start');
|
|
35
63
|
|
|
36
|
-
// Configure minify options
|
|
64
|
+
// Configure minify options for @minify-html/node (Rust-based)
|
|
65
|
+
// NOTE: Inline scripts are extracted before minification to avoid bugs in minify-js
|
|
37
66
|
const options = {
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
minifyCSS: true,
|
|
48
|
-
minifyJS: true
|
|
67
|
+
keep_closing_tags: false,
|
|
68
|
+
keep_comments: false,
|
|
69
|
+
keep_html_and_head_opening_tags: false,
|
|
70
|
+
keep_spaces_between_attributes: false,
|
|
71
|
+
keep_ssi_comments: false,
|
|
72
|
+
minify_css: true,
|
|
73
|
+
minify_js: false, // Disabled - inline scripts are extracted, so nothing to minify
|
|
74
|
+
remove_bangs: false,
|
|
75
|
+
remove_processing_instructions: false
|
|
49
76
|
};
|
|
50
77
|
|
|
78
|
+
// Get concurrency limit from environment or use default
|
|
79
|
+
const CONCURRENCY_LIMIT = parseInt(process.env.UJ_MINIFY_CONCURRENCY || '1', 10);
|
|
80
|
+
logger.log(`Concurrency: ${CONCURRENCY_LIMIT} files at a time`);
|
|
81
|
+
|
|
82
|
+
// Collect files for batch processing
|
|
83
|
+
const fileQueue = [];
|
|
84
|
+
const processed = { count: 0 };
|
|
85
|
+
|
|
51
86
|
// Process HTML files
|
|
52
87
|
return src(input)
|
|
53
|
-
.pipe(through2.obj(
|
|
88
|
+
.pipe(through2.obj(function(file, _enc, callback) {
|
|
54
89
|
if (file.isBuffer()) {
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
const
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
.
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
conditionalComments.forEach((commentContent, index) => {
|
|
103
|
-
finalHtml = finalHtml.replace(`__CONDITIONAL_COMMENT_PLACEHOLDER_${index}__`, commentContent);
|
|
104
|
-
});
|
|
105
|
-
|
|
106
|
-
file.contents = Buffer.from(finalHtml);
|
|
107
|
-
} catch (err) {
|
|
108
|
-
logger.error(`Error minifying ${file.path}: ${err.message}`);
|
|
90
|
+
fileQueue.push({ file });
|
|
91
|
+
callback();
|
|
92
|
+
} else {
|
|
93
|
+
callback(null, file);
|
|
94
|
+
}
|
|
95
|
+
}, async function(callback) {
|
|
96
|
+
// This function is called when all files have been queued
|
|
97
|
+
if (fileQueue.length === 0) {
|
|
98
|
+
logger.log('No HTML files to minify');
|
|
99
|
+
return callback();
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
const totalFiles = fileQueue.length;
|
|
103
|
+
logger.log(`Minifying ${totalFiles} HTML files...`);
|
|
104
|
+
|
|
105
|
+
try {
|
|
106
|
+
// Process files in batches
|
|
107
|
+
for (let i = 0; i < fileQueue.length; i += CONCURRENCY_LIMIT) {
|
|
108
|
+
const batch = fileQueue.slice(i, i + CONCURRENCY_LIMIT);
|
|
109
|
+
|
|
110
|
+
// Process batch in parallel
|
|
111
|
+
const processedFiles = await Promise.all(
|
|
112
|
+
batch.map(async ({ file }) => {
|
|
113
|
+
try {
|
|
114
|
+
const htmlContent = file.contents.toString();
|
|
115
|
+
const finalHtml = await minifyFileContent(htmlContent, options, file.path);
|
|
116
|
+
file.contents = Buffer.from(finalHtml);
|
|
117
|
+
processed.count++;
|
|
118
|
+
|
|
119
|
+
// Log progress every 50 files or on last file
|
|
120
|
+
if (processed.count % 50 === 0 || processed.count === totalFiles) {
|
|
121
|
+
const percentage = ((processed.count / totalFiles) * 100).toFixed(1);
|
|
122
|
+
logger.log(`Progress: ${processed.count}/${totalFiles} files (${percentage}%)`);
|
|
123
|
+
Manager.logMemory(logger, `After ${processed.count} files`);
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
return file;
|
|
127
|
+
} catch (err) {
|
|
128
|
+
logger.error(`Error minifying ${file.path}: ${err.message}`);
|
|
129
|
+
return file;
|
|
130
|
+
}
|
|
131
|
+
})
|
|
132
|
+
);
|
|
133
|
+
|
|
134
|
+
// Push processed files to the stream
|
|
135
|
+
processedFiles.forEach(file => this.push(file));
|
|
109
136
|
}
|
|
137
|
+
|
|
138
|
+
callback();
|
|
139
|
+
} catch (err) {
|
|
140
|
+
logger.error(`Batch processing error: ${err.message}`);
|
|
141
|
+
callback(err);
|
|
110
142
|
}
|
|
111
|
-
callback(null, file);
|
|
112
143
|
}))
|
|
113
144
|
.pipe(dest(output))
|
|
114
145
|
.on('finish', () => {
|
|
115
146
|
// Log
|
|
116
147
|
logger.log('Finished!');
|
|
148
|
+
Manager.logMemory(logger, 'End');
|
|
117
149
|
|
|
118
150
|
// Complete
|
|
119
|
-
|
|
151
|
+
complete();
|
|
120
152
|
});
|
|
121
153
|
}
|
|
122
154
|
|
|
155
|
+
// Helper: Extract JSON-LD scripts and replace with placeholders
|
|
156
|
+
function extractJsonLdScripts(htmlContent) {
|
|
157
|
+
const extracted = [];
|
|
158
|
+
// Match both quoted and unquoted type attributes (minifier removes quotes)
|
|
159
|
+
const jsonLdRegex = /<script[^>]*type=(?:["']?application\/ld\+json["']?)[^>]*>([\s\S]*?)<\/script>/gi;
|
|
160
|
+
|
|
161
|
+
const content = htmlContent.replace(jsonLdRegex, (match, jsonContent) => {
|
|
162
|
+
// Minify the JSON content
|
|
163
|
+
try {
|
|
164
|
+
const parsed = JSON.parse(jsonContent);
|
|
165
|
+
const minifiedJson = JSON.stringify(parsed);
|
|
166
|
+
extracted.push(minifiedJson);
|
|
167
|
+
} catch (e) {
|
|
168
|
+
extracted.push(jsonContent);
|
|
169
|
+
}
|
|
170
|
+
return `__JSON_LD_PLACEHOLDER_${extracted.length - 1}__`;
|
|
171
|
+
});
|
|
172
|
+
|
|
173
|
+
return { content, extracted };
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
// Helper: Restore JSON-LD scripts from placeholders
|
|
177
|
+
function restoreJsonLdScripts(htmlContent, jsonLdScripts) {
|
|
178
|
+
let content = htmlContent;
|
|
179
|
+
|
|
180
|
+
jsonLdScripts.forEach((jsonContent, index) => {
|
|
181
|
+
const scriptTag = `<script type=application/ld+json>${jsonContent}</script>`;
|
|
182
|
+
content = content.replace(`__JSON_LD_PLACEHOLDER_${index}__`, scriptTag);
|
|
183
|
+
});
|
|
184
|
+
|
|
185
|
+
return content;
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
// Helper: Extract inline scripts, minify with Terser, and replace with placeholders
|
|
189
|
+
async function extractInlineScripts(htmlContent, filePath) {
|
|
190
|
+
const extracted = [];
|
|
191
|
+
const scripts = [];
|
|
192
|
+
|
|
193
|
+
// Match <script> tags that are NOT application/ld+json (those are already extracted)
|
|
194
|
+
// This regex excludes external scripts (those with src attribute)
|
|
195
|
+
// Handles both quoted and unquoted type attributes (minifier removes quotes)
|
|
196
|
+
const scriptRegex = /<script(?![^>]*type=(?:["']?application\/ld\+json["']?))(?![^>]*src=)([^>]*)>([\s\S]*?)<\/script>/gi;
|
|
197
|
+
|
|
198
|
+
// First pass: collect all scripts and create placeholders
|
|
199
|
+
const content = htmlContent.replace(scriptRegex, (fullMatch, attributes, jsCode) => {
|
|
200
|
+
const index = scripts.length;
|
|
201
|
+
scripts.push({ fullMatch, attributes, jsCode });
|
|
202
|
+
return `__INLINE_SCRIPT_PLACEHOLDER_${index}__`;
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
// Second pass: minify all scripts in parallel
|
|
206
|
+
const minifyPromises = scripts.map(async ({ fullMatch, attributes, jsCode }, scriptIndex) => {
|
|
207
|
+
// Skip empty scripts
|
|
208
|
+
if (!jsCode.trim()) {
|
|
209
|
+
return fullMatch;
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
// Try to minify the JavaScript with Terser
|
|
213
|
+
try {
|
|
214
|
+
const minified = await minifyJs(jsCode, {
|
|
215
|
+
compress: {
|
|
216
|
+
dead_code: true,
|
|
217
|
+
drop_console: false,
|
|
218
|
+
drop_debugger: true,
|
|
219
|
+
keep_classnames: false,
|
|
220
|
+
keep_fargs: true,
|
|
221
|
+
keep_fnames: false,
|
|
222
|
+
keep_infinity: false,
|
|
223
|
+
},
|
|
224
|
+
mangle: false, // Don't mangle variable names to avoid breaking code
|
|
225
|
+
format: {
|
|
226
|
+
comments: false,
|
|
227
|
+
},
|
|
228
|
+
});
|
|
229
|
+
|
|
230
|
+
if (minified && minified.code) {
|
|
231
|
+
return `<script${attributes}>${minified.code}</script>`;
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
return fullMatch;
|
|
235
|
+
} catch (err) {
|
|
236
|
+
// Minification failed - use original and log detailed error
|
|
237
|
+
const preview = jsCode.length > 100 ? jsCode.substring(0, 100) + '...' : jsCode;
|
|
238
|
+
const lines = jsCode.split('\n');
|
|
239
|
+
|
|
240
|
+
logger.error(`Failed to minify inline script in ${filePath}`);
|
|
241
|
+
logger.error(` Script #${scriptIndex + 1} (${lines.length} lines)`);
|
|
242
|
+
logger.error(` Error: ${err.message}`);
|
|
243
|
+
|
|
244
|
+
if (err.line !== undefined) {
|
|
245
|
+
logger.error(` Line ${err.line}, Column ${err.col || '?'}`);
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
logger.error(` Preview: ${preview.replace(/\n/g, ' ')}`);
|
|
249
|
+
|
|
250
|
+
return fullMatch;
|
|
251
|
+
}
|
|
252
|
+
});
|
|
253
|
+
|
|
254
|
+
// Wait for all minification to complete
|
|
255
|
+
const minifiedScripts = await Promise.all(minifyPromises);
|
|
256
|
+
|
|
257
|
+
// Add all minified scripts to extracted array
|
|
258
|
+
minifiedScripts.forEach(script => extracted.push(script));
|
|
259
|
+
|
|
260
|
+
return { content, extracted };
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
// Helper: Restore inline scripts from placeholders
|
|
264
|
+
function restoreInlineScripts(htmlContent, inlineScripts) {
|
|
265
|
+
let content = htmlContent;
|
|
266
|
+
|
|
267
|
+
inlineScripts.forEach((scriptContent, index) => {
|
|
268
|
+
content = content.replace(`__INLINE_SCRIPT_PLACEHOLDER_${index}__`, scriptContent);
|
|
269
|
+
});
|
|
270
|
+
|
|
271
|
+
return content;
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
// Helper: Extract IE conditional comments and replace with placeholders
|
|
275
|
+
function extractConditionalComments(htmlContent) {
|
|
276
|
+
const extracted = [];
|
|
277
|
+
const conditionalRegex = /<!--\[if[^>]*\]>([\s\S]*?)<!\[endif\]-->/gi;
|
|
278
|
+
|
|
279
|
+
const content = htmlContent.replace(conditionalRegex, (match, commentContent) => {
|
|
280
|
+
// Minify the content inside the conditional comment
|
|
281
|
+
try {
|
|
282
|
+
const minifiedContent = commentContent
|
|
283
|
+
.replace(/\s+/g, ' ')
|
|
284
|
+
.replace(/>\s+</g, '><')
|
|
285
|
+
.trim();
|
|
286
|
+
extracted.push(match.replace(commentContent, minifiedContent));
|
|
287
|
+
} catch (e) {
|
|
288
|
+
extracted.push(match);
|
|
289
|
+
}
|
|
290
|
+
return `__CONDITIONAL_COMMENT_PLACEHOLDER_${extracted.length - 1}__`;
|
|
291
|
+
});
|
|
292
|
+
|
|
293
|
+
return { content, extracted };
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
// Helper: Restore IE conditional comments from placeholders
|
|
297
|
+
function restoreConditionalComments(htmlContent, conditionalComments) {
|
|
298
|
+
let content = htmlContent;
|
|
299
|
+
|
|
300
|
+
conditionalComments.forEach((commentContent, index) => {
|
|
301
|
+
content = content.replace(`__CONDITIONAL_COMMENT_PLACEHOLDER_${index}__`, commentContent);
|
|
302
|
+
});
|
|
303
|
+
|
|
304
|
+
return content;
|
|
305
|
+
}
|
|
306
|
+
|
|
123
307
|
// Default Task (no watcher for minifyHtml as it runs after Jekyll build)
|
|
124
308
|
module.exports = minifyHtmlTask;
|
|
@@ -82,6 +82,11 @@ let index = -1;
|
|
|
82
82
|
const input = [
|
|
83
83
|
// Files to include
|
|
84
84
|
'_site/**/*.html',
|
|
85
|
+
|
|
86
|
+
// Files to exclude
|
|
87
|
+
// Test pages (except translation.html)
|
|
88
|
+
'!_site/**/test/**',
|
|
89
|
+
'_site/test/translation.html',
|
|
85
90
|
];
|
|
86
91
|
const output = '';
|
|
87
92
|
const delay = 250;
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
// Libraries
|
|
2
|
+
const Manager = new (require('../../build.js'));
|
|
3
|
+
const logger = Manager.logger('minifyHtml');
|
|
4
|
+
const { src, dest, series } = require('gulp');
|
|
5
|
+
const { minify } = require('html-minifier-terser');
|
|
6
|
+
const through2 = require('through2');
|
|
7
|
+
|
|
8
|
+
// Load package
|
|
9
|
+
const package = Manager.getPackage('main');
|
|
10
|
+
const project = Manager.getPackage('project');
|
|
11
|
+
const config = Manager.getConfig('project');
|
|
12
|
+
const rootPathPackage = Manager.getRootPath('main');
|
|
13
|
+
const rootPathProject = Manager.getRootPath('project');
|
|
14
|
+
|
|
15
|
+
// Glob
|
|
16
|
+
const input = [
|
|
17
|
+
// Files to include
|
|
18
|
+
'_site/**/*.html',
|
|
19
|
+
];
|
|
20
|
+
const output = '_site';
|
|
21
|
+
|
|
22
|
+
// Helper function to minify a single file's content.
// JSON-LD scripts and IE conditional comments are pulled out before running
// the HTML minifier (which would otherwise mangle or strip them), replaced
// with placeholders, and restored afterwards.
// @param {string} htmlContent - Raw HTML to minify
// @param {object} options - Options passed straight through to `minify`
// @returns {Promise<string>} The minified HTML with protected blocks restored
async function minifyFileContent(htmlContent, options) {
  // Extract and temporarily replace JSON-LD scripts
  const jsonLdScripts = [];
  const jsonLdRegex = /<script[^>]*type=["']application\/ld\+json["'][^>]*>([\s\S]*?)<\/script>/gi;

  htmlContent = htmlContent.replace(jsonLdRegex, (match, jsonContent) => {
    // Minify the JSON content; keep the original text if it isn't valid JSON
    try {
      jsonLdScripts.push(JSON.stringify(JSON.parse(jsonContent)));
    } catch (e) {
      jsonLdScripts.push(jsonContent);
    }
    return `__JSON_LD_PLACEHOLDER_${jsonLdScripts.length - 1}__`;
  });

  // Extract and temporarily replace IE conditional comments
  const conditionalComments = [];
  const conditionalRegex = /<!--\[if[^>]*\]>([\s\S]*?)<!\[endif\]-->/gi;

  htmlContent = htmlContent.replace(conditionalRegex, (match, content) => {
    // Whitespace-collapse the content inside the conditional comment
    try {
      const minifiedContent = content
        .replace(/\s+/g, ' ')
        .replace(/>\s+</g, '><')
        .trim();
      // FIX: use a replacer function so `$`-sequences inside the minified
      // content are inserted literally, not expanded as replacement patterns.
      conditionalComments.push(match.replace(content, () => minifiedContent));
    } catch (e) {
      conditionalComments.push(match);
    }
    return `__CONDITIONAL_COMMENT_PLACEHOLDER_${conditionalComments.length - 1}__`;
  });

  // Minify the HTML content
  const minified = await minify(htmlContent, options);

  // Restore the JSON-LD scripts and conditional comments.
  // FIX: pass a replacer FUNCTION to String.prototype.replace so that `$&`,
  // `$'`, `$1`, etc. inside the restored content are inserted literally
  // instead of being interpreted as special replacement patterns (a plain
  // string replacement would corrupt any JSON-LD/comment containing `$`).
  let finalHtml = minified;
  jsonLdScripts.forEach((jsonContent, index) => {
    const scriptTag = `<script type=application/ld+json>${jsonContent}</script>`;
    finalHtml = finalHtml.replace(`__JSON_LD_PLACEHOLDER_${index}__`, () => scriptTag);
  });

  conditionalComments.forEach((commentContent, index) => {
    finalHtml = finalHtml.replace(`__CONDITIONAL_COMMENT_PLACEHOLDER_${index}__`, () => commentContent);
  });

  return finalHtml;
}
|
|
74
|
+
|
|
75
|
+
// Helper function to minify one batch of vinyl files concurrently.
// `processed` is a shared { count } accumulator so progress reporting spans
// batches; `total` is the overall file count for percentage math.
async function processBatch(batch, options, processed, total) {
  const tasks = batch.map(async ({ file }) => {
    try {
      const original = file.contents.toString();
      const minifiedHtml = await minifyFileContent(original, options);
      file.contents = Buffer.from(minifiedHtml);
      processed.count++;

      // Log progress every 10 files or on the final file
      const isMilestone = processed.count % 10 === 0;
      const isLast = processed.count === total;
      if (isMilestone || isLast) {
        const percentage = ((processed.count / total) * 100).toFixed(1);
        logger.log(`Progress: ${processed.count}/${total} files (${percentage}%)`);
        Manager.logMemory(logger, `After ${processed.count} files`);
      }
    } catch (err) {
      // Best-effort: keep the unminified file in the stream on failure
      logger.error(`Error minifying ${file.path}: ${err.message}`);
    }
    return file;
  });

  return Promise.all(tasks);
}
|
|
98
|
+
|
|
99
|
+
// Main task: minify all built HTML files in place (`_site` -> `_site`).
// Skipped outside production builds unless UJ_MINIFY_HTML_FORCE=true.
// @param {function} complete - Gulp completion callback (called with an Error on failure)
function minifyHtmlTask(complete) {
  // Check if we should minify
  const shouldMinify = Manager.isBuildMode() || process.env.UJ_MINIFY_HTML_FORCE === 'true';

  if (!shouldMinify) {
    logger.log('Skipping HTML minification (not in production mode and UJ_MINIFY_HTML_FORCE not set)');
    return complete();
  }

  // Get concurrency limit from environment or use default
  const CONCURRENCY_LIMIT = parseInt(process.env.UJ_MINIFY_CONCURRENCY || '10', 10);

  // Log
  logger.log('Starting...');
  logger.log(`Concurrency limit: ${CONCURRENCY_LIMIT} files at a time`);
  Manager.logMemory(logger, 'Start');

  // Configure minify options
  const options = {
    collapseWhitespace: true,
    removeComments: true,
    removeAttributeQuotes: true,
    removeRedundantAttributes: true,
    removeScriptTypeAttributes: true,
    removeStyleLinkTypeAttributes: true,
    useShortDoctype: true,
    removeEmptyAttributes: true,
    removeOptionalTags: false,
    minifyCSS: true,
    minifyJS: true
  };

  // Collect files for batch processing
  const fileQueue = [];
  const processed = { count: 0 };

  // FIX: guard so `complete` fires exactly once, and is also called when any
  // stream errors. Previously a flush error (`callback(err)`) was never
  // handled, so `complete` was never invoked and the gulp task hung.
  let completed = false;
  const done = (err) => {
    if (completed) return;
    completed = true;

    if (err) {
      logger.error(`Minification stream error: ${err.message}`);
      return complete(err);
    }

    // Log
    logger.log('Finished!');
    Manager.logMemory(logger, 'End');

    // Complete
    complete();
  };

  // NOTE: both through2 callbacks must be regular functions (not arrows)
  // because `this` is the transform stream itself inside the flush handler.
  const minifier = through2.obj(function (file, _enc, callback) {
    if (file.isBuffer()) {
      fileQueue.push({ file });
      callback();
    } else {
      callback(null, file);
    }
  }, async function (callback) {
    // Flush: called once all files have been queued
    if (fileQueue.length === 0) {
      logger.log('No HTML files to minify');
      return callback();
    }

    const totalFiles = fileQueue.length;
    logger.log(`Minifying ${totalFiles} HTML files...`);

    try {
      // Process files in batches of CONCURRENCY_LIMIT
      for (let i = 0; i < fileQueue.length; i += CONCURRENCY_LIMIT) {
        const batch = fileQueue.slice(i, i + CONCURRENCY_LIMIT);
        const processedFiles = await processBatch(batch, options, processed, totalFiles);

        // Push processed files to the stream
        processedFiles.forEach(file => this.push(file));
      }

      callback();
    } catch (err) {
      logger.error(`Batch processing error: ${err.message}`);
      callback(err);
    }
  });

  // Process HTML files. `.pipe` does not forward errors, so each stream in
  // the chain gets its own error handler routed through `done`.
  return src(input)
    .on('error', done)
    .pipe(minifier)
    .on('error', done)
    .pipe(dest(output))
    .on('error', done)
    .on('finish', () => done());
}

// Default Task (no watcher for minifyHtml as it runs after Jekyll build)
module.exports = minifyHtmlTask;
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
// Worker thread for HTML minification
|
|
2
|
+
const { parentPort, workerData } = require('worker_threads');
|
|
3
|
+
const { minify } = require('html-minifier-terser');
|
|
4
|
+
|
|
5
|
+
// Listen for messages from parent
|
|
6
|
+
parentPort.on('message', async (data) => {
|
|
7
|
+
const { htmlContent, options, index } = data;
|
|
8
|
+
|
|
9
|
+
try {
|
|
10
|
+
// Extract and temporarily replace JSON-LD scripts
|
|
11
|
+
const jsonLdScripts = [];
|
|
12
|
+
const jsonLdRegex = /<script[^>]*type=["']application\/ld\+json["'][^>]*>([\s\S]*?)<\/script>/gi;
|
|
13
|
+
|
|
14
|
+
let processedContent = htmlContent.replace(jsonLdRegex, (match, jsonContent) => {
|
|
15
|
+
// Minify the JSON content
|
|
16
|
+
try {
|
|
17
|
+
const parsed = JSON.parse(jsonContent);
|
|
18
|
+
const minifiedJson = JSON.stringify(parsed);
|
|
19
|
+
jsonLdScripts.push(minifiedJson);
|
|
20
|
+
} catch (e) {
|
|
21
|
+
jsonLdScripts.push(jsonContent);
|
|
22
|
+
}
|
|
23
|
+
return `__JSON_LD_PLACEHOLDER_${jsonLdScripts.length - 1}__`;
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
// Extract and temporarily replace IE conditional comments
|
|
27
|
+
const conditionalComments = [];
|
|
28
|
+
const conditionalRegex = /<!--\[if[^>]*\]>([\s\S]*?)<!\[endif\]-->/gi;
|
|
29
|
+
|
|
30
|
+
processedContent = processedContent.replace(conditionalRegex, (match, content) => {
|
|
31
|
+
// Minify the content inside the conditional comment
|
|
32
|
+
try {
|
|
33
|
+
const minifiedContent = content
|
|
34
|
+
.replace(/\s+/g, ' ')
|
|
35
|
+
.replace(/>\s+</g, '><')
|
|
36
|
+
.trim();
|
|
37
|
+
conditionalComments.push(match.replace(content, minifiedContent));
|
|
38
|
+
} catch (e) {
|
|
39
|
+
conditionalComments.push(match);
|
|
40
|
+
}
|
|
41
|
+
return `__CONDITIONAL_COMMENT_PLACEHOLDER_${conditionalComments.length - 1}__`;
|
|
42
|
+
});
|
|
43
|
+
|
|
44
|
+
// Minify the HTML content
|
|
45
|
+
const minified = await minify(processedContent, options);
|
|
46
|
+
|
|
47
|
+
// Restore the JSON-LD scripts and conditional comments
|
|
48
|
+
let finalHtml = minified;
|
|
49
|
+
jsonLdScripts.forEach((jsonContent, idx) => {
|
|
50
|
+
const scriptTag = `<script type=application/ld+json>${jsonContent}</script>`;
|
|
51
|
+
finalHtml = finalHtml.replace(`__JSON_LD_PLACEHOLDER_${idx}__`, scriptTag);
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
conditionalComments.forEach((commentContent, idx) => {
|
|
55
|
+
finalHtml = finalHtml.replace(`__CONDITIONAL_COMMENT_PLACEHOLDER_${idx}__`, commentContent);
|
|
56
|
+
});
|
|
57
|
+
|
|
58
|
+
// Send result back to parent
|
|
59
|
+
parentPort.postMessage({
|
|
60
|
+
success: true,
|
|
61
|
+
index,
|
|
62
|
+
result: finalHtml
|
|
63
|
+
});
|
|
64
|
+
} catch (err) {
|
|
65
|
+
// Send error back to parent
|
|
66
|
+
parentPort.postMessage({
|
|
67
|
+
success: false,
|
|
68
|
+
index,
|
|
69
|
+
error: err.message
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
});
|
|
@@ -104,7 +104,11 @@ function getSettings() {
|
|
|
104
104
|
? DEFAULT_WEBPACK_TARGET
|
|
105
105
|
: (ujmConfig?.webpack?.target || DEFAULT_WEBPACK_TARGET)
|
|
106
106
|
],
|
|
107
|
-
devtool: Manager.actLikeProduction() ? 'source-map' : 'eval-source-map',
|
|
107
|
+
// devtool: Manager.actLikeProduction() ? 'source-map' : 'eval-source-map',
|
|
108
|
+
// Production: nosources-source-map, hidden-source-map
|
|
109
|
+
devtool: Manager.actLikeProduction() ? false : 'eval-source-map',
|
|
110
|
+
// devtool: 'nosources-source-map',
|
|
111
|
+
// devtool: 'source-map',
|
|
108
112
|
// devtool: false,
|
|
109
113
|
plugins: [
|
|
110
114
|
new StripDevBlocksPlugin(),
|
|
@@ -213,7 +217,8 @@ function getSettings() {
|
|
|
213
217
|
use: {
|
|
214
218
|
loader: 'babel-loader',
|
|
215
219
|
options: {
|
|
216
|
-
|
|
220
|
+
// sourceMaps: false,
|
|
221
|
+
sourceMaps: !Manager.actLikeProduction(),
|
|
217
222
|
presets: [
|
|
218
223
|
[require.resolve('@babel/preset-env', {
|
|
219
224
|
paths: [path.resolve(process.cwd(), 'node_modules', package.name, 'node_modules')]
|
|
@@ -319,7 +324,8 @@ function webpack(complete) {
|
|
|
319
324
|
use: {
|
|
320
325
|
loader: 'babel-loader',
|
|
321
326
|
options: {
|
|
322
|
-
sourceMaps:
|
|
327
|
+
// sourceMaps: false,
|
|
328
|
+
sourceMaps: !Manager.actLikeProduction(),
|
|
323
329
|
presets: [
|
|
324
330
|
[require.resolve('@babel/preset-env', {
|
|
325
331
|
paths: [path.resolve(process.cwd(), 'node_modules', package.name, 'node_modules')]
|
package/firebase-debug.log
CHANGED
|
@@ -54,3 +54,35 @@
|
|
|
54
54
|
[debug] [2025-10-22T06:51:09.274Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
55
55
|
[debug] [2025-10-22T06:51:09.274Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
56
56
|
[debug] [2025-10-22T06:51:09.274Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
57
|
+
[debug] [2025-10-22T08:40:01.642Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
58
|
+
[debug] [2025-10-22T08:40:01.642Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
59
|
+
[debug] [2025-10-22T08:40:01.644Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
60
|
+
[debug] [2025-10-22T08:40:01.644Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
61
|
+
[debug] [2025-10-22T08:40:01.644Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
62
|
+
[debug] [2025-10-22T08:40:01.644Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
63
|
+
[debug] [2025-10-22T08:40:01.644Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
64
|
+
[debug] [2025-10-22T08:40:01.644Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
65
|
+
[debug] [2025-10-22T09:43:23.845Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
66
|
+
[debug] [2025-10-22T09:43:23.848Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
67
|
+
[debug] [2025-10-22T09:43:23.848Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
68
|
+
[debug] [2025-10-22T09:43:23.848Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
69
|
+
[debug] [2025-10-22T09:43:23.847Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
70
|
+
[debug] [2025-10-22T09:43:23.853Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
71
|
+
[debug] [2025-10-22T09:43:23.853Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
72
|
+
[debug] [2025-10-22T09:43:23.854Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
73
|
+
[debug] [2025-10-22T10:05:29.839Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
74
|
+
[debug] [2025-10-22T10:05:29.841Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
75
|
+
[debug] [2025-10-22T10:05:29.841Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
76
|
+
[debug] [2025-10-22T10:05:29.841Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
77
|
+
[debug] [2025-10-22T10:05:29.842Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
78
|
+
[debug] [2025-10-22T10:05:29.844Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
79
|
+
[debug] [2025-10-22T10:05:29.844Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
80
|
+
[debug] [2025-10-22T10:05:29.844Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
81
|
+
[debug] [2025-10-22T10:35:12.194Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
82
|
+
[debug] [2025-10-22T10:35:12.196Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
83
|
+
[debug] [2025-10-22T10:35:12.197Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
84
|
+
[debug] [2025-10-22T10:35:12.197Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
85
|
+
[debug] [2025-10-22T10:35:12.197Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
86
|
+
[debug] [2025-10-22T10:35:12.198Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
|
87
|
+
[debug] [2025-10-22T10:35:12.198Z] > command requires scopes: ["email","openid","https://www.googleapis.com/auth/cloudplatformprojects.readonly","https://www.googleapis.com/auth/firebase","https://www.googleapis.com/auth/cloud-platform"]
|
|
88
|
+
[debug] [2025-10-22T10:35:12.198Z] > authorizing via signed-in user (ian.wiedenman@gmail.com)
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "ultimate-jekyll-manager",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.95",
|
|
4
4
|
"description": "Ultimate Jekyll dependency manager",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"exports": {
|
|
@@ -62,6 +62,7 @@
|
|
|
62
62
|
"@babel/core": "^7.28.4",
|
|
63
63
|
"@babel/preset-env": "^7.28.3",
|
|
64
64
|
"@fullhuman/postcss-purgecss": "^7.0.2",
|
|
65
|
+
"@minify-html/node": "^0.16.4",
|
|
65
66
|
"@octokit/rest": "^22.0.0",
|
|
66
67
|
"@popperjs/core": "^2.11.8",
|
|
67
68
|
"@prettier/plugin-xml": "^3.4.2",
|