@kenjura/ursa 0.52.0 → 0.54.0
This diff shows the changes between publicly released versions of this package as they appear in the public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +16 -0
- package/meta/menu.js +44 -13
- package/package.json +2 -1
- package/src/helper/automenu.js +23 -12
- package/src/helper/build/autoIndex.js +197 -0
- package/src/helper/build/batch.js +19 -0
- package/src/helper/build/cacheBust.js +62 -0
- package/src/helper/build/excludeFilter.js +67 -0
- package/src/helper/build/footer.js +113 -0
- package/src/helper/build/index.js +13 -0
- package/src/helper/build/menu.js +19 -0
- package/src/helper/build/metadata.js +30 -0
- package/src/helper/build/pathUtils.js +13 -0
- package/src/helper/build/progress.js +35 -0
- package/src/helper/build/templates.js +30 -0
- package/src/helper/build/titleCase.js +7 -0
- package/src/helper/build/watchCache.js +26 -0
- package/src/jobs/generate.js +82 -573
- package/src/serve.js +10 -0
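
Most of this release is a refactor of `src/jobs/generate.js`: the helpers that previously lived inline in that file (batch processing, progress reporting, cache busting, exclude filters, templates, menu, footer, and auto-index generation) now live in the new `src/helper/build/` modules listed above, and generate.js imports them through a single barrel file. The barrel itself (`src/helper/build/index.js`, +13 lines) is not reproduced in this diff; the sketch below is inferred from the import block in the generate.js diff, and the mapping of each export to a specific new file is an assumption based on the filenames, not the published source.

// src/helper/build/index.js — illustrative sketch only, not the published code.
// Re-exports the build helpers so generate.js can import them from one place.
export { processBatched } from "./batch.js";
export { ProgressReporter } from "./progress.js";
export { watchModeCache, clearWatchCache } from "./watchCache.js";
export {
  generateCacheBustTimestamp,
  addTimestampToCssUrls,
  addTimestampToHtmlStaticRefs,
} from "./cacheBust.js";
export { parseExcludeOption, createExcludeFilter } from "./excludeFilter.js";
export { toTitleCase } from "./titleCase.js";
export { addTrailingSlash } from "./pathUtils.js";
export { getTemplates } from "./templates.js";
export { getMenu } from "./menu.js";
export { getTransformedMetadata } from "./metadata.js";
export { getFooter } from "./footer.js";
export { generateAutoIndices } from "./autoIndex.js";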
package/src/jobs/generate.js
CHANGED
@@ -1,108 +1,5 @@
 import { recurse } from "../helper/recursive-readdir.js";
-
 import { copyFile, mkdir, readdir, readFile, stat } from "fs/promises";
-
-// Concurrency limiter for batch processing to avoid memory exhaustion
-const BATCH_SIZE = parseInt(process.env.URSA_BATCH_SIZE || '50', 10);
-
-/**
- * Cache for watch mode - stores expensive data that doesn't change often
- * This allows single-file regeneration to skip re-building menu, templates, etc.
- */
-const watchModeCache = {
-  templates: null,
-  menu: null,
-  footer: null,
-  validPaths: null,
-  source: null,
-  meta: null,
-  output: null,
-  hashCache: null,
-  lastFullBuild: 0,
-  isInitialized: false,
-};
-
-/**
- * Clear the watch mode cache (call when templates/meta/config change)
- */
-export function clearWatchCache() {
-  watchModeCache.templates = null;
-  watchModeCache.menu = null;
-  watchModeCache.footer = null;
-  watchModeCache.validPaths = null;
-  watchModeCache.hashCache = null;
-  watchModeCache.isInitialized = false;
-  cssPathCache.clear(); // Also clear CSS path cache
-  console.log('Watch cache cleared');
-}
-
-/**
- * Progress reporter that updates lines in place (like pnpm)
- */
-class ProgressReporter {
-  constructor() {
-    this.lines = {};
-    this.isTTY = process.stdout.isTTY;
-  }
-
-  // Update a named status line in place
-  status(name, message) {
-    if (this.isTTY) {
-      // Save cursor, move to line, clear it, write, restore cursor
-      const line = `${name}: ${message}`;
-      this.lines[name] = line;
-      // Clear line and write
-      process.stdout.write(`\r\x1b[K${line}`);
-    }
-  }
-
-  // Complete a status line (print final state and newline)
-  done(name, message) {
-    if (this.isTTY) {
-      process.stdout.write(`\r\x1b[K${name}: ${message}\n`);
-    } else {
-      console.log(`${name}: ${message}`);
-    }
-    delete this.lines[name];
-  }
-
-  // Regular log that doesn't get overwritten
-  log(message) {
-    if (this.isTTY) {
-      // Clear current line first, print message, then newline
-      process.stdout.write(`\r\x1b[K${message}\n`);
-    } else {
-      console.log(message);
-    }
-  }
-
-  // Clear all status lines
-  clear() {
-    if (this.isTTY) {
-      process.stdout.write(`\r\x1b[K`);
-    }
-  }
-}
-
-const progress = new ProgressReporter();
-
-/**
- * Process items in batches to limit memory usage
- * @param {Array} items - Items to process
- * @param {Function} processor - Async function to process each item
- * @param {number} batchSize - Max concurrent operations
- */
-async function processBatched(items, processor, batchSize = BATCH_SIZE) {
-  const results = [];
-  for (let i = 0; i < items.length; i += batchSize) {
-    const batch = items.slice(i, i + batchSize);
-    const batchResults = await Promise.all(batch.map(processor));
-    results.push(...batchResults);
-    // Allow GC to run between batches
-    if (global.gc) global.gc();
-  }
-  return results;
-}
 import { getAutomenu } from "../helper/automenu.js";
 import { filterAsync } from "../helper/filterAsync.js";
 import { isDirectory } from "../helper/isDirectory.js";
@@ -124,38 +21,6 @@ import {
 } from "../helper/linkValidator.js";
 import { getAndIncrementBuildId } from "../helper/ursaConfig.js";
 import { extractSections } from "../helper/sectionExtractor.js";
-
-// Helper function to build search index from processed files
-function buildSearchIndex(jsonCache, source, output) {
-  const searchIndex = [];
-
-  for (const [filePath, jsonObject] of jsonCache.entries()) {
-    // Generate URL path relative to output
-    const relativePath = filePath.replace(source, '').replace(/\.(md|txt|yml)$/, '.html');
-    const url = relativePath.startsWith('/') ? relativePath : '/' + relativePath;
-
-    // Extract text content from body (strip HTML tags for search)
-    const textContent = jsonObject.bodyHtml.replace(/<[^>]*>/g, ' ').replace(/\s+/g, ' ').trim();
-    const excerpt = textContent.substring(0, 200); // First 200 chars for preview
-
-    searchIndex.push({
-      title: toTitleCase(jsonObject.name),
-      path: relativePath,
-      url: url,
-      content: excerpt
-    });
-  }
-
-  return searchIndex;
-}
-
-// Helper function to convert filename to title case
-function toTitleCase(filename) {
-  return filename
-    .split(/[-_\s]+/) // Split on hyphens, underscores, and spaces
-    .map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
-    .join(' ');
-}
 import { renderFile } from "../helper/fileRenderer.js";
 import { findStyleCss } from "../helper/findStyleCss.js";
 import { copy as copyDir, emptyDir, outputFile } from "fs-extra";
@@ -164,78 +29,43 @@ import { URL } from "url";
 import o2x from "object-to-xml";
 import { existsSync } from "fs";
 import { fileExists } from "../helper/fileExists.js";
-
 import { createWhitelistFilter } from "../helper/whitelistFilter.js";

-
-
+// Import build helpers from organized modules
+import {
+  generateCacheBustTimestamp,
+  addTimestampToCssUrls,
+  addTimestampToHtmlStaticRefs,
+  processBatched,
+  ProgressReporter,
+  watchModeCache,
+  clearWatchCache as clearWatchCacheBase,
+  toTitleCase,
+  parseExcludeOption,
+  createExcludeFilter,
+  addTrailingSlash,
+  getTemplates,
+  getMenu,
+  getTransformedMetadata,
+  getFooter,
+  generateAutoIndices,
+} from "../helper/build/index.js";
+
+// Concurrency limiter for batch processing to avoid memory exhaustion
+const BATCH_SIZE = parseInt(process.env.URSA_BATCH_SIZE || '50', 10);

 // Cache for CSS path lookups to avoid repeated filesystem walks
 const cssPathCache = new Map();

-
-
-
- * @param {string} source - Source directory path
- * @returns {Promise<Set<string>>} Set of excluded folder paths (normalized)
- */
-async function parseExcludeOption(excludeOption, source) {
-  const excludedPaths = new Set();
-
-  if (!excludeOption) return excludedPaths;
-
-  // Check if it's a file path (exists as a file)
-  const isFile = existsSync(excludeOption) && (await stat(excludeOption)).isFile();
-
-  let patterns;
-  if (isFile) {
-    // Read patterns from file (one per line)
-    const content = await readFile(excludeOption, 'utf8');
-    patterns = content.split('\n')
-      .map(line => line.trim())
-      .filter(line => line && !line.startsWith('#')); // Skip empty lines and comments
-  } else {
-    // Treat as comma-separated list
-    patterns = excludeOption.split(',').map(p => p.trim()).filter(Boolean);
-  }
-
-  // Normalize patterns to absolute paths
-  for (const pattern of patterns) {
-    // Remove leading/trailing slashes and normalize
-    const normalized = pattern.replace(/^\/+|\/+$/g, '');
-    // Store as relative path for easier matching
-    excludedPaths.add(normalized);
-  }
-
-  return excludedPaths;
+// Wrapper for clearWatchCache that passes cssPathCache
+export function clearWatchCache() {
+  clearWatchCacheBase(cssPathCache);
 }

-
-
-
-
- * @returns {Function} Filter function
- */
-function createExcludeFilter(excludedPaths, source) {
-  if (excludedPaths.size === 0) {
-    return () => true; // No exclusions, allow all
-  }
-
-  return (filePath) => {
-    // Get path relative to source
-    const relativePath = filePath.replace(source, '').replace(/^\/+/, '');
-
-    // Check if file is in any excluded folder
-    for (const excluded of excludedPaths) {
-      if (relativePath === excluded ||
-          relativePath.startsWith(excluded + '/') ||
-          relativePath.startsWith(excluded + '\\')) {
-        return false; // Exclude this file
-      }
-    }
-    return true; // Include this file
-  };
-}
+const progress = new ProgressReporter();
+
+const DEFAULT_TEMPLATE_NAME =
+  process.env.DEFAULT_TEMPLATE_NAME ?? "default-template";

 export async function generate({
   _source = join(process.cwd(), "."),
@@ -252,6 +82,10 @@ export async function generate({
   const output = resolve(_output) + "/";
   console.log({ source, meta, output });

+  // Generate cache-busting timestamp for this build
+  const cacheBustTimestamp = generateCacheBustTimestamp();
+  progress.log(`Cache-bust timestamp: ${cacheBustTimestamp}`);
+
   // Clear output directory when --clean is specified
   if (_clean) {
     progress.log(`Clean build: clearing output directory ${output}`);
@@ -323,7 +157,9 @@ export async function generate({
   const validPaths = buildValidPaths(allSourceFilenamesThatAreArticles, source, allSourceFilenamesThatAreDirectories);
   progress.log(`Built ${validPaths.size} valid paths for link validation`);

-  const
+  const menuResult = await getMenu(allSourceFilenames, source, validPaths);
+  const menu = menuResult.html;
+  const menuData = menuResult.menuData;

   // Get and increment build ID from .ursa.json
   const buildId = getAndIncrementBuildId(resolve(_source));
@@ -341,11 +177,30 @@ export async function generate({
     progress.log(`Clean build: ignoring cached hashes`);
   }

+
   // create public folder
   const pub = join(output, "public");
   await mkdir(pub, { recursive: true });
   await copyDir(meta, pub);

+  // Process all CSS files in the entire output directory tree for cache-busting
+  const allOutputFiles = await recurse(output, [() => false]);
+  for (const cssFile of allOutputFiles.filter(f => f.endsWith('.css'))) {
+    const cssContent = await readFile(cssFile, 'utf8');
+    const processedCss = addTimestampToCssUrls(cssContent, cacheBustTimestamp);
+    await outputFile(cssFile, processedCss);
+  }
+
+  // Process JS files in output for cache-busting fetch URLs
+  for (const jsFile of allOutputFiles.filter(f => f.endsWith('.js'))) {
+    let jsContent = await readFile(jsFile, 'utf8');
+    jsContent = jsContent.replace(
+      /fetch\(['"]([^'"\)]+\.(json))['"](?!\s*\+)/g,
+      `fetch('$1?v=${cacheBustTimestamp}'`
+    );
+    await outputFile(jsFile, jsContent);
+  }
+
   // Track errors for error report
   const errors = [];

@@ -498,6 +353,9 @@ export async function generate({
       // Resolve links and mark broken internal links as inactive
       finalHtml = markInactiveLinks(finalHtml, validPaths, docUrlPath, false);

+      // Add cache-busting timestamps to static file references
+      finalHtml = addTimestampToHtmlStaticRefs(finalHtml, cacheBustTimestamp);
+
       await outputFile(outputFilename, finalHtml);

       // Clear finalHtml reference to allow GC
@@ -549,6 +407,13 @@ export async function generate({
   progress.log(`Writing search index with ${searchIndex.length} entries`);
   await outputFile(searchIndexPath, JSON.stringify(searchIndex));

+  // Write menu data as a separate JSON file (not embedded in each page)
+  // This dramatically reduces HTML file sizes for large sites
+  const menuDataPath = join(output, 'public', 'menu-data.json');
+  const menuDataJson = JSON.stringify(menuData);
+  progress.log(`Writing menu data (${(menuDataJson.length / 1024).toFixed(1)} KB)`);
+  await outputFile(menuDataPath, menuDataJson);
+
   // Process directory indices with batched concurrency
   const totalDirs = allSourceFilenamesThatAreDirectories.length;
   let processedDirs = 0;
@@ -604,6 +469,8 @@ export async function generate({
       for (const [key, value] of Object.entries(replacements)) {
         finalHtml = finalHtml.replace(key, value);
       }
+      // Add cache-busting timestamps to static file references
+      finalHtml = addTimestampToHtmlStaticRefs(finalHtml, cacheBustTimestamp);
       await outputFile(htmlOutputFilename, finalHtml);
     }
   } catch (e) {
@@ -638,7 +505,7 @@ export async function generate({
       processedStatic++;
       const shortFile = file.replace(source, '');
       progress.status('Static files', `${processedStatic}/${totalStatic} ${shortFile}`);
-
+
       // Check if file has changed using file stat as a quick check
       const fileStat = await stat(file);
       const statKey = `${file}:stat`;
@@ -650,9 +517,16 @@ export async function generate({
       copiedStatic++;

       const outputFilename = file.replace(source, output);
-
       await mkdir(dirname(outputFilename), { recursive: true });
-
+
+      if (file.endsWith('.css')) {
+        // Process CSS for cache busting
+        const cssContent = await readFile(file, 'utf8');
+        const processedCss = addTimestampToCssUrls(cssContent, cacheBustTimestamp);
+        await outputFile(outputFilename, processedCss);
+      } else {
+        await copyFile(file, outputFilename);
+      }
     } catch (e) {
       progress.log(`Error processing static file ${file}: ${e.message}`);
       errors.push({ file, phase: 'static-file', error: e });
@@ -663,7 +537,7 @@ export async function generate({

   // Automatic index generation for folders without index.html
   progress.log(`Checking for missing index files...`);
-  await generateAutoIndices(output, allSourceFilenamesThatAreDirectories, source, templates, menu, footer, allSourceFilenamesThatAreArticles, copiedCssFiles, existingHtmlFiles);
+  await generateAutoIndices(output, allSourceFilenamesThatAreDirectories, source, templates, menu, footer, allSourceFilenamesThatAreArticles, copiedCssFiles, existingHtmlFiles, cacheBustTimestamp, progress);

   // Save the hash cache to .ursa folder in source directory
   if (hashCache.size > 0) {
@@ -720,191 +594,6 @@ export async function generate({
   }
 }

-/**
- * Generate automatic index.html files for folders that don't have one
- * @param {string} output - Output directory path
- * @param {string[]} directories - List of source directories
- * @param {string} source - Source directory path
- * @param {object} templates - Template map
- * @param {string} menu - Rendered menu HTML
- * @param {string} footer - Footer HTML
- * @param {string[]} generatedArticles - List of source article paths that were generated
- * @param {Set<string>} copiedCssFiles - Set of CSS files already copied to output
- * @param {Set<string>} existingHtmlFiles - Set of existing HTML files in source (relative paths)
- */
-async function generateAutoIndices(output, directories, source, templates, menu, footer, generatedArticles, copiedCssFiles, existingHtmlFiles) {
-  // Alternate index file names to look for (in priority order)
-  const INDEX_ALTERNATES = ['_index.html', 'home.html', '_home.html'];
-
-  // Normalize paths (remove trailing slashes for consistent replacement)
-  const sourceNorm = source.replace(/\/+$/, '');
-  const outputNorm = output.replace(/\/+$/, '');
-
-  // Build set of directories that already have an index.html from a source index.md/txt/yml
-  const dirsWithSourceIndex = new Set();
-  for (const articlePath of generatedArticles) {
-    const base = basename(articlePath, extname(articlePath));
-    if (base === 'index') {
-      const dir = dirname(articlePath);
-      const outputDir = dir.replace(sourceNorm, outputNorm);
-      dirsWithSourceIndex.add(outputDir);
-    }
-  }
-
-  // Get all output directories (including root)
-  const outputDirs = new Set([outputNorm]);
-  for (const dir of directories) {
-    // Handle both with and without trailing slash in source
-    const outputDir = dir.replace(sourceNorm, outputNorm);
-    outputDirs.add(outputDir);
-  }
-
-  let generatedCount = 0;
-  let renamedCount = 0;
-  let skippedHtmlCount = 0;
-
-  for (const dir of outputDirs) {
-    const indexPath = join(dir, 'index.html');
-
-    // Skip if this directory had a source index.md/txt/yml that was already processed
-    if (dirsWithSourceIndex.has(dir)) {
-      continue;
-    }
-
-    // Check if there's an existing index.html in the source directory (don't overwrite it)
-    const sourceDir = dir.replace(outputNorm, sourceNorm);
-    const relativeIndexPath = join(sourceDir, 'index.html').replace(sourceNorm + '/', '');
-    if (existingHtmlFiles && existingHtmlFiles.has(relativeIndexPath)) {
-      skippedHtmlCount++;
-      continue; // Don't overwrite existing source HTML
-    }
-
-    // Skip if index.html already exists in output (e.g., created by previous run)
-    if (existsSync(indexPath)) {
-      continue;
-    }
-
-    // Get folder name for (foldername).html check
-    const folderName = basename(dir);
-    const folderNameAlternate = `${folderName}.html`;
-
-    // Check for alternate index files
-    let foundAlternate = null;
-    for (const alt of [...INDEX_ALTERNATES, folderNameAlternate]) {
-      const altPath = join(dir, alt);
-      if (existsSync(altPath)) {
-        foundAlternate = altPath;
-        break;
-      }
-    }
-
-    if (foundAlternate) {
-      // Rename/copy alternate to index.html
-      try {
-        const content = await readFile(foundAlternate, 'utf8');
-        await outputFile(indexPath, content);
-        renamedCount++;
-        progress.status('Auto-index', `Promoted ${basename(foundAlternate)} → index.html in ${dir.replace(outputNorm, '') || '/'}`);
-      } catch (e) {
-        progress.log(`Error promoting ${foundAlternate} to index.html: ${e.message}`);
-      }
-    } else {
-      // Generate a simple index listing direct children
-      try {
-        const children = await readdir(dir, { withFileTypes: true });
-
-        // Filter to only include relevant files and folders
-        const items = children
-          .filter(child => {
-            // Skip hidden files and index alternates we just checked
-            if (child.name.startsWith('.')) return false;
-            if (child.name === 'index.html') return false;
-            // Include directories and html files
-            return child.isDirectory() || child.name.endsWith('.html');
-          })
-          .map(child => {
-            const isDir = child.isDirectory();
-            const name = isDir ? child.name : child.name.replace('.html', '');
-            const href = isDir ? `${child.name}/` : child.name;
-            const displayName = toTitleCase(name);
-            const icon = isDir ? '📁' : '📄';
-            return `<li>${icon} <a href="${href}">${displayName}</a></li>`;
-          });
-
-        if (items.length === 0) {
-          // Empty folder, skip generating index
-          continue;
-        }
-
-        const folderDisplayName = dir === outputNorm ? 'Home' : toTitleCase(folderName);
-        const indexHtml = `<h1>${folderDisplayName}</h1>\n<ul class="auto-index">\n${items.join('\n')}\n</ul>`;
-
-        const template = templates["default-template"];
-        if (!template) {
-          progress.log(`Warning: No default template for auto-index in ${dir}`);
-          continue;
-        }
-
-        // Find nearest style.css for this directory
-        let styleLink = "";
-        try {
-          // Map output dir back to source dir to find style.css
-          const sourceDir = dir.replace(outputNorm, sourceNorm);
-          const cssPath = await findStyleCss(sourceDir);
-          if (cssPath) {
-            // Calculate output path for the CSS file (mirrors source structure)
-            const cssOutputPath = cssPath.replace(sourceNorm, outputNorm);
-            const cssUrlPath = '/' + cssPath.replace(sourceNorm, '');
-
-            // Copy CSS file if not already copied
-            if (!copiedCssFiles.has(cssPath)) {
-              const cssContent = await readFile(cssPath, 'utf8');
-              await outputFile(cssOutputPath, cssContent);
-              copiedCssFiles.add(cssPath);
-            }
-
-            // Generate link tag
-            styleLink = `<link rel="stylesheet" href="${cssUrlPath}" />`;
-          }
-        } catch (e) {
-          // ignore CSS lookup errors
-        }
-
-        let finalHtml = template;
-        const replacements = {
-          "${menu}": menu,
-          "${body}": indexHtml,
-          "${searchIndex}": "[]",
-          "${title}": folderDisplayName,
-          "${meta}": "{}",
-          "${transformedMetadata}": "",
-          "${styleLink}": styleLink,
-          "${footer}": footer
-        };
-        for (const [key, value] of Object.entries(replacements)) {
-          finalHtml = finalHtml.replace(key, value);
-        }
-
-        await outputFile(indexPath, finalHtml);
-        generatedCount++;
-        progress.status('Auto-index', `Generated index.html for ${dir.replace(outputNorm, '') || '/'}`);
-      } catch (e) {
-        progress.log(`Error generating auto-index for ${dir}: ${e.message}`);
-      }
-    }
-  }
-
-  if (generatedCount > 0 || renamedCount > 0 || skippedHtmlCount > 0) {
-    let summary = `${generatedCount} generated, ${renamedCount} promoted`;
-    if (skippedHtmlCount > 0) {
-      summary += `, ${skippedHtmlCount} skipped (existing HTML)`;
-    }
-    progress.done('Auto-index', summary);
-  } else {
-    progress.log(`Auto-index: All folders already have index.html`);
-  }
-}
-
 /**
  * Regenerate a single file without scanning the entire source directory.
  * This is much faster for watch mode - only regenerate what changed.
@@ -940,7 +629,7 @@ export async function regenerateSingleFile(changedFile, {
   }

   try {
-    const { templates, menu, footer, validPaths, hashCache } = watchModeCache;
+    const { templates, menu, footer, validPaths, hashCache, cacheBustTimestamp } = watchModeCache;

     const rawBody = await readFile(changedFile, "utf8");
     const type = parse(changedFile).ext;
@@ -1023,6 +712,9 @@ export async function regenerateSingleFile(changedFile, {
     // Mark broken links
     finalHtml = markInactiveLinks(finalHtml, validPaths, docUrlPath, false);

+    // Add cache-busting timestamps to static file references
+    finalHtml = addTimestampToHtmlStaticRefs(finalHtml, cacheBustTimestamp);
+
     await outputFile(outputFilename, finalHtml);

     // JSON output
@@ -1054,187 +746,4 @@ export async function regenerateSingleFile(changedFile, {
   } catch (e) {
     return { success: false, message: `Error: ${e.message}` };
   }
-}
-
-/**
- * gets { [templateName:String]:[templateBody:String] }
- * meta: full path to meta files (default-template.html, etc)
- */
-async function getTemplates(meta) {
-  const allMetaFilenames = await recurse(meta);
-  const allHtmlFilenames = allMetaFilenames.filter((filename) =>
-    filename.match(/\.html/)
-  );
-
-  let templates = {};
-  const templatesArray = await Promise.all(
-    allHtmlFilenames.map(async (filename) => {
-      const { name } = parse(filename);
-      const fileContent = await readFile(filename, "utf8");
-      return [name, fileContent];
-    })
-  );
-  templatesArray.forEach(
-    ([templateName, templateText]) => (templates[templateName] = templateText)
-  );
-
-  return templates;
-}
-
-async function getMenu(allSourceFilenames, source, validPaths) {
-  // todo: handle various incarnations of menu filename
-
-  const rawMenu = await getAutomenu(source, validPaths);
-  const menuBody = renderFile({ fileContents: rawMenu, type: ".md" });
-  return menuBody;
-
-  // const allMenus = allSourceFilenames.filter((filename) =>
-  //   filename.match(/_?menu\.(html|yml|md|txt)/)
-  // );
-  // console.log({ allMenus });
-  // if (allMenus.length === 0) return "";
-
-  // // pick best menu...TODO: actually apply logic here
-  // const bestMenu = allMenus[0];
-  // const rawBody = await readFile(bestMenu, "utf8");
-  // const type = parse(bestMenu).ext;
-  // const menuBody = renderFile({ fileContents: rawBody, type });
-
-  // return menuBody;
-}
-
-async function getTransformedMetadata(dirname, metadata) {
-  // console.log("getTransformedMetadata > ", { dirname });
-  // custom transform? else, use default
-  const customTransformFnFilename = join(dirname, "transformMetadata.js");
-  let transformFn = defaultTransformFn;
-  try {
-    const customTransformFn = (await import(customTransformFnFilename)).default;
-    if (typeof customTransformFn === "function")
-      transformFn = customTransformFn;
-  } catch (e) {
-    // console.error(e);
-  }
-  try {
-    return transformFn(metadata);
-  } catch (e) {
-    return "error transforming metadata";
-  }
-
-  function defaultTransformFn(metadata) {
-    return "default transform";
-  }
-}
-
-function addTrailingSlash(somePath) {
-  if (typeof somePath !== "string") return somePath;
-  if (somePath.length < 1) return somePath;
-  if (somePath[somePath.length - 1] == "/") return somePath;
-  return `${somePath}/`;
-}
-
-/**
- * Generate footer HTML from footer.md and package.json
- * @param {string} source - resolved source path with trailing slash
- * @param {string} _source - original source path
- * @param {number} buildId - the current build ID
- */
-async function getFooter(source, _source, buildId) {
-  const footerParts = [];
-
-  // Try to read footer.md from source root
-  const footerPath = join(source, 'footer.md');
-  try {
-    if (existsSync(footerPath)) {
-      const footerMd = await readFile(footerPath, 'utf8');
-      const footerHtml = renderFile({ fileContents: footerMd, type: '.md' });
-      footerParts.push(`<div class="footer-content">${footerHtml}</div>`);
-    }
-  } catch (e) {
-    console.error(`Error reading footer.md: ${e.message}`);
-  }
-
-  // Try to read package.json from doc repo (check both source dir and parent)
-  let docPackage = null;
-  const sourceDir = resolve(_source);
-  const packagePaths = [
-    join(sourceDir, 'package.json'), // In source dir itself
-    join(sourceDir, '..', 'package.json'), // One level up (if docs is a subfolder)
-  ];
-
-  for (const packagePath of packagePaths) {
-    try {
-      if (existsSync(packagePath)) {
-        const packageJson = await readFile(packagePath, 'utf8');
-        docPackage = JSON.parse(packageJson);
-        console.log(`Found doc package.json at ${packagePath}`);
-        break;
-      }
-    } catch (e) {
-      // Continue to next path
-    }
-  }
-
-  // Get ursa version from ursa's own package.json
-  // Use import.meta.url to find the package.json relative to this file
-  let ursaVersion = 'unknown';
-  try {
-    // From src/jobs/generate.js, go up to package root
-    const currentFileUrl = new URL(import.meta.url);
-    const currentDir = dirname(currentFileUrl.pathname);
-    const ursaPackagePath = resolve(currentDir, '..', '..', 'package.json');
-
-    if (existsSync(ursaPackagePath)) {
-      const ursaPackageJson = await readFile(ursaPackagePath, 'utf8');
-      const ursaPackage = JSON.parse(ursaPackageJson);
-      ursaVersion = ursaPackage.version;
-      console.log(`Found ursa package.json at ${ursaPackagePath}, version: ${ursaVersion}`);
-    }
-  } catch (e) {
-    console.error(`Error reading ursa package.json: ${e.message}`);
-  }
-
-  // Build meta line: version, build id, timestamp, "generated by ursa"
-  const metaParts = [];
-  if (docPackage?.version) {
-    metaParts.push(`v${docPackage.version}`);
-  }
-  metaParts.push(`build ${buildId}`);
-
-  // Full date/time in a readable format
-  const now = new Date();
-  const timestamp = now.toISOString().replace('T', ' ').replace(/\.\d{3}Z$/, ' UTC');
-  metaParts.push(timestamp);
-
-  metaParts.push(`Generated by <a href="https://www.npmjs.com/package/@kenjura/ursa">ursa</a> v${ursaVersion}`);
-
-  footerParts.push(`<div class="footer-meta">${metaParts.join(' • ')}</div>`);
-
-  // Copyright line from doc package.json
-  if (docPackage?.copyright) {
-    footerParts.push(`<div class="footer-copyright">${docPackage.copyright}</div>`);
-  } else if (docPackage?.author) {
-    const year = new Date().getFullYear();
-    const author = typeof docPackage.author === 'string' ? docPackage.author : docPackage.author.name;
-    if (author) {
-      footerParts.push(`<div class="footer-copyright">© ${year} ${author}</div>`);
-    }
-  }
-
-  // Try to get git short hash of doc repo (as HTML comment)
-  try {
-    const { execSync } = await import('child_process');
-    const gitHash = execSync('git rev-parse --short HEAD', {
-      cwd: resolve(_source),
-      encoding: 'utf8',
-      stdio: ['pipe', 'pipe', 'pipe']
-    }).trim();
-    if (gitHash) {
-      footerParts.push(`<!-- git: ${gitHash} -->`);
-    }
-  } catch (e) {
-    // Not a git repo or git not available - silently skip
-  }
-
-  return footerParts.join('\n');
 }
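
The cache-busting helpers used throughout the diff above (`generateCacheBustTimestamp`, `addTimestampToCssUrls`, `addTimestampToHtmlStaticRefs`) are defined in the new `src/helper/build/cacheBust.js`, which this diff does not show. Judging from the `?v=${cacheBustTimestamp}` query string that generate.js now appends to JSON fetch URLs, they most likely append the same per-build version query to static references. The sketch below illustrates that idea only; the function bodies and regexes are assumptions, not the published implementation.

// Illustrative sketch of the cache-busting approach; the real code lives in
// src/helper/build/cacheBust.js and may differ.
export function generateCacheBustTimestamp() {
  // Any per-build token works; a millisecond timestamp is the simplest choice.
  return Date.now().toString();
}

export function addTimestampToCssUrls(css, timestamp) {
  // Append ?v=<timestamp> to url(...) references that are local and unversioned.
  return css.replace(/url\((['"]?)([^'")?]+)\1\)/g, (match, quote, url) =>
    url.startsWith('data:') || url.startsWith('http')
      ? match
      : `url(${quote}${url}?v=${timestamp}${quote})`
  );
}

export function addTimestampToHtmlStaticRefs(html, timestamp) {
  // Append ?v=<timestamp> to local href/src attributes for common static assets.
  return html.replace(
    /(href|src)="([^"?]+\.(?:css|js|png|jpg|svg|ico))"/g,
    (match, attr, url) =>
      url.startsWith('http') || url.startsWith('//')
        ? match
        : `${attr}="${url}?v=${timestamp}"`
  );
}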