@kenjura/ursa 0.10.0 → 0.33.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +162 -0
- package/README.md +182 -19
- package/bin/ursa.js +208 -0
- package/lib/index.js +7 -2
- package/meta/character-sheet-template.html +2 -0
- package/meta/default-template.html +29 -5
- package/meta/default.css +451 -115
- package/meta/menu.js +387 -0
- package/meta/search.js +208 -0
- package/meta/sectionify.js +36 -0
- package/meta/sticky.js +73 -0
- package/meta/toc-generator.js +124 -0
- package/meta/toc.js +93 -0
- package/package.json +25 -4
- package/src/helper/WikiImage.js +138 -0
- package/src/helper/automenu.js +215 -55
- package/src/helper/contentHash.js +71 -0
- package/src/helper/findStyleCss.js +26 -0
- package/src/helper/linkValidator.js +246 -0
- package/src/helper/metadataExtractor.js +19 -8
- package/src/helper/whitelistFilter.js +66 -0
- package/src/helper/wikitextHelper.js +6 -3
- package/src/jobs/generate.js +353 -112
- package/src/serve.js +138 -37
- package/.nvmrc +0 -1
- package/.vscode/launch.json +0 -20
- package/TODO.md +0 -16
- package/nodemon.json +0 -16
package/src/helper/whitelistFilter.js
ADDED

@@ -0,0 +1,66 @@
+import { readFile } from 'fs/promises';
+import { resolve, relative } from 'path';
+import { existsSync } from 'fs';
+
+/**
+ * Creates a filter function based on a whitelist file
+ * @param {string} whitelistPath - Path to the whitelist file
+ * @param {string} sourceRoot - Root source directory for relative path matching
+ * @returns {Function} Filter function that returns true if file should be included
+ */
+export async function createWhitelistFilter(whitelistPath, sourceRoot) {
+  if (!whitelistPath || !existsSync(whitelistPath)) {
+    return () => true; // No whitelist = include all files
+  }
+
+  try {
+    const whitelistContent = await readFile(whitelistPath, 'utf8');
+    const patterns = whitelistContent
+      .split('\n')
+      .map(line => line.trim())
+      .filter(line => line && !line.startsWith('#')); // Remove empty lines and comments
+
+    if (patterns.length === 0) {
+      return () => true; // Empty whitelist = include all files
+    }
+
+    return (filePath) => {
+      const absolutePath = resolve(filePath);
+      const relativePath = relative(sourceRoot, absolutePath);
+
+      return patterns.some(pattern => {
+        // Full absolute path match
+        if (pattern.startsWith('/') && absolutePath === pattern) {
+          return true;
+        }
+
+        // Relative path match (from source root)
+        if (relativePath === pattern || relativePath.includes(pattern)) {
+          return true;
+        }
+
+        // Directory match (pattern ends with /)
+        if (pattern.endsWith('/')) {
+          const dirPattern = pattern.slice(0, -1);
+          return relativePath.startsWith(dirPattern + '/') || relativePath === dirPattern;
+        }
+
+        // Filename match
+        const fileName = absolutePath.split('/').pop();
+        if (fileName === pattern) {
+          return true;
+        }
+
+        // Partial path match (anywhere in the path)
+        if (absolutePath.includes(pattern) || relativePath.includes(pattern)) {
+          return true;
+        }
+
+        return false;
+      });
+    };
+  } catch (error) {
+    console.warn(`Warning: Could not read whitelist file ${whitelistPath}:`, error.message);
+    return () => true; // Fallback to include all files
+  }
+}
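
The whitelist format is plain text: one pattern per line; # comments and blank lines are ignored; a trailing slash matches a directory; a leading slash matches a full absolute path; anything else matches by filename or as a substring of the path. A hypothetical whitelist file (the file name, paths, and patterns below are illustrative, not taken from the package):

# whitelist.txt — only build these sources
docs/
campaign-notes.md
/home/user/wiki/characters/index.md

And the corresponding filter call, mirroring how generate.js uses it:

const whitelistFilter = await createWhitelistFilter('/home/user/wiki/whitelist.txt', '/home/user/wiki/');
const keptFiles = allSourceFilenames.filter(whitelistFilter); // synchronous predicate over path strings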

package/src/helper/wikitextHelper.js
CHANGED

@@ -1,11 +1,14 @@
+import { getImageTag } from './WikiImage.js';
+
 let instance = {};
 
 export function wikiToHtml({ wikitext, articleName, args } = {}) {
   if (!args) args = { db: "noDB", noSection: true, noTOC: true };
   if (!wikitext) return "nothing to render";
 
-  const
-  const
+  const db = args.db || "noDB";
+  const linkbase = ("/" + db + "/").replace(/\/\//g, "/");
+  const imageroot = ("/" + db + "/img/").replace(/\/\//g, "/");
 
   const allArticles = args.allArticles || [];
 
@@ -330,7 +333,7 @@ export function wikiToHtml({ wikitext, articleName, args } = {}) {
       case "IFRAME":
         return '<iframe src="' + articleName + '"' + getArg(0) + "></iframe>";
       case "IMAGE":
-        return
+        return getImageTag({
           name: articleName,
           args: args,
           imgUrl: imageroot + articleName,
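
For orientation, a minimal call sketch for the updated wikiToHtml (the article name, body text, and option values are made up; only the db and allArticles options are visible in this diff):

import { wikiToHtml } from './wikitextHelper.js';

const html = wikiToHtml({
  wikitext: 'Example article body.',
  articleName: 'example-article',
  args: { db: 'wiki', allArticles: [] },
});
// With db = "wiki", internal links resolve against linkbase "/wiki/" and images against imageroot "/wiki/img/".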
package/src/jobs/generate.js
CHANGED
@@ -8,7 +8,51 @@ import {
   extractMetadata,
   extractRawMetadata,
 } from "../helper/metadataExtractor.js";
+import {
+  hashContent,
+  loadHashCache,
+  saveHashCache,
+  needsRegeneration,
+  updateHash,
+} from "../helper/contentHash.js";
+import {
+  buildValidPaths,
+  markInactiveLinks,
+} from "../helper/linkValidator.js";
+
+// Helper function to build search index from processed files
+function buildSearchIndex(jsonCache, source, output) {
+  const searchIndex = [];
+
+  for (const [filePath, jsonObject] of jsonCache.entries()) {
+    // Generate URL path relative to output
+    const relativePath = filePath.replace(source, '').replace(/\.(md|txt|yml)$/, '.html');
+    const url = relativePath.startsWith('/') ? relativePath : '/' + relativePath;
+
+    // Extract text content from body (strip HTML tags for search)
+    const textContent = jsonObject.bodyHtml.replace(/<[^>]*>/g, ' ').replace(/\s+/g, ' ').trim();
+    const excerpt = textContent.substring(0, 200); // First 200 chars for preview
+
+    searchIndex.push({
+      title: toTitleCase(jsonObject.name),
+      path: relativePath,
+      url: url,
+      content: excerpt
+    });
+  }
+
+  return searchIndex;
+}
+
+// Helper function to convert filename to title case
+function toTitleCase(filename) {
+  return filename
+    .split(/[-_\s]+/) // Split on hyphens, underscores, and spaces
+    .map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
+    .join(' ');
+}
 import { renderFile } from "../helper/fileRenderer.js";
+import { findStyleCss } from "../helper/findStyleCss.js";
 import { copy as copyDir, emptyDir, outputFile } from "fs-extra";
 import { basename, dirname, extname, join, parse, resolve } from "path";
 import { URL } from "url";
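
For reference, toTitleCase splits a filename on hyphens, underscores, and whitespace and capitalizes each word (hypothetical inputs):

toTitleCase('character-sheet_template'); // "Character Sheet Template"
toTitleCase('index');                    // "Index"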
@@ -16,6 +60,8 @@ import o2x from "object-to-xml";
 import { existsSync } from "fs";
 import { fileExists } from "../helper/fileExists.js";
 
+import { createWhitelistFilter } from "../helper/whitelistFilter.js";
+
 const DEFAULT_TEMPLATE_NAME =
   process.env.DEFAULT_TEMPLATE_NAME ?? "default-template";
 
@@ -23,18 +69,30 @@ export async function generate({
   _source = join(process.cwd(), "."),
   _meta = join(process.cwd(), "meta"),
   _output = join(process.cwd(), "build"),
+  _whitelist = null,
+  _incremental = false, // Legacy flag, now ignored (always incremental)
+  _clean = false, // When true, ignore cache and regenerate all files
 } = {}) {
-  console.log({ _source, _meta, _output });
+  console.log({ _source, _meta, _output, _whitelist, _clean });
   const source = resolve(_source) + "/";
   const meta = resolve(_meta);
   const output = resolve(_output) + "/";
   console.log({ source, meta, output });
 
   const allSourceFilenamesUnfiltered = await recurse(source, [() => false]);
+
+  // Apply include filter (existing functionality)
   const includeFilter = process.env.INCLUDE_FILTER
     ? (fileName) => fileName.match(process.env.INCLUDE_FILTER)
     : Boolean;
-
+  let allSourceFilenames = allSourceFilenamesUnfiltered.filter(includeFilter);
+
+  // Apply whitelist filter if specified
+  if (_whitelist) {
+    const whitelistFilter = await createWhitelistFilter(_whitelist, source);
+    allSourceFilenames = allSourceFilenames.filter(whitelistFilter);
+    console.log(`Whitelist applied: ${allSourceFilenames.length} files after filtering`);
+  }
   // console.log(allSourceFilenames);
 
   // if (source.substr(-1) !== "/") source += "/"; // warning: might not work in windows
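
A hypothetical programmatic invocation of generate() with the new options (the import path and all file paths below are illustrative):

import { generate } from './src/jobs/generate.js';

await generate({
  _source: '/home/user/wiki',
  _meta: '/home/user/wiki/meta',
  _output: '/home/user/wiki/build',
  _whitelist: '/home/user/wiki/whitelist.txt', // optional: only build whitelisted paths
  _clean: true,                                // ignore cached content hashes and rebuild everything
});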
@@ -43,138 +101,263 @@ export async function generate({
   const templates = await getTemplates(meta); // todo: error if no default template
   // console.log({ templates });
 
-  const menu = await getMenu(allSourceFilenames, source);
-
-  // clean build directory
-  await emptyDir(output);
-
-  // create public folder
-  const pub = join(output, "public");
-  await mkdir(pub);
-  await copyDir(meta, pub);
-
   // read all articles, process them, copy them to build
   const articleExtensions = /\.(md|txt|yml)/;
+  const hiddenOrSystemDirs = /[\/\\]\.(?!\.)|[\/\\]node_modules[\/\\]/; // Matches hidden folders (starting with .) or node_modules
   const allSourceFilenamesThatAreArticles = allSourceFilenames.filter(
-    (filename) => filename.match(articleExtensions)
+    (filename) => filename.match(articleExtensions) && !filename.match(hiddenOrSystemDirs)
   );
-  const allSourceFilenamesThatAreDirectories = await filterAsync(
+  const allSourceFilenamesThatAreDirectories = (await filterAsync(
     allSourceFilenames,
     (filename) => isDirectory(filename)
-  );
+  )).filter((filename) => !filename.match(hiddenOrSystemDirs));
 
-  //
-  const
-
-
-
+  // Build set of valid internal paths for link validation (must be before menu)
+  const validPaths = buildValidPaths(allSourceFilenamesThatAreArticles, source);
+  console.log(`Built ${validPaths.size} valid paths for link validation`);
+
+  const menu = await getMenu(allSourceFilenames, source, validPaths);
+
+  // Load content hash cache from .ursa folder in source directory
+  let hashCache = new Map();
+  if (!_clean) {
+    hashCache = await loadHashCache(source);
+    console.log(`Loaded ${hashCache.size} cached content hashes from .ursa folder`);
+  } else {
+    console.log(`Clean build: ignoring cached hashes`);
+  }
+
+  // create public folder
+  const pub = join(output, "public");
+  await mkdir(pub, { recursive: true });
+  await copyDir(meta, pub);
 
+  // Track errors for error report
+  const errors = [];
+
+  // First pass: collect search index data
+  const searchIndex = [];
+  const jsonCache = new Map();
+
+  // Collect basic data for search index
+  for (const file of allSourceFilenamesThatAreArticles) {
+    try {
      const rawBody = await readFile(file, "utf8");
      const type = parse(file).ext;
-      const meta = extractMetadata(rawBody);
-      const rawMeta = extractRawMetadata(rawBody);
-      const bodyLessMeta = rawBody.replace(rawMeta, "");
-      const transformedMetadata = await getTransformedMetadata(
-        dirname(file),
-        meta
-      );
      const ext = extname(file);
      const base = basename(file, ext);
      const dir = addTrailingSlash(dirname(file)).replace(source, "");
+
+      // Generate title from filename (in title case)
+      const title = toTitleCase(base);
+
+      // Generate URL path relative to output
+      const relativePath = file.replace(source, '').replace(/\.(md|txt|yml)$/, '.html');
+      const url = relativePath.startsWith('/') ? relativePath : '/' + relativePath;
+
+      // Basic content processing for search (without full rendering)
      const body = renderFile({
        fileContents: rawBody,
        type,
        dirname: dir,
        basename: base,
      });
+
+      // Extract text content from body (strip HTML tags for search)
+      const textContent = body && body.replace && body.replace(/<[^>]*>/g, ' ').replace(/\s+/g, ' ').trim() || 'body is undefined for some reason'
+      const excerpt = textContent.substring(0, 200); // First 200 chars for preview
+
+      searchIndex.push({
+        title: title,
+        path: relativePath,
+        url: url,
+        content: excerpt
+      });
+    } catch (e) {
+      console.error(`Error processing ${file} (first pass): ${e.message}`);
+      errors.push({ file, phase: 'search-index', error: e });
+    }
+  }
+
+  console.log(`Built search index with ${searchIndex.length} entries`);
+
+  // Track files that were regenerated (for incremental mode stats)
+  let regeneratedCount = 0;
+  let skippedCount = 0;
 
-
-
-
-
-
-
-
-
-
-
-
-
-.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  // Second pass: process individual articles with search data available
+  await Promise.all(
+    allSourceFilenamesThatAreArticles.map(async (file) => {
+      try {
+        const rawBody = await readFile(file, "utf8");
+
+        // Skip files that haven't changed (unless --clean flag is set)
+        if (!_clean && !needsRegeneration(file, rawBody, hashCache)) {
+          skippedCount++;
+          return; // Skip this file
+        }
+
+        console.log(`processing article ${file}`);
+        regeneratedCount++;
+
+        const type = parse(file).ext;
+        const meta = extractMetadata(rawBody);
+        const rawMeta = extractRawMetadata(rawBody);
+        const bodyLessMeta = rawMeta ? rawBody.replace(rawMeta, "") : rawBody;
+        const transformedMetadata = await getTransformedMetadata(
+          dirname(file),
+          meta
+        );
+        const ext = extname(file);
+        const base = basename(file, ext);
+        const dir = addTrailingSlash(dirname(file)).replace(source, "");
+
+        // Calculate the document's URL path (e.g., "/character/index.html")
+        const docUrlPath = '/' + dir + base + '.html';
+
+        // Generate title from filename (in title case)
+        const title = toTitleCase(base);
+
+        const body = renderFile({
+          fileContents: rawBody,
+          type,
+          dirname: dir,
+          basename: base,
+        });
+
+        // Find nearest style.css or _style.css up the tree
+        let embeddedStyle = "";
+        try {
+          const css = await findStyleCss(resolve(_source, dir));
+          if (css) {
+            embeddedStyle = css;
+          }
+        } catch (e) {
+          // ignore
+          console.error(e);
+        }
+
+        const requestedTemplateName = meta && meta.template;
+        const template =
+          templates[requestedTemplateName] || templates[DEFAULT_TEMPLATE_NAME];
+
+        if (!template) {
+          throw new Error(`Template not found. Requested: "${requestedTemplateName || DEFAULT_TEMPLATE_NAME}". Available templates: ${Object.keys(templates).join(', ') || 'none'}`);
+        }
+
+        // Insert embeddedStyle just before </head> if present, else at top
+        let finalHtml = template
+          .replace("${title}", title)
+          .replace("${menu}", menu)
+          .replace("${meta}", JSON.stringify(meta))
+          .replace("${transformedMetadata}", transformedMetadata)
+          .replace("${body}", body)
+          .replace("${embeddedStyle}", embeddedStyle)
+          .replace("${searchIndex}", JSON.stringify(searchIndex));
+
+        // Resolve links and mark broken internal links as inactive (debug mode on)
+        // Pass docUrlPath so relative links can be resolved correctly
+        finalHtml = markInactiveLinks(finalHtml, validPaths, docUrlPath, false);
+
+        const outputFilename = file
+          .replace(source, output)
+          .replace(parse(file).ext, ".html");
+
+        console.log(`writing article to ${outputFilename}`);
+
+        await outputFile(outputFilename, finalHtml);
+
+        // json
+
+        const jsonOutputFilename = outputFilename.replace(".html", ".json");
+        const url = '/' + outputFilename.replace(output, '');
+        const jsonObject = {
+          name: base,
+          url,
+          contents: rawBody,
+          // bodyLessMeta: bodyLessMeta,
+          bodyHtml: body,
+          metadata: meta,
+          transformedMetadata,
+          // html: finalHtml,
+        };
+        jsonCache.set(file, jsonObject);
+        const json = JSON.stringify(jsonObject);
+        console.log(`writing article to ${jsonOutputFilename}`);
+        await outputFile(jsonOutputFilename, json);
+
+        // xml
+
+        const xmlOutputFilename = outputFilename.replace(".html", ".xml");
+        const xml = `<article>${o2x(jsonObject)}</article>`;
+        await outputFile(xmlOutputFilename, xml);
+
+        // Update the content hash for this file
+        updateHash(file, rawBody, hashCache);
+      } catch (e) {
+        console.error(`Error processing ${file} (second pass): ${e.message}`);
+        errors.push({ file, phase: 'article-generation', error: e });
+      }
    })
  );
 
+  // Log build stats
+  console.log(`Build: ${regeneratedCount} regenerated, ${skippedCount} unchanged`);
+
  console.log(jsonCache.keys());
+
  // process directory indices
  await Promise.all(
    allSourceFilenamesThatAreDirectories.map(async (dir) => {
-
-
-      const pathsInThisDirectory = allSourceFilenames.filter((filename) =>
-        filename.match(new RegExp(`${dir}.+`))
-      );
-
-      const jsonObjects = pathsInThisDirectory
-        .map((path) => {
-          const object = jsonCache.get(path);
-          return typeof object === "object" ? object : null;
-        })
-        .filter((a) => a);
+      try {
+        console.log(`processing directory ${dir}`);
 
-
+        const pathsInThisDirectory = allSourceFilenames.filter((filename) =>
+          filename.match(new RegExp(`${dir}.+`))
+        );
 
-
-
-      console.log(`writing directory index to ${outputFilename}`);
-      await outputFile(outputFilename, json);
-
-      // html
-      const htmlOutputFilename = dir.replace(source, output) + ".html";
-      const indexAlreadyExists = fileExists(htmlOutputFilename);
-      if (!indexAlreadyExists) {
-        const template = templates["default-template"]; // TODO: figure out a way to specify template for a directory index
-        const indexHtml = `<ul>${pathsInThisDirectory
+        const jsonObjects = pathsInThisDirectory
          .map((path) => {
-            const
-
-            .replace(parse(path).ext, ".html");
-            const name = basename(path, parse(path).ext);
-            return `<li><a href="${partialPath}">${name}</a></li>`;
+            const object = jsonCache.get(path);
+            return typeof object === "object" ? object : null;
          })
-          .
-
-
-
-
-
+          .filter((a) => a);
+
+        const json = JSON.stringify(jsonObjects);
+
+        const outputFilename = dir.replace(source, output) + ".json";
+
+        console.log(`writing directory index to ${outputFilename}`);
+        await outputFile(outputFilename, json);
+
+        // html
+        const htmlOutputFilename = dir.replace(source, output) + ".html";
+        const indexAlreadyExists = fileExists(htmlOutputFilename);
+        if (!indexAlreadyExists) {
+          const template = templates["default-template"]; // TODO: figure out a way to specify template for a directory index
+          const indexHtml = `<ul>${pathsInThisDirectory
+            .map((path) => {
+              const partialPath = path
+                .replace(source, "")
+                .replace(parse(path).ext, ".html");
+              const name = basename(path, parse(path).ext);
+              return `<li><a href="${partialPath}">${name}</a></li>`;
+            })
+            .join("")}</ul>`;
+          const finalHtml = template
+            .replace("${menu}", menu)
+            .replace("${body}", indexHtml)
+            .replace("${searchIndex}", JSON.stringify(searchIndex))
+            .replace("${title}", "Index")
+            .replace("${meta}", "{}")
+            .replace("${transformedMetadata}", "")
+            .replace("${embeddedStyle}", "");
+          console.log(`writing directory index to ${htmlOutputFilename}`);
+          await outputFile(htmlOutputFilename, finalHtml);
+        }
+      } catch (e) {
+        console.error(`Error processing directory ${dir}: ${e.message}`);
+        errors.push({ file: dir, phase: 'directory-index', error: e });
      }
    })
  );
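
The contentHash helpers imported above are not shown in this diff. A minimal sketch of the shape implied by the calls (hashContent, loadHashCache, saveHashCache, needsRegeneration, updateHash), assuming a SHA-256 digest and a JSON file under the .ursa folder; the hashes.json file name and the hashing details are assumptions, not confirmed by the package:

import { createHash } from 'crypto';
import { readFile, writeFile, mkdir } from 'fs/promises';
import { join } from 'path';

export function hashContent(content) {
  return createHash('sha256').update(content).digest('hex');
}

export async function loadHashCache(source) {
  try {
    const raw = await readFile(join(source, '.ursa', 'hashes.json'), 'utf8');
    return new Map(Object.entries(JSON.parse(raw)));
  } catch {
    return new Map(); // no cache yet
  }
}

export async function saveHashCache(source, hashCache) {
  await mkdir(join(source, '.ursa'), { recursive: true });
  await writeFile(join(source, '.ursa', 'hashes.json'), JSON.stringify(Object.fromEntries(hashCache)));
}

export function needsRegeneration(file, content, hashCache) {
  return hashCache.get(file) !== hashContent(content);
}

export function updateHash(file, content, hashCache) {
  hashCache.set(file, hashContent(content));
}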
@@ -186,15 +369,73 @@ export async function generate({
   );
   await Promise.all(
     allSourceFilenamesThatAreImages.map(async (file) => {
-
-
-
-
-
-
-
+      try {
+        // For incremental mode, check if file has changed using file stat as a quick check
+        if (_incremental) {
+          const fileStat = await stat(file);
+          const statKey = `${file}:stat`;
+          const newStatHash = `${fileStat.size}:${fileStat.mtimeMs}`;
+          if (hashCache.get(statKey) === newStatHash) {
+            return; // Skip unchanged static file
+          }
+          hashCache.set(statKey, newStatHash);
+        }
+
+        console.log(`processing static file ${file}`);
+
+        const outputFilename = file.replace(source, output);
+
+        console.log(`writing static file to ${outputFilename}`);
+
+        await mkdir(dirname(outputFilename), { recursive: true });
+        return await copyFile(file, outputFilename);
+      } catch (e) {
+        console.error(`Error processing static file ${file}: ${e.message}`);
+        errors.push({ file, phase: 'static-file', error: e });
+      }
    })
  );
+
+  // Save the hash cache to .ursa folder in source directory
+  if (hashCache.size > 0) {
+    await saveHashCache(source, hashCache);
+  }
+
+  // Write error report if there were any errors
+  if (errors.length > 0) {
+    const errorReportPath = join(output, '_errors.log');
+    const failedFiles = errors.map(e => e.file);
+
+    let report = `URSA GENERATION ERROR REPORT\n`;
+    report += `Generated: ${new Date().toISOString()}\n`;
+    report += `Total errors: ${errors.length}\n\n`;
+    report += `${'='.repeat(60)}\n`;
+    report += `FAILED FILES:\n`;
+    report += `${'='.repeat(60)}\n\n`;
+    failedFiles.forEach(f => {
+      report += ` - ${f}\n`;
+    });
+    report += `\n${'='.repeat(60)}\n`;
+    report += `ERROR DETAILS:\n`;
+    report += `${'='.repeat(60)}\n\n`;
+
+    errors.forEach(({ file, phase, error }) => {
+      report += `${'─'.repeat(60)}\n`;
+      report += `File: ${file}\n`;
+      report += `Phase: ${phase}\n`;
+      report += `Error: ${error.message}\n`;
+      if (error.stack) {
+        report += `Stack:\n${error.stack}\n`;
+      }
+      report += `\n`;
+    });
+
+    await outputFile(errorReportPath, report);
+    console.log(`\n⚠️ ${errors.length} error(s) occurred during generation.`);
+    console.log(` Error report written to: ${errorReportPath}\n`);
+  } else {
+    console.log(`\n✅ Generation complete with no errors.\n`);
+  }
 }
 
 /**
@@ -222,10 +463,10 @@ async function getTemplates(meta) {
   return templates;
 }
 
-async function getMenu(allSourceFilenames, source) {
+async function getMenu(allSourceFilenames, source, validPaths) {
   // todo: handle various incarnations of menu filename
 
-  const rawMenu = await getAutomenu(source);
+  const rawMenu = await getAutomenu(source, validPaths);
   const menuBody = renderFile({ fileContents: rawMenu, type: ".md" });
   return menuBody;
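
The linkValidator helpers (buildValidPaths, markInactiveLinks) are likewise not shown in this diff. Based on how they are called above, buildValidPaths presumably returns a sized collection of internal output URLs; a minimal sketch assuming a Set and the same .html path derivation used for the search index (an illustration, not the package's implementation):

export function buildValidPaths(articleFiles, source) {
  const validPaths = new Set();
  for (const file of articleFiles) {
    // e.g. "<source>/characters/bob.md" -> "/characters/bob.html"
    const relativePath = file.replace(source, '').replace(/\.(md|txt|yml)$/, '.html');
    validPaths.add(relativePath.startsWith('/') ? relativePath : '/' + relativePath);
  }
  return validPaths;
}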