@rettangoli/sites 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +103 -0
- package/package.json +54 -0
- package/src/cli/build.js +166 -0
- package/src/cli/components/core/articlelist.html +19 -0
- package/src/cli/components/core/cta1.html +9 -0
- package/src/cli/components/core/features1.html +24 -0
- package/src/cli/components/core/hero1.html +4 -0
- package/src/cli/components/core/hero2.html +31 -0
- package/src/cli/components/core/sectionlist1.html +15 -0
- package/src/cli/components/core/spacer.html +1 -0
- package/src/cli/components/core/table1.html +2 -0
- package/src/cli/index.js +5 -0
- package/src/cli/templates/core/admin.html +21 -0
- package/src/cli/templates/core/agreementList.html +102 -0
- package/src/cli/templates/core/article.html +35 -0
- package/src/cli/templates/core/base.html +28 -0
- package/src/cli/templates/core/documentation.html +28 -0
- package/src/cli/templates/core/footer.html +34 -0
- package/src/cli/templates/core/htmlHeader.html +41 -0
- package/src/cli/templates/core/htmlHeaderTable.html +113 -0
- package/src/cli/templates/core/navbar.html +11 -0
- package/src/common.js +803 -0
- package/src/index.js +0 -0
- package/src/markdownItAsync.js +101 -0
package/src/common.js
ADDED
|
@@ -0,0 +1,803 @@
|
|
|
1
|
+
import {
|
|
2
|
+
readFile,
|
|
3
|
+
mkdir,
|
|
4
|
+
readdir,
|
|
5
|
+
writeFile,
|
|
6
|
+
copyFile,
|
|
7
|
+
rm,
|
|
8
|
+
} from "node:fs/promises";
|
|
9
|
+
import { readFileSync, readdirSync, existsSync } from "node:fs";
|
|
10
|
+
import { join } from "node:path";
|
|
11
|
+
|
|
12
|
+
import { minify } from "html-minifier-terser";
|
|
13
|
+
import yaml from "js-yaml";
|
|
14
|
+
import { Liquid } from "liquidjs";
|
|
15
|
+
import MarkdownIt from "./markdownItAsync.js";
|
|
16
|
+
import { codeToHtml } from "shiki";
|
|
17
|
+
|
|
18
|
+
// Try to get CleanCSS from html-minifier-terser's dependencies
|
|
19
|
+
import CleanCSS from "clean-css";
|
|
20
|
+
|
|
21
|
+
/**
 * Parse a YAML string without letting a malformed document crash the build.
 *
 * @param {string} content - Raw YAML text to parse.
 * @param {object} defaultValue - Fallback returned when parsing fails or the
 *   document is empty/falsy.
 * @returns {object} The parsed document, or `defaultValue` on error.
 */
export const safeYamlLoad = (content, defaultValue = {}) => {
  let parsed;
  try {
    parsed = yaml.load(content);
  } catch (error) {
    console.error("Error parsing YAML:", error);
    return defaultValue;
  }
  // yaml.load returns undefined/null for empty documents; normalize those too.
  return parsed || defaultValue;
};
|
|
35
|
+
|
|
36
|
+
/**
 * Read a file from disk, returning an empty string instead of throwing.
 *
 * @param {string} filePath - Path to the file.
 * @param {string} encoding - Text encoding to decode with (defaults to utf8).
 * @returns {Promise<string>} The file contents, or "" when the read fails.
 */
export const safeReadFile = async (filePath, encoding = "utf8") => {
  try {
    // @ts-ignore - Node.js fs types issue
    const contents = await readFile(filePath, encoding);
    return contents;
  } catch (error) {
    // Log and degrade to an empty string so callers never have to try/catch.
    console.error(`Error reading file ${filePath}:`, error);
    return "";
  }
};
|
|
51
|
+
|
|
52
|
+
/**
 * Convert heading text to a URL-friendly anchor id.
 *
 * Trims surrounding whitespace *before* substituting spaces, so input like
 * " Hello World " yields "hello-world". The previous version trimmed last,
 * after spaces had already become hyphens, which made the trim a no-op and
 * left stray leading/trailing hyphens in the slug.
 *
 * @param {string} text - Heading text to slugify.
 * @returns {string} Lowercased, hyphen-separated slug.
 */
const generateSlug = (text) => {
  return text
    .toLowerCase()
    .trim() // Trim first so edge whitespace cannot turn into hyphens
    .replace(/[^\w\s-]/g, "") // Remove special characters
    .replace(/\s+/g, "-"); // Replace spaces with hyphens
};
|
|
60
|
+
|
|
61
|
+
/**
 * Split a document into its YAML frontmatter and body.
 *
 * @param {string} content - Raw document, optionally starting with a
 *   `---` fenced frontmatter block.
 * @returns {object} `{ frontmatter, content }` — `frontmatter` is the parsed
 *   YAML object ({} when absent), `content` is the trimmed remainder
 *   (untouched when there is no frontmatter fence).
 */
const extractFrontmatter = (content) => {
  const match = content.match(/^---\r?\n([\s\S]+?)\r?\n---\r?\n/m);

  if (!match) {
    // No frontmatter fence: body passes through unmodified.
    return { frontmatter: {}, content };
  }

  return {
    frontmatter: safeYamlLoad(match[1]),
    content: content.substring(match[0].length).trim(),
  };
};
|
|
80
|
+
|
|
81
|
+
/**
 * Build a nested table-of-contents tree from markdown heading lines.
 *
 * Scans for ATX headings (levels 1-4, i.e. `#` through `####`) and nests
 * each heading under the closest preceding heading of a shallower level.
 *
 * @param {string} content - Markdown source.
 * @returns {Array} Tree of `{ title, id, items }` nodes (empty when no headings).
 */
const generateTableOfContents = (content) => {
  const toc = [];
  // Open-parent chain, innermost last; the level-0 sentinel anchors the root.
  const parents = [{ level: 0, items: toc }];

  for (const match of content.matchAll(/^(#{1,4})\s+(.+)$/gm)) {
    const level = match[1].length; // Number of '#' characters
    const title = match[2].trim();

    // Unwind until the top of the stack is strictly shallower than this heading.
    while (parents[parents.length - 1].level >= level) {
      parents.pop();
    }

    const node = { title, id: generateSlug(title), items: [] };
    parents[parents.length - 1].items.push(node);

    // This heading becomes the open parent for deeper headings that follow.
    parents.push({ level, items: node.items });
  }

  return toc;
};
|
|
115
|
+
|
|
116
|
+
/**
 * Report whether a value is a plain (non-array) object.
 *
 * Always returns a real boolean. The previous truthiness-based form
 * (`item && ...`) leaked the falsy operand itself — `null`, `0`, `""` —
 * as the return value; every caller uses the result in a boolean
 * context, so normalizing is backward compatible.
 *
 * @param {*} item - Value to inspect.
 * @returns {boolean} True for non-null, non-array objects.
 */
export const isObject = (item) => {
  return item !== null && typeof item === "object" && !Array.isArray(item);
};
|
|
120
|
+
|
|
121
|
+
/**
 * Deep-merge one or more source objects into a copy of `target`.
 *
 * Nested plain objects are merged recursively; every other value (arrays
 * included) is overwritten by later sources. The caller's `target` is never
 * mutated — a shallow clone is taken at each level.
 *
 * NOTE(review): keys are enumerated with a plain `for...in`, so merging
 * untrusted input could pollute prototypes — confirm all sources are
 * trusted (here they come from local YAML/frontmatter files).
 *
 * @param {object} target - Base object to merge onto.
 * @param {...object} sources - Objects whose values win, left to right.
 * @returns {object} The merged result.
 */
export const deepMerge = (target, ...sources) => {
  // Clone so the original target is left untouched.
  const merged = isObject(target) ? { ...target } : target;

  if (sources.length === 0) {
    return merged;
  }

  const [head, ...rest] = sources;

  if (isObject(merged) && isObject(head)) {
    for (const key in head) {
      if (isObject(head[key])) {
        // Descend: merge into the existing branch, creating it if missing.
        merged[key] = deepMerge(merged[key] || {}, head[key]);
      } else {
        // Non-objects (and arrays) are replaced wholesale.
        merged[key] = head[key];
      }
    }
  }

  // Fold in any remaining sources, left to right.
  return rest.length ? deepMerge(merged, ...rest) : merged;
};
|
|
146
|
+
|
|
147
|
+
/**
 * Build a synchronous Liquid template rendering function.
 *
 * @param {object} [options]
 * @param {object} [options.filters] - Map of filter name -> filter function
 *   to register on the engine. Optional: previously omitting it crashed on
 *   `Object.entries(undefined)`; it now defaults to no custom filters.
 * @param {object} [options.templates] - In-memory template map handed to LiquidJS.
 * @returns {(template: string, data: object) => string} Renderer that parses
 *   and renders a template string against a data object.
 */
export const createTemplateRenderer = (options = {}) => {
  // Default filters to {} so callers registering no custom filters don't throw.
  const { filters = {}, templates } = options;

  // Setup LiquidJS engine
  const engine = new Liquid({
    templates,
    strictFilters: true,
    cache: true,
  });

  Object.entries(filters).forEach(([key, value]) => {
    engine.registerFilter(key, value);
  });

  return (template, data) => {
    const tpl = engine.parse(template);

    return engine.renderSync(tpl, data);
  };
};
|
|
167
|
+
|
|
168
|
+
/**
 * Ensure a folder (and any missing parents) exists, logging the outcome.
 *
 * Errors are logged rather than thrown so a failed mkdir does not abort
 * the surrounding build.
 *
 * @param {string} folder - Directory path to create.
 * @returns {Promise<void>}
 */
export const createFolderIfNotExists = async (folder) => {
  try {
    // recursive: true also makes an already-existing folder a no-op success.
    await mkdir(folder, { recursive: true });
  } catch (error) {
    console.error(`Error creating folder ${folder}:`, error);
    return;
  }
  console.log(`Created folder: ${folder}`);
};
|
|
176
|
+
|
|
177
|
+
/**
 * Derive a site URL from a markdown file path.
 *
 * Strips the base directory prefix, drops the `.md` extension and any
 * trailing `index` segment, and normalizes the result to a leading- and
 * trailing-slash form (`/blog/post/`; the site root is `/`).
 *
 * @param {string} basePath - Content root directory (e.g. "./pages").
 * @param {string} filePath - Path of the file under that root.
 * @returns {string} Normalized URL path.
 */
const generateUrlFromPath = (basePath, filePath) => {
  // Always compare against a trailing-slash form of the base path.
  const prefix = basePath.endsWith("/") ? basePath : basePath + "/";

  // Try a few spellings of the prefix (as-is, without its first char,
  // without "./") so the split works however the caller formatted paths.
  const candidates = [prefix, prefix.substring(1), prefix.replace("./", "")];

  let relativePath = filePath;
  for (const candidate of candidates) {
    if (filePath.includes(candidate)) {
      relativePath = filePath.split(candidate)[1] || "";
      break;
    }
  }

  // Drop the markdown extension and a trailing "/index" segment.
  let url = relativePath.replace(/\.md$/, "").replace(/\/index$/, "");

  if (!url.startsWith("/")) {
    url = "/" + url;
  }

  // Collapse both root spellings to "/".
  if (url === "" || url === "/index") {
    url = "/";
  }

  // Non-root URLs always end with a trailing slash.
  if (url !== "/" && !url.endsWith("/")) {
    url += "/";
  }

  return url;
};
|
|
225
|
+
|
|
226
|
+
/**
 * Configure Markdown renderer with custom elements and styling.
 *
 * Wires the async markdown-it wrapper with:
 *  - a `highlight` hook that dispatches on fence attributes:
 *      * "components"  -> rendered via the injected yamlComponentRenderer
 *                         (a throw there is fatal and exits the process)
 *      * "codePreview" -> the raw markup shown live, above its
 *                         shiki-highlighted source
 *      * otherwise     -> plain shiki highlighting ("slack-dark" theme)
 *  - renderer rules emitting <rtgl-*> custom elements for headings,
 *    paragraphs and tables, plus external-link hardening.
 *
 * @param {object} deps
 * @param {(code: string) => string} deps.yamlComponentRenderer - Renders a
 *   YAML component block to HTML.
 * @returns {object} The configured markdown-it instance.
 */
export const configureMarkdown = ({ yamlComponentRenderer }) => {
  const md = MarkdownIt({
    async highlight(code, lang, attrs) {
      // Fence attribute "components": delegate the whole block.
      if (attrs.includes("components")) {
        try {
          return yamlComponentRenderer(code);
        } catch (error) {
          // A broken component block aborts the entire build.
          console.error(error);
          process.exit(1);
        }
      }
      // Fence attribute "codePreview": live preview + highlighted source.
      if (attrs.includes("codePreview")) {
        const formattedCode = await codeToHtml(code, {
          lang,
          theme: "slack-dark",
        });
        return `
<rtgl-view w="f" bw="xs" br="md">
  <rtgl-view w="f" p="lg">
    ${code}
  </rtgl-view>
  <rtgl-view h="1" w="f" bgc="bo"></rtgl-view>
  <rtgl-view w="f" d="h">
    ${formattedCode}
  </rtgl-view>
</rtgl-view>`
      ;
      }
      // Default: plain syntax highlighting.
      return await codeToHtml(code, { lang, theme: "slack-dark" });
    },
    warnOnSyncRender: true,
  });

  // Header configuration: headings become <rtgl-text> with a slug id and a
  // self-referencing anchor so every heading is deep-linkable.
  md.renderer.rules.heading_open = (tokens, idx, options, env, self) => {
    const token = tokens[idx];
    const level = token.markup.length;
    // The inline token right after heading_open carries the heading text.
    const inlineToken = tokens[idx + 1];
    const headingText = inlineToken.content;
    const id = generateSlug(headingText);

    // Map heading levels to size values
    const sizes = { 1: "h1", 2: "h2", 3: "h3", 4: "h4" };
    const size = sizes[level] || "md";

    return `<rtgl-text id="${id}" mt="lg" s="${size}" mb="md"> <a href="#${id}" style="display: contents;">`;
  };

  md.renderer.rules.heading_close = () => "</a></rtgl-text>\n";

  // Paragraph configuration
  md.renderer.rules.paragraph_open = () => `<rtgl-text s="bl" mb="lg">`;
  md.renderer.rules.paragraph_close = () => "</rtgl-text>\n";

  // Table configuration: wrap tables so they can scroll full-width.
  md.renderer.rules.table_open = () => '<rtgl-view w="f">\n<table>';
  md.renderer.rules.table_close = () => "</table>\n</rtgl-view>";

  // Link configuration - add target="_blank" to all external links
  md.renderer.rules.link_open = (tokens, idx, options, env, self) => {
    const token = tokens[idx];
    const targetIndex = token.attrIndex("target");
    const href =
      (token.attrs && token.attrs.find((attr) => attr[0] === "href")?.[1]) ||
      "";
    const isExternal = href.startsWith("http") || href.startsWith("//");

    // If this is an external link or already has target="_blank"
    if (isExternal || targetIndex >= 0) {
      if (targetIndex < 0) {
        token.attrPush(["target", "_blank"]);
      }
      // NOTE(review): rel="noreferrer" is pushed unconditionally, so a link
      // that already carries a rel attr ends up with a duplicate — confirm.
      token.attrPush(["rel", "noreferrer"]);

      // Find the next text token to use for the aria-label
      let nextIdx = idx + 1;
      let textContent = "";
      while (nextIdx < tokens.length && tokens[nextIdx].type !== "link_close") {
        if (tokens[nextIdx].type === "text") {
          textContent += tokens[nextIdx].content;
        }
        nextIdx++;
      }

      // Add aria-label for external links (screen-reader hint about the tab).
      if (textContent.trim() && token.attrIndex("aria-label") < 0) {
        token.attrPush([
          "aria-label",
          `${textContent.trim()} (opens in new tab)`,
        ]);
      }
    }

    return self.renderToken(tokens, idx, options);
  };

  return md;
};
|
|
327
|
+
|
|
328
|
+
/**
 * Synchronously load every file under a directory into a keyed object.
 *
 * Keys are slash-joined paths relative to the root ('core/t1', or
 * 'core/t1.html' when keepExtension is true). Values are raw file contents,
 * or parsed YAML when isYaml is set. Subdirectories are walked when
 * recursive is true; unreadable files are logged and skipped.
 *
 * @param {Object} options - Configuration options
 * @param {string} options.path - Directory path to load files from
 * @param {string} options.name - Name of the collection for logging purposes
 * @param {boolean} options.isYaml - Whether to parse files as YAML
 * @param {boolean} [options.recursive=true] - Whether to recursively load nested directories
 * @param {boolean} [options.keepExtension=false] - Whether to keep file extensions in the keys
 * @returns {Object} Object with path-based keys
 *
 * @example
 * // { 'core/t1': "<html content>" }  (keepExtension=false)
 * // { 'core/t1.html': "<html content>" }  (keepExtension=true)
 * const templates = loadItems({ path: './templates', name: 'templates', isYaml: false });
 */
export const loadItems = ({
  path,
  name,
  isYaml,
  recursive = true,
  keepExtension = false,
}) => {
  const result = {};

  // Normalize path to remove trailing slash
  const basePath = path.endsWith("/") ? path.slice(0, -1) : path;

  /**
   * Depth-first directory walk. `segments` accumulates the relative
   * directory parts that prefix each key.
   * @param {string} dir - Directory currently being processed
   * @param {string[]} segments - Relative path segments so far
   */
  const walk = (dir, segments = []) => {
    try {
      if (!existsSync(dir)) {
        console.error(`Directory not found: ${dir}`);
        return;
      }

      for (const entry of readdirSync(dir, { withFileTypes: true })) {
        const entryPath = join(dir, entry.name);

        if (entry.isDirectory()) {
          // Descend only when recursion is enabled; otherwise skip the dir.
          if (recursive) {
            walk(entryPath, [...segments, entry.name]);
          }
          continue;
        }

        // Strip the extension unless the caller asked to keep it.
        const keyName = keepExtension
          ? entry.name
          : entry.name.replace(/\.[^/.]+$/, "");

        let content;
        try {
          content = readFileSync(entryPath, "utf8");
        } catch (err) {
          console.error(`Error reading file ${entryPath}:`, err);
          continue;
        }

        // Keys always use "/" regardless of the OS path separator.
        const key =
          segments.length > 0 ? [...segments, keyName].join("/") : keyName;
        result[key] = isYaml ? safeYamlLoad(content) : content;
      }
    } catch (error) {
      console.error(`Error processing directory ${dir}:`, error);
    }
  };

  walk(basePath);

  // Log keys for debugging
  console.log(
    `Loaded ${Object.keys(result).length} ${name}: ${Object.keys(result).join(
      ", "
    )}`
  );

  return result;
};
|
|
440
|
+
|
|
441
|
+
/**
 * Walk every .md file under basePath and group page data into collections.
 *
 * Each page's frontmatter is extended with `content` (the body without its
 * frontmatter block) and `url` (derived from the file path). Every page is
 * added to `collections.all`; tagged pages are additionally grouped under
 * each tag in `frontmatter.tags`, and untagged pages land in
 * `collections.untagged`. Each collection is sorted newest-first by `date`
 * when both compared items carry one.
 *
 * @param {string} basePath - Root directory of the markdown pages.
 * @returns {Promise<{ all: any[], [key: string]: any[] }>} Tag -> pages map
 *   (falls back to `{ all: [] }` on error).
 */
export const loadCollections = async (basePath) => {
  /** @type {{ all: any[], [key: string]: any[] }} */
  const collections = { all: [] }; // 'all' is the special catch-all bucket

  // Recursively gather the paths of every .md file under `dir`.
  const findMarkdownFiles = async (dir) => {
    const found = [];
    for (const entry of await readdir(dir, { withFileTypes: true })) {
      const fullPath = join(dir, entry.name);
      if (entry.isDirectory()) {
        found.push(...(await findMarkdownFiles(fullPath)));
      } else if (entry.name.endsWith(".md")) {
        found.push(fullPath);
      }
    }
    return found;
  };

  try {
    const markdownFiles = await findMarkdownFiles(basePath);

    for (const filePath of markdownFiles) {
      const raw = await safeReadFile(filePath);
      const { frontmatter, content } = extractFrontmatter(raw);

      // Page record = frontmatter + body + canonical URL.
      const pageData = {
        ...frontmatter,
        content,
        url: generateUrlFromPath(basePath, filePath),
      };

      collections.all.push(pageData);

      if (Array.isArray(pageData.tags)) {
        for (const tag of pageData.tags) {
          if (!collections[tag]) {
            collections[tag] = [];
          }
          collections[tag].push(pageData);
        }
      } else {
        // No tags array at all -> 'untagged' bucket.
        if (!collections.untagged) {
          collections.untagged = [];
        }
        collections.untagged.push(pageData);
      }
    }

    // Newest first when both sides carry a date; otherwise keep file order.
    for (const tag in collections) {
      collections[tag].sort((a, b) =>
        a.date && b.date
          ? new Date(b.date).getTime() - new Date(a.date).getTime()
          : 0
      );
    }

    console.log(`Processed ${markdownFiles.length} markdown files`);
    console.log(`Found ${Object.keys(collections).length} collections`);

    return collections;
  } catch (error) {
    console.error("Error loading collections:", error);
    return { all: [] };
  }
};
|
|
536
|
+
|
|
537
|
+
/**
 * Build the map of per-extension file handlers used by the site builder.
 *
 * Each handler exposes:
 *  - process(content, srcPath): transform the file contents for output
 *  - outputExt: extension of the emitted file
 *  - forceFolderWithIndex (optional): emit as <name>/index.<ext> for
 *    pretty URLs instead of <name>.<ext>
 *
 * @param {object} deps
 * @param {string} deps.basePath - Content root, used to derive page URLs.
 * @param {object} deps.templates - Layout templates keyed by filename (with extension).
 * @param {Function} deps.liquidParse - (template, data) => rendered HTML.
 * @param {object} deps.collections - Tag -> page-data arrays (from loadCollections).
 * @param {object} deps.data - Global site data merged into every layout render.
 * @param {object} deps.md - Configured markdown-it instance (async render).
 * @returns {object} Extension -> handler map (md, html, css).
 */
export const createFileFormatHandlers = ({
  basePath,
  templates,
  liquidParse,
  collections,
  data,
  md,
}) => {
  /**
   * Define handlers for different file formats
   */
  return {
    md: {
      // Markdown -> layout-rendered, minified HTML page.
      process: async (content, srcPath) => {
        // Extract the frontmatter and content
        const {
          frontmatter: frontmatterData,
          content: contentWithoutFrontmatter,
        } = extractFrontmatter(content);

        // Convert markdown to HTML
        const htmlContent = await md.renderAsync(contentWithoutFrontmatter);

        // Determine layout to use.
        // NOTE(review): no default layout is actually applied here — a page
        // without a frontmatter `layout` throws below; confirm that is intended.
        let layoutName;

        if (frontmatterData.layout) {
          // Remove file extension if present from specified layout
          layoutName = frontmatterData.layout.replace(/\.[^/.]+$/, "");
        }

        if (!layoutName) {
          throw new Error(
            `Layout not found for ${srcPath}, ${JSON.stringify(
              frontmatterData
            )}`
          );
        }

        // Generate table of contents from markdown content
        const tableOfContents = generateTableOfContents(
          contentWithoutFrontmatter
        );

        // Templates are keyed with their .html extension.
        const layoutContent = templates[`${layoutName}.html`];

        if (!layoutContent) {
          throw new Error(`Layout not found for ${srcPath}`);
        }

        // Ensure URL is set if not already in frontmatter
        if (!frontmatterData.url && srcPath) {
          frontmatterData.url = generateUrlFromPath(basePath, srcPath);
        }

        // Create merged data for the layout; later arguments win, so
        // frontmatter overrides globals and the explicit fields win overall.
        const layoutData = deepMerge(
          {},
          { ...data, collections }, // Global data
          frontmatterData, // Frontmatter data
          {
            content: htmlContent, // Rendered markdown content
            tableOfContents, // Table of contents data structure
            url: frontmatterData.url || "/", // Explicitly include the URL with default
          },
          {
            siticEnv: process.env,
          }
        );

        // Render the content within the layout
        const renderedHtml = await liquidParse(layoutContent, layoutData);

        // Minify the HTML
        const minifiedHtml = await minify(renderedHtml, {
          collapseWhitespace: true,
          removeComments: true,
          minifyCSS: true,
          minifyJS: true,
          removeRedundantAttributes: true,
          removeScriptTypeAttributes: true,
          removeStyleLinkTypeAttributes: true,
          useShortDoctype: true,
        });

        return minifiedHtml;
      },
      // Output extension for the processed file
      outputExt: "html",
      // Always create folder with index.html for markdown files
      forceFolderWithIndex: true,
    },

    // Handler for HTML files: pass through minification only.
    html: {
      process: async (content) => {
        // Minify HTML files
        const minified = await minify(content, {
          collapseWhitespace: true,
          removeComments: true,
          minifyCSS: true,
          minifyJS: true,
        });
        return minified;
      },
      outputExt: "html",
      // Always create folder with index.html for HTML files
      forceFolderWithIndex: true,
    },

    // Handler for CSS files: minify in place, same file name.
    css: {
      process: async (content) => {
        // Create a new CleanCSS instance
        const cleanCSS = new CleanCSS();
        // Minify the CSS
        return cleanCSS.minify(content).styles;
      },
      outputExt: "css",
    },

    // Add more handlers as needed, e.g.:
    // 'scss': { process: async (content) => { /* process scss */ }, outputExt: 'css' }
  };
};
|
|
663
|
+
|
|
664
|
+
/**
 * Process a single source file through its format handler and write the result.
 *
 * Index files are written as index.<ext> directly into destDir. Other files
 * either get their own folder containing an index file (when the handler
 * sets forceFolderWithIndex, producing pretty URLs) or keep their base name
 * with the handler's output extension.
 *
 * @param {string} srcPath - Source file path
 * @param {string} destDir - Destination directory
 * @param {boolean} isIndex - Whether the file is an index file
 * @param {object} fileFormatHandlers - Handlers for different file formats
 * @returns {Promise<boolean>} - Whether the file was processed
 */
const processFile = async (
  srcPath,
  destDir,
  isIndex = false,
  fileFormatHandlers
) => {
  try {
    const content = await safeReadFile(srcPath);
    const ext = srcPath.split(".").pop()?.toLowerCase() || "";
    const handler = fileFormatHandlers[ext];

    // No handler for this extension: the caller copies the file verbatim.
    if (!handler) {
      return false;
    }

    const processedContent = await handler.process(content, srcPath);
    const { outputExt } = handler;

    // File name without its extension, used for non-index outputs.
    const baseName =
      srcPath.split("/").pop()?.replace(new RegExp(`\\.${ext}$`), "") || "";

    let destPath;
    if (isIndex) {
      // index.* stays an index file in the current output directory.
      destPath = join(destDir, `index.${outputExt}`);
    } else if (handler.forceFolderWithIndex) {
      // Pretty-URL mode: about.md -> about/index.html
      const newDestDir = join(destDir, baseName);
      await createFolderIfNotExists(newDestDir);
      destPath = join(newDestDir, `index.${outputExt}`);
    } else {
      // Same name, possibly new extension, in the destination directory.
      destPath = join(destDir, `${baseName}.${outputExt}`);
    }

    // Write the processed content to the new file
    await writeFile(destPath, processedContent);
    console.log(`Converted ${srcPath} to ${destPath}`);
    return true;
  } catch (error) {
    console.error(`Error processing file ${srcPath}:`, error);
    return false;
  }
};
|
|
730
|
+
|
|
731
|
+
/**
 * Copy one file into the output tree, transforming it when a format handler
 * exists for its extension.
 *
 * Files with a registered handler are routed through processFile (which may
 * rewrite the destination for pretty URLs); everything else is copied
 * byte-for-byte. Failures are logged and reported via the return value
 * rather than thrown.
 *
 * NOTE(review): the path math here splits on "/" — this looks
 * POSIX-path-only; confirm whether Windows separators need handling.
 *
 * @param {string} srcPath - Source file path
 * @param {string} destPath - Destination file path
 * @param {object} fileFormatHandlers - Extension -> handler map
 * @returns {Promise<boolean>} - Whether the file was copied successfully
 */
const copyFileWithProcessing = async (
  srcPath,
  destPath,
  fileFormatHandlers
) => {
  try {
    const ext = srcPath.split(".").pop()?.toLowerCase() || "";
    const fileName = srcPath.split("/").pop() || "";

    // index.* files write into their own directory; others into the parent.
    const isIndex = fileName.startsWith("index.");
    const destDir = isIndex
      ? destPath.replace(new RegExp(`/index\\.${ext}$`), "")
      : destPath.replace(/\/[^/]+$/, "");

    if (fileFormatHandlers[ext]) {
      try {
        return await processFile(srcPath, destDir, isIndex, fileFormatHandlers);
      } catch (error) {
        console.error(`Error processing file ${srcPath}:`, error);
        return false;
      }
    }

    // For files without handlers, copy as-is.
    try {
      await copyFile(srcPath, destPath);
      console.log(`Copied ${srcPath} to ${destPath}`);
      return true;
    } catch (error) {
      console.error(`Error copying file ${srcPath}:`, error);
      return false;
    }
  } catch (error) {
    console.error(`Error processing file ${srcPath}:`, error);
    return false;
  }
};
|
|
776
|
+
|
|
777
|
+
/**
 * Recursively mirror a source directory into the destination, running each
 * file through copyFileWithProcessing on the way.
 *
 * @param {string} src - Source directory
 * @param {string} dest - Destination directory
 * @param {object} fileFormatHandlers - Extension -> handler map, passed down
 *   to per-file processing.
 * @returns {Promise<void>}
 */
export const copyDirRecursive = async (src, dest, fileFormatHandlers) => {
  // Make sure the mirrored directory exists before writing into it.
  await createFolderIfNotExists(dest);

  try {
    for (const entry of await readdir(src, { withFileTypes: true })) {
      const srcPath = join(src, entry.name);
      const destPath = join(dest, entry.name);

      if (entry.isDirectory()) {
        // Recurse into subdirectories.
        await copyDirRecursive(srcPath, destPath, fileFormatHandlers);
      } else {
        // Process (or plain-copy) individual files.
        await copyFileWithProcessing(srcPath, destPath, fileFormatHandlers);
      }
    }
  } catch (error) {
    console.error(`Error copying directory ${src}:`, error);
  }
};
|