hdoc-tools 0.19.7 → 0.20.0

This diff shows the changes between publicly available package versions as released to a supported public registry. It is provided for informational purposes only.
package/hdoc-build.js CHANGED
@@ -1,1503 +1,1480 @@
1
- (function () {
2
- "use strict";
3
-
4
- const crypto = require("crypto"),
5
- dree = require("dree"),
6
- fs = require("fs-extra"),
7
- mdfm = require("markdown-it-front-matter"),
8
- path = require("path"),
9
- puppeteer = require("puppeteer"),
10
- URL = require("url").URL,
11
- hdoc_validate = require(path.join(__dirname, "hdoc-validate.js")),
12
- hdoc = require(path.join(__dirname, "hdoc-module.js")),
13
- hdoc_build_db = require(path.join(__dirname, "hdoc-build-db.js")),
14
- hdoc_build_pdf = require(path.join(__dirname, "hdoc-build-pdf.js")),
15
- hdoc_index = require(path.join(__dirname, "hdoc-db.js")),
16
- archiver = require("archiver"),
17
- xmlFormat = require("xml-formatter");
18
-
19
- const h_tags_to_search = ["h1", "h2", "h3"],
20
- image_extensions = [ 'png', 'svg', 'jpg' ],
21
- doc_header_template_path = path.join(
22
- __dirname,
23
- "templates",
24
- "doc-header.html"
25
- ),
26
- non_git_doc_header_template_path = path.join(
27
- __dirname,
28
- "templates",
29
- "doc-header-non-git.html"
30
- ),
31
- pdf_header_template_path = path.join(
32
- __dirname,
33
- "templates",
34
- "pdf-header.html"
35
- ),
36
- non_git_pdf_header_template_path = path.join(
37
- __dirname,
38
- "templates",
39
- "pdf-header-non-git.html"
40
- ),
41
- pdf_template_path = path.join(__dirname, "templates", "pdf"),
42
- ui_css_path = path.join(__dirname, "ui", "css"),
43
- pdf_template_file_path = path.join(pdf_template_path, "template.html"),
44
- regex_version = /^[0-9]{1,3}[.][0-9]{1,3}[.][0-9]{1,6}$/,
45
- h1_pattern = /(<h1.*?>)\s*.*\s*(.*<\/h1>)/,
46
- regex_filename = /^[a-z]+-{0,1}([-a-z0-9]+)*$/;
47
-
48
- let bc = {}, // Breadcrumbs map
49
- built_file_hashes = [],
50
- book_read_time = 0,
51
- browser = {},
52
- conversion_attempted = 0,
53
- conversion_success = 0,
54
- conversion_failed = 0,
55
- css_templates = [],
56
- doc_header_template = "",
57
- doc_header_template_non_git = "",
58
- errors_filename = [],
59
- global_source_path = "",
60
- pdf_created = 0,
61
- pdf_enable = false,
62
- pdf_header_template = "",
63
- pdf_header_template_non_git = "",
64
- pdf_template = "",
65
- prod_families = {},
66
- prods_supported = [],
67
- doc_id = "",
68
- git_token =
69
- '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', // Github fine-grained personal access token that has minimum read-only access to Hornbill Docs metadata
70
- hdocbook_config = {},
71
- hdocbook_project,
72
- includes_found = 0,
73
- includes_success = 0,
74
- includes_failed = 0,
75
- index_records = [],
76
- md_files = [],
77
- md_files_delete = [],
78
- redirects = {},
79
- static_html_files = [],
80
- work_path_content = "",
81
- verbose = false;
82
-
83
- const pdf_path_excluded = function (relative_path) {
84
- if (
85
- !hdocbook_project.pdfGeneration ||
86
- hdocbook_project.pdfGeneration.exclude_paths === undefined
87
- ) {
88
- return false;
89
- }
90
- if (relative_path.startsWith("/")) {
91
- relative_path = relative_path.slice(1, relative_path.length);
92
- }
93
- for (
94
- let i = 0;
95
- i < hdocbook_project.pdfGeneration.exclude_paths.length;
96
- i++
97
- ) {
98
- const exclude_path = hdocbook_project.pdfGeneration.exclude_paths[i];
99
- if (relative_path === exclude_path) return true;
100
- if (exclude_path.at(-1) === "*") {
101
- if (relative_path.startsWith(exclude_path.slice(0, -1))) {
102
- return true;
103
- }
104
- }
105
- }
106
- return false;
107
- };
108
-
109
- const transform_static_html = async function (file_path) {
110
- if (fs.existsSync(file_path.path)) {
111
- // Load HTML file
112
- let html_txt = fs.readFileSync(file_path.path, "utf8");
113
- html_txt = html_txt.replace(/\r/gm, ""); // Remove CR's so we're just dealing with newlines
114
-
115
- let fm_headers = [];
116
- let existing_fm_headers = false;
117
- let doc_type = "Article";
118
- let doc_title = "";
119
-
120
- // Check if we have a frontmatter comment
121
- const fm_header = hdoc.getHTMLFrontmatterHeader(html_txt);
122
- if (Object.keys(fm_header.fm_properties).length > 0) {
123
- existing_fm_headers = true;
124
-
125
- // We have some frontmatter headers, check if title is one of them
126
- let fm_title_found = false;
127
- if (
128
- fm_header.fm_properties &&
129
- fm_header.fm_properties.title !== undefined
130
- ) {
131
- // We have a title - but does the title have a value
132
- if (fm_header.fm_properties.title === "") {
133
- // No value - remove title from the properties map so we don't end up with 2 title properties, one empty and one with a value
134
- delete fm_header.fm_properties.title;
135
- } else {
136
- // We have a value for the title property
137
- fm_title_found = true;
138
- doc_title = fm_header.fm_properties.title.trim();
139
- }
140
- }
141
-
142
- // Is reading-time in the fm headers?
143
- if (fm_header.fm_properties["reading-time"] === undefined) {
144
- const read_time_mins = hdoc.get_html_read_time(html_txt);
145
- book_read_time += read_time_mins;
146
- fm_header.fm_properties["reading-time"] = read_time_mins;
147
- }
148
-
149
- for (const key in fm_header.fm_properties) {
150
- if (fm_header.fm_properties.hasOwnProperty(key)) {
151
- if (key === "type") doc_type = fm_header.fm_properties[key];
152
- else {
153
- fm_headers.push({
154
- id: key,
155
- value: fm_header.fm_properties[key],
156
- });
157
- }
158
- }
159
- }
160
-
161
- if (
162
- !fm_title_found &&
163
- file_path.name !== "description_ext.md" &&
164
- file_path.name !== "article_ext.md"
165
- ) {
166
- // No frontmatter title found in properties
167
- // Go get title from h tags in html
168
- const html_heading = hdoc.getFirstHTMLHeading(
169
- html_txt,
170
- h_tags_to_search
171
- );
172
-
173
- if (
174
- html_heading &&
175
- html_heading[0] &&
176
- html_heading[0].children &&
177
- html_heading[0].children[0] &&
178
- html_heading[0].children[0].data
179
- ) {
180
- // We've found a heading tag, add that as a title to the existing frontmatter properties
181
- fm_headers.push({
182
- id: "title",
183
- value: html_heading[0].children[0].data,
184
- });
185
- doc_title = html_heading[0].children[0].data;
186
- } else {
187
- // No header tag, no frontmatter title, output a warning
188
- console.info(
189
- `[WARNING] No frontmatter title property, or ${h_tags_to_search.join(
190
- ", "
191
- )} tags detected in ${file_path.path}`
192
- );
193
- }
194
- }
195
-
196
- // Do we have a description header?
197
- if (
198
- fm_header.fm_properties &&
199
- fm_header.fm_properties.description !== undefined
200
- ) {
201
- if (fm_header.fm_properties.description === "") {
202
- const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
203
- if (
204
- html_p_tag &&
205
- html_p_tag[0] &&
206
- html_p_tag[0].children &&
207
- html_p_tag[0].children[0] &&
208
- html_p_tag[0].children[0].data
209
- ) {
210
- fm_headers.push({
211
- id: "description",
212
- value: `${doc_title}: ${
213
- html_p_tag[0].children[0].data.split(".")[0] + "."
214
- }`.trim(),
215
- });
216
- }
217
- } else {
218
- fm_headers.push({
219
- id: "description",
220
- value: fm_header.fm_properties.description.trim(),
221
- });
222
- }
223
- } else {
224
- const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
225
- if (
226
- html_p_tag &&
227
- html_p_tag[0] &&
228
- html_p_tag[0].children &&
229
- html_p_tag[0].children[0] &&
230
- html_p_tag[0].children[0].data
231
- ) {
232
- fm_headers.push({
233
- id: "description",
234
- value: `${doc_title}: ${
235
- html_p_tag[0].children[0].data.split(".")[0] + "."
236
- }`.trim(),
237
- });
238
- }
239
- }
240
- } else {
241
- // We have no frontmatter headers, get and build one from the html headings
242
- const html_heading = hdoc.getFirstHTMLHeading(
243
- html_txt,
244
- h_tags_to_search
245
- );
246
- let doc_title = "";
247
- // Add the title
248
- if (
249
- html_heading &&
250
- html_heading[0] &&
251
- html_heading[0].children &&
252
- html_heading[0].children[0] &&
253
- html_heading[0].children[0].data
254
- ) {
255
- // We've found a heading tag, add that as a title to the frontmatter content
256
- fm_headers.push({
257
- id: "title",
258
- value: html_heading[0].children[0].data,
259
- });
260
- doc_title = html_heading[0].children[0].data;
261
- } else if (
262
- file_path.name !== "description_ext.md" &&
263
- file_path.name !== "article_ext.md"
264
- ) {
265
- // No header tag, no frontmatter title, output a warning
266
- console.info(
267
- `[WARNING] No frontmatter title property, or ${h_tags_to_search.join(
268
- ", "
269
- )} tags detected in ${file_path.path}`
270
- );
271
- }
272
-
273
- // Add the reading time
274
- const read_time_mins = hdoc.get_html_read_time(html_txt);
275
- book_read_time += read_time_mins;
276
- fm_headers.push({
277
- id: "reading-time",
278
- value: read_time_mins,
279
- });
280
-
281
- const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
282
- if (
283
- html_p_tag &&
284
- html_p_tag[0] &&
285
- html_p_tag[0].children &&
286
- html_p_tag[0].children[0] &&
287
- html_p_tag[0].children[0].data
288
- ) {
289
- fm_headers.push({
290
- id: "description",
291
- value: `${doc_title}: ${
292
- html_p_tag[0].children[0].data.split(".")[0] + "."
293
- }`.trim(),
294
- });
295
- }
296
- }
297
-
298
- // Add doc type
299
- fm_headers.push({
300
- id: "type",
301
- value: doc_type,
302
- });
303
-
304
- let metadata = {};
305
-
306
- // Remove the first <h1>title</h1> from the HTML as we'll add that in the document header
307
- let html_h1 = h1_pattern.exec(html_txt);
308
- if (html_h1 && html_h1[0])
309
- html_h1 = html_h1[0].replace(/(<h1.*?>)/, "").replace(/(<\/h1>)/, "");
310
-
311
- html_txt = html_txt.replace(h1_pattern, "");
312
-
313
- // Get contributor data from Github, if exists
314
- let contribs = [];
315
- let last_commit = null;
316
- if (
317
- hdocbook_config.publicSource &&
318
- hdocbook_config.publicSource !== "" &&
319
- hdocbook_config.publicSource.includes("github.com/Hornbill-Docs")
320
- ) {
321
- const github_paths = hdoc.get_github_api_path(
322
- hdocbook_config.publicSource,
323
- file_path.relativePath
324
- );
325
- const contributors = hdoc.get_github_contributors(
326
- github_paths.api_path,
327
- git_token
328
- );
329
-
330
- if (!contributors.success) {
331
- console.error(
332
- `Error retrieving contributors from Github: ${contributors.error}`
333
- );
334
- } else {
335
- last_commit = contributors.last_commit_date;
336
- metadata.last_commit = contributors.last_commit_date;
337
- metadata.contributor_count = contributors.contributor_count;
338
- metadata.edit_url = github_paths.edit_path;
339
- contribs = contributors.contributors;
340
- contributors.editPath = github_paths.edit_path;
341
- fm_headers.push({
342
- id: "contributor-count",
343
- value: contributors.contributor_count,
344
- });
345
- fm_headers.push({
346
- id: "last-commit",
347
- value: contributors.last_commit_date,
348
- });
349
- const target_file = file_path.path.replace(
350
- path.extname(file_path.path),
351
- "._info.json"
352
- );
353
- delete contributors.success;
354
- delete contributors.error;
355
- contributors.editPath = github_paths.edit_path;
356
- }
357
- fm_headers.push({
358
- id: "edit-path",
359
- value: github_paths.edit_path,
360
- });
361
- }
362
-
363
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
364
- fm_headers.push({
365
- id: "pdf-path",
366
- value: file_path.relativePath.replace(
367
- path.extname(file_path.relativePath),
368
- ".pdf"
369
- ),
370
- });
371
- }
372
-
373
- let fm_header_content = "<!--[[FRONTMATTER\n";
374
- if (fm_headers.length > 0) {
375
- for (let i = 0; i < fm_headers.length; i++) {
376
- fm_header_content += `${fm_headers[i].id}: ${fm_headers[i].value}\n`;
377
- }
378
- fm_header_content += "]]-->";
379
-
380
- if (existing_fm_headers) {
381
- html_txt = html_txt.replace("<!--" + fm_header.fm_header + "-->", "");
382
- }
383
- }
384
-
385
- let doc_header = "";
386
- let pdf_header = "";
387
- const inline_content = file_path.relativePath.startsWith(
388
- `${hdocbook_config.docId}/_inline/`
389
- );
390
- if (
391
- hdocbook_config.publicSource &&
392
- hdocbook_config.publicSource.includes("github.com/Hornbill-Docs")
393
- ) {
394
- // Build doc header from template and frontmatter tags
395
- if (!inline_content)
396
- doc_header = process_doc_header(
397
- fm_headers,
398
- file_path.relativePath,
399
- doc_header_template,
400
- html_h1
401
- );
402
-
403
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
404
- pdf_header = process_doc_header(
405
- fm_headers,
406
- file_path.relativePath,
407
- pdf_header_template,
408
- html_h1
409
- );
410
- } else {
411
- if (!inline_content)
412
- doc_header = process_doc_header(
413
- fm_headers,
414
- file_path.relativePath,
415
- doc_header_template_non_git,
416
- html_h1
417
- );
418
-
419
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
420
- pdf_header = process_doc_header(
421
- fm_headers,
422
- file_path.relativePath,
423
- pdf_header_template_non_git,
424
- html_h1
425
- );
426
- }
427
-
428
- let pdf_size = 0;
429
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
430
- let pdf_txt = await hdoc_build_pdf.process_images(file_path, html_txt);
431
- pdf_txt = `${pdf_header}\n${pdf_txt}`;
432
-
433
- // Generate PDF file from HTML
434
- const pdf_file_path = file_path.path.replace(
435
- path.extname(file_path.path),
436
- ".pdf"
437
- );
438
- pdf_size = await hdoc_build_pdf.generate_pdf(
439
- browser,
440
- pdf_template_path,
441
- pdf_template,
442
- hdocbook_config,
443
- pdf_txt,
444
- pdf_file_path,
445
- css_templates,
446
- verbose
447
- );
448
- }
449
- if (pdf_size > 0) pdf_created++;
450
-
451
- // Wrap h2 and h3 tags, plus content, in id'd divs
452
- html_txt = hdoc.wrapHContent(html_txt);
453
-
454
- if (inline_content) html_txt = `${fm_header_content}\n${html_txt}`;
455
- else html_txt = `${fm_header_content}\n${doc_header}\n${html_txt}`;
456
-
457
- let relative_path = file_path.relativePath;
458
- if (
459
- !bc[relative_path.replace(".html", "")] &&
460
- bc[relative_path.replace("/index.html", "")]
461
- ) {
462
- relative_path = relative_path.replace("/index.html", "");
463
- }
464
-
465
- const index_data = hdoc_index.transform_html_for_index(html_txt);
466
-
467
- index_data.sections.forEach(section => {
468
- index_records.push({
469
- relative_path: relative_path,
470
- index_html: { fm_props: index_data.fm_props, text: section.text, preview: section.preview, id: section.id ? section.id : null},
471
- metadata: metadata,
472
- contributors: contribs,
473
- pdf_size: pdf_size,
474
- md5: file_path.hash,
475
- lastmod: last_commit !== null ? last_commit : file_path.hb_lastmod,
476
- inline: inline_content,
477
- });
478
- });
479
-
480
- // Save HTML into HTML file
481
- try {
482
- fs.writeFileSync(file_path.path, html_txt);
483
- } catch (err) {
484
- console.error("Error writing:", target_file, "\n", err);
485
- }
486
- }
487
- };
488
-
489
- const transform_markdown_and_save_html = async function (file_path) {
490
- conversion_attempted++;
491
-
492
- if (fs.existsSync(file_path.path)) {
493
- // Load markdown file
494
- let md_txt = hdoc.expand_variables(
495
- fs.readFileSync(file_path.path, "utf8")
496
- );
497
-
498
- // Pull in external includes
499
- const includes_processed = await hdoc.process_includes(
500
- file_path.path,
501
- md_txt,
502
- global_source_path
503
- );
504
- md_txt = includes_processed.body.toString();
505
- includes_found += includes_processed.found;
506
- includes_success += includes_processed.success;
507
- includes_failed += includes_processed.failed;
508
- if (includes_processed.errors.length > 0) {
509
- for (let i = 0; i < includes_processed.errors.length; i++) {
510
- console.error(includes_processed.errors[i]);
511
- }
512
- }
513
-
514
- // One markdown parser per file. Seems wrong, but doesn't work with a global one once past the first md file
515
- // Steve - revisit this
516
- const md = require("markdown-it")({
517
- html: true,
518
- linkify: true,
519
- typographer: true,
520
- });
521
- md.linkify.set({
522
- fuzzyEmail: false,
523
- fuzzyLink: false,
524
- fuzzyIP: false,
525
- });
526
-
527
- // Process Frontmatter tags
528
- let frontmatter_content = "";
529
- md.use(mdfm, function (fm) {
530
- frontmatter_content = fm;
531
- });
532
-
533
- // Process tips
534
- const tips = require(__dirname + "/custom_modules/tips.js");
535
- md.use(tips, {
536
- links: true,
537
- });
538
-
539
- // Tidy up ```json and ```xml code tags
540
-
541
- if (md_txt.includes("```json") || md_txt.includes("```xml"))
542
- md_txt = tidy_code_tags(md_txt, file_path.relativePath);
543
-
544
- // Render markdown into HTML
545
- let html_txt = md.render(md_txt);
546
-
547
- // Prepare frontmatter headers
548
- let fm_headers = [];
549
- let fm_content = frontmatter_content.split(/\r?\n/);
550
-
551
- let fm_contains_title = false,
552
- fm_contains_reading_time = false,
553
- fm_contains_description = false,
554
- doc_title = "",
555
- doc_type = "Article";
556
-
557
- if (fm_content.length >= 0) {
558
- fm_content.forEach(function (fm_prop) {
559
- const fm_id = fm_prop.slice(0, fm_prop.indexOf(":"));
560
- const fm_val = fm_prop.slice(fm_prop.indexOf(":") + 1);
561
-
562
- if (
563
- fm_id &&
564
- fm_id.trim().length > 0 &&
565
- fm_val &&
566
- fm_val.trim().length > 0
567
- ) {
568
- fm_headers.push({
569
- id: fm_id.trim(),
570
- value: fm_val.trim(),
571
- });
572
-
573
- if (fm_id.trim() === "title") {
574
- fm_contains_title = true;
575
- doc_title = fm_val.trim();
576
- }
577
- if (fm_id.trim() === "type") {
578
- doc_type = fm_val.trim();
579
- }
580
- if (fm_id.trim() === "reading-time") {
581
- book_read_time += parseInt(fm_val.trim(), 10);
582
- fm_contains_reading_time = true;
583
- }
584
- if (fm_id.trim() === "description") {
585
- fm_contains_description = true;
586
- }
587
- }
588
- });
589
- }
590
-
591
- // Add doc type
592
- fm_headers.push({
593
- id: "type",
594
- value: doc_type,
595
- });
596
-
597
- // Does frontmatter tag contain a title property
598
- if (!fm_contains_title) {
599
- // Frontmatter tags don't contain a title property - go pull the first one from the html heading tags
600
- const html_heading = hdoc.getFirstHTMLHeading(
601
- html_txt,
602
- h_tags_to_search
603
- );
604
-
605
- if (
606
- html_heading &&
607
- html_heading[0] &&
608
- html_heading[0].children &&
609
- html_heading[0].children[0] &&
610
- html_heading[0].children[0].data
611
- ) {
612
- // We've found a heading tag, add that as a title to the frontmatter content
613
- fm_headers.push({
614
- id: "title",
615
- value: html_heading[0].children[0].data.trim(),
616
- });
617
- doc_title = html_heading[0].children[0].data.trim();
618
- } else if (
619
- file_path.name !== "description_ext.md" &&
620
- file_path.name !== "article_ext.md"
621
- ) {
622
- // No header tag, no frontmatter title, output a warning
623
- console.info(
624
- `[WARNING] No frontmatter title property, or h1, h2 or h3 header tags detected in ${file_path.path}`
625
- );
626
- }
627
- }
628
-
629
- // Does frontmatter contain a description header, generate one if not
630
- if (!fm_contains_description) {
631
- const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
632
- if (
633
- html_p_tag &&
634
- html_p_tag[0] &&
635
- html_p_tag[0].children &&
636
- html_p_tag[0].children[0] &&
637
- html_p_tag[0].children[0].data
638
- ) {
639
- fm_headers.push({
640
- id: "description",
641
- value: `${doc_title}: ${
642
- html_p_tag[0].children[0].data.split(".")[0] + "."
643
- }`,
644
- });
645
- }
646
- }
647
-
648
- // Does frontmatter tag contain a reading-time property
649
- if (!fm_contains_reading_time) {
650
- const read_time_mins = hdoc.get_html_read_time(html_txt);
651
- book_read_time += read_time_mins;
652
- fm_headers.push({
653
- id: "reading-time",
654
- value: read_time_mins,
655
- });
656
- }
657
- let metadata = {};
658
-
659
- // Remove the first <h1>title</h1> from the HTML as we'll add that in the document header
660
- let html_h1 = h1_pattern.exec(html_txt);
661
- if (html_h1 && html_h1[0])
662
- html_h1 = html_h1[0].replace(/(<h1.*?>)/, "").replace(/(<\/h1>)/, "");
663
-
664
- html_txt = html_txt.replace(h1_pattern, "");
665
-
666
- // Get contributor data from Github, if exists
667
- let contribs = [];
668
- let last_commit = null;
669
- if (
670
- hdocbook_config.publicSource &&
671
- hdocbook_config.publicSource !== "" &&
672
- hdocbook_config.publicSource.includes("github.com/Hornbill-Docs")
673
- ) {
674
- const github_paths = hdoc.get_github_api_path(
675
- hdocbook_config.publicSource,
676
- file_path.relativePath
677
- );
678
- const contributors = await hdoc.get_github_contributors(
679
- github_paths.api_path,
680
- git_token,
681
- hdocbook_config.publicSource
682
- );
683
-
684
- if (!contributors.success) {
685
- console.error(
686
- `Error retrieving contributors from Github: ${contributors.error}`
687
- );
688
- } else {
689
- last_commit = contributors.last_commit_date;
690
- metadata.last_commit = contributors.last_commit_date;
691
- metadata.contributor_count = contributors.contributor_count;
692
- metadata.edit_url = github_paths.edit_path;
693
- contribs = contributors.contributors;
694
- contributors.editPath = github_paths.edit_path;
695
-
696
- fm_headers.push({
697
- id: "contributor-count",
698
- value: contributors.contributor_count,
699
- });
700
- fm_headers.push({
701
- id: "last-commit",
702
- value: contributors.last_commit_date,
703
- });
704
- const target_file = file_path.path.replace(
705
- path.extname(file_path.path),
706
- "._info.json"
707
- );
708
- delete contributors.success;
709
- delete contributors.error;
710
- contributors.editPath = github_paths.edit_path;
711
- try {
712
- fs.writeFileSync(
713
- target_file,
714
- JSON.stringify(contributors, null, 2)
715
- );
716
- } catch (err) {
717
- console.error("Error writing:", target_file, "\n", err);
718
- }
719
- }
720
- fm_headers.push({
721
- id: "edit-path",
722
- value: github_paths.edit_path,
723
- });
724
- }
725
-
726
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
727
- fm_headers.push({
728
- id: "pdf-path",
729
- value: file_path.relativePath.replace(
730
- path.extname(file_path.relativePath),
731
- ".pdf"
732
- ),
733
- });
734
- }
735
-
736
- // Add frontmatter tags as comment
737
- let fm_header = "<!--[[FRONTMATTER\n";
738
- if (fm_headers.length > 0) {
739
- for (let i = 0; i < fm_headers.length; i++) {
740
- fm_header += `${fm_headers[i].id}: ${fm_headers[i].value}\n`;
741
- }
742
- }
743
- fm_header += "]]-->";
744
-
745
- let doc_header = "";
746
- let pdf_header = "";
747
- const inline_content = file_path.relativePath.startsWith(
748
- `${hdocbook_config.docId}/_inline/`
749
- );
750
- if (
751
- hdocbook_config.publicSource &&
752
- hdocbook_config.publicSource.includes("github.com/Hornbill-Docs")
753
- ) {
754
- // Build doc header from template and frontmatter tags
755
- if (!inline_content)
756
- doc_header = process_doc_header(
757
- fm_headers,
758
- file_path.relativePath,
759
- doc_header_template,
760
- html_h1
761
- );
762
-
763
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
764
- pdf_header = process_doc_header(
765
- fm_headers,
766
- file_path.relativePath,
767
- pdf_header_template,
768
- html_h1
769
- );
770
- } else {
771
- // Build doc header from template and frontmatter tags
772
- if (!inline_content)
773
- doc_header = process_doc_header(
774
- fm_headers,
775
- file_path.relativePath,
776
- doc_header_template_non_git,
777
- html_h1
778
- );
779
-
780
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
781
- pdf_header = process_doc_header(
782
- fm_headers,
783
- file_path.relativePath,
784
- pdf_header_template_non_git,
785
- html_h1
786
- );
787
- }
788
-
789
- let pdf_size = 0;
790
- if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
791
- let pdf_txt = await hdoc_build_pdf.process_images(file_path, html_txt);
792
- pdf_txt = `${pdf_header}\n${pdf_txt}`;
793
-
794
- // Generate PDF file from HTML
795
- const pdf_file_path = file_path.path.replace(
796
- path.extname(file_path.path),
797
- ".pdf"
798
- );
799
- pdf_size = await hdoc_build_pdf.generate_pdf(
800
- browser,
801
- pdf_template_path,
802
- pdf_template,
803
- hdocbook_config,
804
- pdf_txt,
805
- pdf_file_path,
806
- css_templates,
807
- verbose
808
- );
809
- }
810
- if (pdf_size > 0) pdf_created++;
811
-
812
- // Wrap h2 and h3 tags, plus content, in id'd divs
813
- html_txt = hdoc.wrapHContent(html_txt);
814
-
815
- if (inline_content) html_txt = `${fm_header}\n${html_txt}`;
816
- else html_txt = `${fm_header}\n${doc_header}\n${html_txt}`;
817
-
818
-
819
- // Save HTML into HTML file
820
- const target_file = file_path.path.replace(
821
- path.extname(file_path.path),
822
- ".html"
823
- );
824
- let relative_path = file_path.relativePath.replace(
825
- path.extname(file_path.path),
826
- ".html"
827
- );
828
- try {
829
- fs.writeFileSync(target_file, html_txt);
830
- } catch (err) {
831
- console.error("Error writing:", target_file, "\n", err);
832
- }
833
-
834
- if (
835
- !bc[relative_path.replace(".html", "")] &&
836
- bc[relative_path.replace("/index.html", "")]
837
- ) {
838
- relative_path = relative_path.replace("/index.html", "");
839
- }
840
-
841
- const index_data = hdoc_index.transform_html_for_index(html_txt);
842
-
843
- index_data.sections.forEach(section => {
844
- index_records.push({
845
- relative_path: relative_path,
846
- index_html: { fm_props: index_data.fm_props, text: section.text, preview: section.preview, id: section.id ? section.id : null},
847
- metadata: metadata,
848
- contributors: contribs,
849
- pdf_size: pdf_size,
850
- md5: file_path.hash,
851
- lastmod: last_commit !== null ? last_commit : file_path.hb_lastmod,
852
- inline: inline_content,
853
- });
854
- });
855
-
856
- // Add MD file to delete queue
857
- md_files_delete.push(file_path.path);
858
-
859
- conversion_success++;
860
- return true;
861
- }
862
- conversion_failed++;
863
- console.error("MD file does not exist:", file_path.path);
864
- return false;
865
- };
866
-
867
- const tidy_code_tags = function (markdown, file) {
868
- const json_to_tidy = markdown.match(/```json[\r\n](\s|.)*?```/g);
869
- if (json_to_tidy && json_to_tidy.length > 0) {
870
- for (let i = 0; i < json_to_tidy.length; i++) {
871
- if (json_to_tidy[i] !== "") {
872
- let json_tidy = json_to_tidy[i]
873
- .replace("```json", "")
874
- .replace("```", "");
875
- try {
876
- json_tidy = JSON.stringify(JSON.parse(json_tidy), null, 2);
877
- } catch (e) {
878
- console.info(
879
- `[WARNING] Could not tidy JSON in file [${file}]: ${e}`
880
- );
881
- }
882
- markdown = markdown.replace(
883
- json_to_tidy[i],
884
- "```json\n" + json_tidy + "\n```"
885
- );
886
- }
887
- }
888
- }
889
-
890
- const xml_to_tidy = markdown.match(/```xml[\r\n](\s|.)*?```/g);
891
- if (xml_to_tidy && xml_to_tidy.length > 0) {
892
- for (let i = 0; i < xml_to_tidy.length; i++) {
893
- if (xml_to_tidy[i] !== "") {
894
- const xml_tidy = xml_to_tidy[i]
895
- .replace("```xml", "")
896
- .replace("```", "");
897
- let new_xml_string = xml_tidy;
898
- try {
899
- new_xml_string = xmlFormat(xml_tidy, {
900
- indentation: " ",
901
- collapseContent: true,
902
- lineSeparator: "\n",
903
- });
904
- } catch (e) {
905
- console.info(`[WARNING] Could not tidy XML in file [${file}]: ${e}`);
906
- }
907
- markdown = markdown.replace(
908
- xml_to_tidy[i],
909
- "```xml\n" + new_xml_string + "\n```"
910
- );
911
- }
912
- }
913
- }
914
- return markdown;
915
- };
916
-
917
- const process_doc_header = function (fm_headers, doc_path, template, h1) {
918
- let wip_doc_header = template;
919
- let used_h1 = false;
920
- if (h1 && h1 !== "") {
921
- wip_doc_header = wip_doc_header.replaceAll("{{title}}", h1);
922
- used_h1 = true;
923
- }
924
- // Process fm_headers properties first
925
- for (let i = 0; i < fm_headers.length; i++) {
926
- switch (fm_headers[i].id) {
927
- case "title":
928
- if (!used_h1)
929
- wip_doc_header = wip_doc_header.replaceAll(
930
- "{{title}}",
931
- fm_headers[i].value
932
- );
933
- break;
934
- case "reading-time":
935
- wip_doc_header = wip_doc_header.replaceAll(
936
- "{{reading-time}}",
937
- fm_headers[i].value
938
- );
939
- break;
940
- case "contributor-count":
941
- wip_doc_header = wip_doc_header.replaceAll(
942
- "{{contributor-count}}",
943
- fm_headers[i].value
944
- );
945
- break;
946
- case "type":
947
- wip_doc_header = wip_doc_header.replaceAll(
948
- "{{doc-type}}",
949
- fm_headers[i].value
950
- );
951
- break;
952
- case "edit-path":
953
- wip_doc_header = wip_doc_header.replaceAll(
954
- "{{edit-url}}",
955
- fm_headers[i].value
956
- );
957
- break;
958
- case "last-commit":
959
- let last_commit_date = fm_headers[i].value;
960
- if (last_commit_date !== "No Commit Date Available") {
961
- last_commit_date = new Date(fm_headers[i].value).toDateString();
962
- }
963
- wip_doc_header = wip_doc_header.replaceAll(
964
- "{{last-update}}",
965
- last_commit_date
966
- );
967
- break;
968
- }
969
- }
970
-
971
- // Now sort out breadcrumbs
972
- const logical_path = doc_path.replace(path.extname(doc_path), "");
973
- const bc_for_path = bc[logical_path];
974
- let bc_tags = "\n";
975
- if (bc_for_path) {
976
- for (let i = 0; i < bc_for_path.length - 1; i++) {
977
- let bc_link = "/";
978
- if (redirects[bc_for_path[i].link]) {
979
- if (redirects[bc_for_path[i].link].location) {
980
- bc_link += redirects[bc_for_path[i].link].location;
981
- }
982
- } else {
983
- if (bc_for_path[i].link) {
984
- bc_link = bc_for_path[i].link.startsWith("/")
985
- ? bc_for_path[i].link
986
- : `/${bc_for_path[i].link}`;
987
- } else {
988
- bc_link = "";
989
- }
990
- }
991
- if (bc_link !== "") {
992
- bc_tags += `\t\t\t\t<li class="mt-0 nav-bar-item"><a href="${bc_link}" class="ps-0 pe-0 text-decoration-none">${bc_for_path[i].text}</a></li>\n`;
993
- } else {
994
- bc_tags += `\t\t\t\t<li class="mt-0 nav-bar-item">${bc_for_path[i].text}</li>\n`;
995
- }
996
- }
997
- } else {
998
- if (verbose) {
999
- console.info(
1000
- `[WARNING] Path is not present in navigation items: ${logical_path}`
1001
- );
1002
- }
1003
- }
1004
- bc_tags += "\t\t\t";
1005
- wip_doc_header = wip_doc_header.replaceAll("{{breadcrumbs}}", bc_tags);
1006
- return wip_doc_header;
1007
- };
1008
-
1009
- // File callback for build scan
1010
- const build_file_callback = function (element) {
1011
- if (element.extension === "md") {
1012
- element.hb_source_path = path.join(
1013
- global_source_path,
1014
- element.relativePath
1015
- );
1016
- const fstats = fs.statSync(element.hb_source_path);
1017
- element.hb_lastmod = `${fstats.mtime.toISOString().slice(0, 19)}Z`;
1018
- md_files.push(element);
1019
- } else {
1020
- // File is html, see if there's a matching md file and if there is then ignore the html
1021
- const md_path = element.path.replace(path.extname(element.path), ".md");
1022
- if (fs.existsSync(md_path)) {
1023
- return;
1024
- }
1025
- element.hb_source_path = path.join(
1026
- global_source_path,
1027
- element.relativePath
1028
- );
1029
- const fstats = fs.statSync(element.hb_source_path);
1030
- element.hb_lastmod = `${fstats.mtime.toISOString().slice(0, 19)}Z`;
1031
- static_html_files.push(element);
1032
- }
1033
- };
1034
-
1035
- // File & folder callback for MD5 hash of built content
1036
- const hash_callback = function (element) {
1037
- if (element.extension !== "db") {
1038
- built_file_hashes.push({
1039
- path: element.relativePath,
1040
- hash: element.hash,
1041
- });
1042
- }
1043
- };
1044
-
1045
- // File scan callback for filename validation
1046
- const filename_validation_callback = function (element) {
1047
- if (element.relativePath.startsWith('_inline/')) return;
1048
- if (element.name.toLowerCase() === '.ds_store') return;
1049
- if (element.name === 'article_ext.md' || element.name === 'description_ext.md') return;
1050
- if (image_extensions.includes(element.extension)) return;
1051
- const file_no_ext = element.name.replace(`.${element.extension}`, '');
1052
- if (!file_no_ext.match(regex_filename)) errors_filename.push(element.relativePath);
1053
- };
1054
-
1055
-
1056
- const dreeOptions = {
1057
- hash: true,
1058
- extensions: ["md", "html", "htm"],
1059
- normalize: true,
1060
- stat: true,
1061
- };
1062
-
1063
- const dreeOptionsAllFiles = {
1064
- descendants: true,
1065
- excludeEmptyDirectories: true,
1066
- hash: false,
1067
- normalize: true,
1068
- size: false,
1069
- sizeInBytes: false,
1070
- symbolicLinks: false,
1071
- };
1072
-
1073
- const md5DreeOptions = {
1074
- hash: true,
1075
- normalize: true,
1076
- sorted: true,
1077
- };
1078
-
1079
- exports.run = async function (
1080
- source_path,
1081
- verbose_output,
1082
- github_api_token,
1083
- validate,
1084
- gen_exclude,
1085
- build_version = ""
1086
- ) {
1087
- if (github_api_token !== "") {
1088
- git_token = github_api_token;
1089
- }
1090
- global_source_path = source_path;
1091
- verbose = verbose_output;
1092
-
1093
- const start_time = Date.now();
1094
- // GERRY: The purpose of this function is to create a zip file containing the hdocbook content,
1095
- // * Create a _work folder
1096
- // * copy the hdocbook content to the work folder
1097
- // * Render all markdown into side-by-side HTML file
1098
- // * Replace SERVER_VARS embedded in documents with the right version information etc.
1099
- // * Build an index (sqlite FTS5) by extracting text from all HTML content in the work
1100
- // folder, conceptually we are making a little mini website crawler to index all of the content
1101
- // within the book.
1102
- // * Package everything up into a ZIP file, ready for the build controller to package and publish
1103
-
1104
- console.log("Hornbill HDocBook Build", "\n");
1105
- console.log(" Document Path:", source_path, "\n");
1106
- const build_start_dt = new Date().toLocaleString();
1107
-
1108
- // Load the hdocbook-project.json file to get the docId
1109
- // use the docId to get the book config
1110
- const hdocbook_project_config_path = path.join(
1111
- source_path,
1112
- "hdocbook-project.json"
1113
- );
1114
- try {
1115
- hdocbook_project = require(hdocbook_project_config_path);
1116
- } catch (e) {
1117
- console.error("File not found: hdocbook-project.json\n");
1118
- console.error(
1119
- "hdoc build/validate needs to be run in the root of a HDoc Book.\n"
1120
- );
1121
- process.exit(1);
1122
- }
1123
- doc_id = hdocbook_project.docId;
1124
-
1125
- if (
1126
- !validate &&
1127
- hdocbook_project.pdfGeneration !== undefined &&
1128
- hdocbook_project.pdfGeneration.enable !== undefined
1129
- ) {
1130
- pdf_enable = hdocbook_project.pdfGeneration.enable;
1131
- }
1132
-
1133
- if (
1134
- hdocbook_project.redirects &&
1135
- hdocbook_project.redirects instanceof Array
1136
- ) {
1137
- for (let i = 0; i < hdocbook_project.redirects.length; i++) {
1138
- const redirect_key =
1139
- hdocbook_project.redirects[i].url.indexOf("/") == 0
1140
- ? hdocbook_project.redirects[i].url.substring(1)
1141
- : hdocbook_project.redirects[i].url;
1142
- redirects[redirect_key] = hdocbook_project.redirects[i];
1143
- }
1144
- }
1145
-
1146
- console.log(`Loading hdocbook config...`);
1147
-
1148
- const book_path = path.join(source_path, doc_id),
1149
- hdocbook_path = path.join(book_path, "hdocbook.json"),
1150
- work_path = path.join(source_path, "_work"),
1151
- work_hdocbook_path = path.join(work_path, doc_id, "hdocbook.json");
1152
-
1153
- hdocbook_config = require(hdocbook_path);
1154
- if (build_version !== "") {
1155
- if (build_version.match(regex_version)) {
1156
- hdocbook_config.version = build_version;
1157
- } else {
1158
- console.info(
1159
- `\n[WARNING] Argument build version [${build_version}] does not match expected pattern, defaulting to version specified in book [${hdocbook_config.version}]\n`
1160
- );
1161
- }
1162
- }
1163
-
1164
- if (!hdocbook_config.version.match(regex_version)) {
1165
- console.error(
1166
- `ERROR: Version number does not match required format - ${hdocbook_config.version}\n`
1167
- );
1168
- process.exit(1);
1169
- }
1170
-
1171
- if (
1172
- hdocbook_config.publicSource &&
1173
- hdocbook_config.publicSource.endsWith(".git")
1174
- )
1175
- hdocbook_config.publicSource = hdocbook_config.publicSource.substring(
1176
- 0,
1177
- hdocbook_config.publicSource.length - 4
1178
- );
1179
-
1180
- console.log(`Loading product families...`);
1181
- const prods = await hdoc.load_product_families();
1182
- if (!prods.success) {
1183
- console.error(`${prods.errors}\n`);
1184
- process.exit(1);
1185
- } else {
1186
- prod_families = prods.prod_families;
1187
- prods_supported = prods.prods_supported;
1188
- }
1189
-
1190
- if (!validate) {
1191
- console.log("Caching CSS for PDF generation...");
1192
- const css_files = [
1193
- path.join(pdf_template_path, "css", "custom-block.css"),
1194
- path.join(pdf_template_path, "css", "hdocs-pdf.css"),
1195
- path.join(pdf_template_path, "css", "vars.css"),
1196
- path.join(ui_css_path, 'theme-default', 'styles', 'components', 'api-doc.css')
1197
- ];
1198
- for (let i = 0; i < css_files.length; i++) {
1199
- try {
1200
- css_templates.push(fs.readFileSync(css_files[i], "utf8"));
1201
- } catch (e) {
1202
- console.error(`Error reading file[${css_files[i]}]: ${e}`);
1203
- }
1204
- }
1205
- }
1206
-
1207
- // Validate all filenames first
1208
- console.log(`Validating book filenames meet kebab-case requirements...`);
1209
- dree.scan(book_path, dreeOptionsAllFiles, filename_validation_callback);
1210
- if (errors_filename.length > 0) {
1211
- console.log("\r\n-----------------------");
1212
- console.log(" Validation Output ");
1213
- console.log("-----------------------");
1214
- console.error(`${errors_filename.length} files do not meet filename requirements:`);
1215
- console.error(` - ${errors_filename.join("\n - ")}`);
1216
- console.log()
1217
- process.exit(1);
1218
- }
1219
-
1220
- console.log(`Building: ${doc_id} v${hdocbook_config.version}...\n`);
1221
-
1222
- // Make _work folder to copy everything into
1223
- work_path_content = path.join(work_path, doc_id);
1224
- if (fs.existsSync(work_path)) {
1225
- fs.rmSync(work_path, {
1226
- recursive: true,
1227
- force: true,
1228
- });
1229
- }
1230
- fs.mkdirSync(work_path);
1231
-
1232
- const file_filter = (src) => {
1233
- return !src.toLowerCase().endsWith('.ds_store');
1234
- };
1235
-
1236
- // Copy files from book into _work-doc_id folder
1237
- console.log(`Copying content into work folder...`);
1238
- try {
1239
- fs.copySync(path.join(source_path, doc_id), work_path_content, { filter: file_filter });
1240
- } catch (e) {
1241
- console.error("Error copying from source_path:\n", e);
1242
- process.exit(1);
1243
- }
1244
-
1245
- // Create MD5 hash of content before build
1246
- console.log(`Creating Hash...`);
1247
-
1248
- dree.scan(work_path_content, md5DreeOptions, hash_callback);
1249
- let concat_hash = "|";
1250
- for (let i = 0; i < built_file_hashes.length; i++) {
1251
- concat_hash +=
1252
- built_file_hashes[i].path + ":" + built_file_hashes[i].hash + "|";
1253
- }
1254
- if (concat_hash === "|") {
1255
- console.error("No hash of content has been returned.");
1256
- process.exit(1);
1257
- }
1258
-
1259
- // Create hash and write file
1260
- const hash = crypto.createHash("md5").update(concat_hash).digest("hex");
1261
- const checksum_path = path.join(work_path_content, "checksum.md5");
1262
- try {
1263
- fs.writeFileSync(checksum_path, hash);
1264
- console.log("Hash file creation success:", checksum_path);
1265
- } catch (e) {
1266
- console.error("\nError creating", checksum_path, ":", e);
1267
- process.exit(1);
1268
- }
1269
-
1270
- // Load document header templates
1271
- console.log(`Loading templates...`);
1272
- try {
1273
- doc_header_template = fs.readFileSync(doc_header_template_path, "utf8");
1274
- doc_header_template_non_git = fs.readFileSync(
1275
- non_git_doc_header_template_path,
1276
- "utf8"
1277
- );
1278
- pdf_header_template = fs.readFileSync(pdf_header_template_path, "utf8");
1279
- pdf_header_template_non_git = fs.readFileSync(
1280
- non_git_pdf_header_template_path,
1281
- "utf8"
1282
- );
1283
- } catch (err) {
1284
- console.error(`Error reading document header template: ${err}`);
1285
- process.exit(1);
1286
- }
1287
-
1288
- if (pdf_enable) {
1289
- // Load PDF templates
1290
- try {
1291
- pdf_template = fs.readFileSync(pdf_template_file_path, "utf8");
1292
- } catch (err) {
1293
- console.error(`Error reading PDF template: ${err}`);
1294
- process.exit(1);
1295
- }
1296
- }
1297
- console.log(`Processing navigation breadcrumbs...`);
1298
- const bc_build = hdoc.build_breadcrumbs(hdocbook_config.navigation.items);
1299
- if (bc_build.errors.length > 0) {
1300
- console.log("\r\n-----------------------");
1301
- console.log(" Validation Output ");
1302
- console.log("-----------------------");
1303
- console.log(
1304
- `\n${bc_build.errors.length} errors found when processing navigation:\n`
1305
- );
1306
- console.error(` - ${bc_build.errors.join("\n\n - ")}`);
1307
- console.log("\n");
1308
- process.exit(1);
1309
- }
1310
- bc = bc_build.bc;
1311
- console.log(`Processing content...`);
1312
- // Get a list of MD files in work_path
1313
- dree.scan(work_path, dreeOptions, build_file_callback);
1314
-
1315
- if (pdf_enable) {
1316
- // Create a Chromium browser instance generate PDFs with
1317
- browser = await puppeteer.launch({ headless: 'shell' });
1318
- }
1319
-
1320
- // Work through MD files and convert to HTML
1321
- let mdPromiseArray = [];
1322
- for (let i = 0; i < md_files.length; i++) {
1323
- mdPromiseArray.push(md_files[i]);
1324
- }
1325
- const chunkSize = 8;
1326
- for (let i = 0; i < mdPromiseArray.length; i += chunkSize) {
1327
- const chunk = mdPromiseArray.slice(i, i + chunkSize);
1328
- // do whatever
1329
- await Promise.all(
1330
- chunk.map(async (file) => {
1331
- await transform_markdown_and_save_html(file);
1332
- })
1333
- );
1334
- }
1335
-
1336
- // Work through Static HTML files and add Frontmatter tags
1337
- let htmlPromiseArray = [];
1338
- for (let i = 0; i < static_html_files.length; i++) {
1339
- htmlPromiseArray.push(static_html_files[i]);
1340
- }
1341
- for (let i = 0; i < htmlPromiseArray.length; i += chunkSize) {
1342
- const chunk = htmlPromiseArray.slice(i, i + chunkSize);
1343
- await Promise.all(
1344
- chunk.map(async (file) => {
1345
- await transform_static_html(file);
1346
- })
1347
- );
1348
- }
1349
-
1350
- if (pdf_enable) {
1351
- // Close the Chromium browser instance
1352
- await browser.close();
1353
- }
1354
-
1355
- // Output to console
1356
- console.log(`\n MD files found: ${conversion_attempted}`);
1357
- console.log(`Successfully converted to HTML: ${conversion_success}`);
1358
- console.error(` Failed to convert: ${conversion_failed}\n`);
1359
- console.log(` Includes Found: ${includes_found}`);
1360
- console.log(` Includes Success: ${includes_success}`);
1361
- console.error(` Includes Failed: ${includes_failed}\n`);
1362
- console.log(
1363
- ` Static HTML Files Found: ${static_html_files.length}\n`
1364
- );
1365
- if (!validate) {
1366
- console.log(` PDF Files Created: ${pdf_created}\n`);
1367
- }
1368
-
1369
- // Validate content
1370
- const validation_success = await hdoc_validate.run(
1371
- work_path,
1372
- doc_id,
1373
- verbose,
1374
- hdocbook_config,
1375
- hdocbook_project,
1376
- bc,
1377
- prod_families,
1378
- prods_supported,
1379
- gen_exclude,
1380
- redirects
1381
- );
1382
- if (!validation_success) {
1383
- const end_time = Date.now();
1384
- console.log(`\nTime Taken: ${get_duration(start_time, end_time)}\n`);
1385
- process.exit(1);
1386
- }
1387
-
1388
- // Delete markdown files
1389
- console.log(`Performing Markdown Cleanup`);
1390
-
1391
- let filePromiseArray = [];
1392
- for (let i = 0; i < md_files_delete.length; i++) {
1393
- filePromiseArray.push(md_files_delete[i]);
1394
- }
1395
- await Promise.all(
1396
- filePromiseArray.map(async (file) => {
1397
- fs.unlink(file, (err) => {
1398
- if (err) console.error(`Error deleting ${file}: ${e}`);
1399
- });
1400
- })
1401
- );
1402
-
1403
- // Add book read timing to the hdocbook.json
1404
- hdocbook_config.readingTime = Math.ceil(
1405
- book_read_time + (book_read_time / 100) * 10
1406
- );
1407
- hdocbook_config.navigation.items = hdoc.strip_drafts(
1408
- hdocbook_config.navigation.items
1409
- );
1410
- try {
1411
- fs.writeFileSync(
1412
- work_hdocbook_path,
1413
- JSON.stringify(hdocbook_config, null, 2)
1414
- );
1415
- console.log("\nhdocbook.json update success:", work_hdocbook_path);
1416
- } catch (e) {
1417
- console.error("\nError creating", work_hdocbook_path, ":", e);
1418
- process.exit(1);
1419
- }
1420
-
1421
- // Build the index
1422
- // Create the DB and tables
1423
- console.log(`Building the Index`);
1424
- let db = hdoc_build_db.create_db(work_path, doc_id);
1425
- if (db.error && db.error !== null) {
1426
- console.error(db.error);
1427
- process.exit(1);
1428
- }
1429
- // Populate primary index tables
1430
- const index = await hdoc_build_db.populate_index(
1431
- db.db,
1432
- doc_id,
1433
- hdocbook_config,
1434
- index_records,
1435
- verbose
1436
- );
1437
- if (!index.success) {
1438
- console.error(index.error);
1439
- process.exit(1);
1440
- }
1441
-
1442
- // Populate redirect index table records
1443
- if (
1444
- hdocbook_project.redirects &&
1445
- hdocbook_project.redirects instanceof Array &&
1446
- hdocbook_project.redirects.length > 0
1447
- ) {
1448
- const redirects_index = hdoc_build_db.populate_redirects(
1449
- db.db,
1450
- hdocbook_project.redirects,
1451
- verbose
1452
- );
1453
- if (!redirects_index.success) {
1454
- for (let i = 0; i < index.errors.length; i++) {
1455
- console.error(index.errors[i]);
1456
- }
1457
- process.exit(1);
1458
- }
1459
- }
1460
-
1461
- if (!validate) {
1462
- try {
1463
- const zip_path = path.join(work_path, doc_id + ".zip");
1464
-
1465
- var output = fs.createWriteStream(zip_path);
1466
- var archive = archiver("zip");
1467
- archive.on("error", function (err) {
1468
- throw err;
1469
- });
1470
- archive.pipe(output);
1471
-
1472
- // append files from a sub-directory, putting its contents at the root of archive
1473
- archive.directory(work_path_content, false);
1474
- archive.finalize();
1475
-
1476
- //await zip(work_path_content, zip_path);
1477
- console.log(`\nZIP Creation Success: ${zip_path}\n`);
1478
- console.log(" Build Started:", build_start_dt);
1479
- console.log(`Build Completed: ${new Date().toLocaleString()}\n`);
1480
- } catch (e) {
1481
- console.error("\nError creating ZIP: " + e);
1482
- }
1483
- } else {
1484
- console.log("\nValidation Complete\n");
1485
- }
1486
- const end_time = Date.now();
1487
- console.log(`Time Taken: ${get_duration(start_time, end_time)}\n`);
1488
- };
1489
-
1490
- const get_duration = function (start, end) {
1491
- const total_time = new Date(end - start).toISOString().slice(11, 19);
1492
- const duration_arr = total_time.split(":");
1493
- let duration = "";
1494
- if (parseInt(duration_arr[0], 10) > 0) {
1495
- duration += parseInt(duration_arr[0], 10) + "h ";
1496
- }
1497
- if (duration !== "" || parseInt(duration_arr[1], 10)) {
1498
- duration += parseInt(duration_arr[1], 10) + "m ";
1499
- }
1500
- duration += parseInt(duration_arr[2], 10) + "s";
1501
- return duration;
1502
- };
1503
- })();
1
+ (() => {
2
+ const crypto = require("node:crypto");
3
+ const dree = require("dree");
4
+ const fs = require("fs-extra");
5
+ const mdfm = require("markdown-it-front-matter");
6
+ const path = require("node:path");
7
+ const puppeteer = require("puppeteer");
8
+ const URL = require("node:url").URL;
9
+ const hdoc_validate = require(path.join(__dirname, "hdoc-validate.js"));
10
+ const hdoc = require(path.join(__dirname, "hdoc-module.js"));
11
+ const hdoc_build_db = require(path.join(__dirname, "hdoc-build-db.js"));
12
+ const hdoc_build_pdf = require(path.join(__dirname, "hdoc-build-pdf.js"));
13
+ const hdoc_index = require(path.join(__dirname, "hdoc-db.js"));
14
+ const archiver = require("archiver");
15
+ const xmlFormat = require("xml-formatter");
16
+
17
+ const h_tags_to_search = ["h1", "h2", "h3"];
18
+ const image_extensions = ["png", "svg", "jpg"];
19
+ const doc_header_template_path = path.join(
20
+ __dirname,
21
+ "templates",
22
+ "doc-header.html",
23
+ );
24
+ const non_git_doc_header_template_path = path.join(
25
+ __dirname,
26
+ "templates",
27
+ "doc-header-non-git.html",
28
+ );
29
+ const pdf_header_template_path = path.join(
30
+ __dirname,
31
+ "templates",
32
+ "pdf-header.html",
33
+ );
34
+ const non_git_pdf_header_template_path = path.join(
35
+ __dirname,
36
+ "templates",
37
+ "pdf-header-non-git.html",
38
+ );
39
+ const pdf_template_path = path.join(__dirname, "templates", "pdf");
40
+ const ui_css_path = path.join(__dirname, "ui", "css");
41
+ const pdf_template_file_path = path.join(pdf_template_path, "template.html");
42
+ const regex_version = /^[0-9]{1,3}[.][0-9]{1,3}[.][0-9]{1,6}$/;
43
+ const h1_pattern = /(<h1.*?>)\s*.*\s*(.*<\/h1>)/;
44
+ const regex_filename = /^[a-z]+-{0,1}([-a-z0-9]+)*$/;
45
+
46
+ const built_file_hashes = [];
47
+ const css_templates = [];
48
+ const errors_filename = [];
49
+ const index_records = [];
50
+ const md_files = [];
51
+ const md_files_delete = [];
52
+ const redirects = {};
53
+ const static_html_files = [];
54
+
55
+ let bc = {}; // Breadcrumbs map
56
+ let book_read_time = 0;
57
+ let browser = {};
58
+ let conversion_attempted = 0;
59
+ let conversion_success = 0;
60
+ let conversion_failed = 0;
61
+ let doc_header_template = "";
62
+ let doc_header_template_non_git = "";
63
+ let global_source_path = "";
64
+ let pdf_created = 0;
65
+ let pdf_enable = false;
66
+ let pdf_header_template = "";
67
+ let pdf_header_template_non_git = "";
68
+ let pdf_template = "";
69
+ let prod_families = {};
70
+ let prods_supported = [];
71
+ let doc_id = "";
72
+ let git_token =
73
+ "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"; // Github fine-grained personal access token that has minimum read-only access to Hornbill Docs metadata
74
+ let hdocbook_config = {};
75
+ let hdocbook_project;
76
+ let includes_found = 0;
77
+ let includes_success = 0;
78
+ let includes_failed = 0;
79
+ let work_path_content = "";
80
+ let verbose = false;
81
+
82
+ const pdf_path_excluded = (relative_path) => {
83
+ if (
84
+ !hdocbook_project.pdfGeneration ||
85
+ hdocbook_project.pdfGeneration.exclude_paths === undefined
86
+ ) {
87
+ return false;
88
+ }
89
+ let clean_rel_path = relative_path;
90
+ if (relative_path.startsWith("/")) {
91
+ clean_rel_path = clean_rel_path.slice(1, clean_rel_path.length);
92
+ }
93
+ for (
94
+ let i = 0;
95
+ i < hdocbook_project.pdfGeneration.exclude_paths.length;
96
+ i++
97
+ ) {
98
+ const exclude_path = hdocbook_project.pdfGeneration.exclude_paths[i];
99
+ if (clean_rel_path === exclude_path) return true;
100
+ if (exclude_path.at(-1) === "*") {
101
+ if (clean_rel_path.startsWith(exclude_path.slice(0, -1))) {
102
+ return true;
103
+ }
104
+ }
105
+ }
106
+ return false;
107
+ };
108
+
109
+ const transform_static_html = async (file_path) => {
110
+ if (fs.existsSync(file_path.path)) {
111
+ // Load HTML file
112
+ let html_txt = fs.readFileSync(file_path.path, "utf8");
113
+ html_txt = html_txt.replace(/\r/gm, ""); // Remove CR's so we're just dealing with newlines
114
+
115
+ const fm_headers = [];
116
+ let existing_fm_headers = false;
117
+ let doc_type = "Article";
118
+ let doc_title = "";
119
+
120
+ // Check if we have a frontmatter comment
121
+ const fm_header = hdoc.getHTMLFrontmatterHeader(html_txt);
122
+ if (Object.keys(fm_header.fm_properties).length > 0) {
123
+ existing_fm_headers = true;
124
+
125
+ // We have some frontmatter headers, check if title is one of them
126
+ let fm_title_found = false;
127
+ if (
128
+ fm_header.fm_properties &&
129
+ fm_header.fm_properties.title !== undefined
130
+ ) {
131
+ // We have a title - but does the title have a value
132
+ if (fm_header.fm_properties.title === "") {
133
+ // No value - remove title from the properties map so we don't end up with 2 title properties, one empty and one with a value
134
+ fm_header.fm_properties.title = undefined;
135
+ } else {
136
+ // We have a value for the title property
137
+ fm_title_found = true;
138
+ doc_title = fm_header.fm_properties.title.trim();
139
+ }
140
+ }
141
+
142
+ // Is reading-time in the fm headers?
143
+ if (fm_header.fm_properties["reading-time"] === undefined) {
144
+ const read_time_mins = hdoc.get_html_read_time(html_txt);
145
+ book_read_time += read_time_mins;
146
+ fm_header.fm_properties["reading-time"] = read_time_mins;
147
+ }
148
+
149
+ for (const key in fm_header.fm_properties) {
150
+ if (Object.hasOwn(fm_header.fm_properties, key)) {
151
+ if (key === "type") doc_type = fm_header.fm_properties[key];
152
+ else {
153
+ fm_headers.push({
154
+ id: key,
155
+ value: fm_header.fm_properties[key],
156
+ });
157
+ }
158
+ }
159
+ }
160
+
161
+ if (
162
+ !fm_title_found &&
163
+ file_path.name !== "description_ext.md" &&
164
+ file_path.name !== "article_ext.md"
165
+ ) {
166
+ // No frontmatter title found in properties
167
+ // Go get title from h tags in html
168
+ const html_heading = hdoc.getFirstHTMLHeading(
169
+ html_txt,
170
+ h_tags_to_search,
171
+ );
172
+
173
+ if (html_heading?.[0]?.children?.[0]?.data) {
174
+ // We've found a heading tag, add that as a title to the existing frontmatter properties
175
+ fm_headers.push({
176
+ id: "title",
177
+ value: html_heading[0].children[0].data,
178
+ });
179
+ doc_title = html_heading[0].children[0].data;
180
+ } else {
181
+ // No header tag, no frontmatter title, output a warning
182
+ console.info(
183
+ `[WARNING] No frontmatter title property, or ${h_tags_to_search.join(
184
+ ", ",
185
+ )} tags detected in ${file_path.path}`,
186
+ );
187
+ }
188
+ }
189
+
190
+ // Do we have a description header?
191
+ if (
192
+ fm_header.fm_properties &&
193
+ fm_header.fm_properties.description !== undefined
194
+ ) {
195
+ if (fm_header.fm_properties.description === "") {
196
+ const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
197
+ if (html_p_tag?.[0]?.children?.[0]?.data) {
198
+ fm_headers.push({
199
+ id: "description",
200
+ value:
201
+ `${doc_title}: ${html_p_tag[0].children[0].data.split(".")[0]}.`.trim(),
202
+ });
203
+ }
204
+ } else {
205
+ fm_headers.push({
206
+ id: "description",
207
+ value: fm_header.fm_properties.description.trim(),
208
+ });
209
+ }
210
+ } else {
211
+ const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
212
+ if (html_p_tag?.[0]?.children?.[0]?.data) {
213
+ fm_headers.push({
214
+ id: "description",
215
+ value:
216
+ `${doc_title}: ${html_p_tag[0].children[0].data.split(".")[0]}.`.trim(),
217
+ });
218
+ }
219
+ }
220
+ } else {
221
+ // We have no frontmatter headers, get and build one from the html headings
222
+ const html_heading = hdoc.getFirstHTMLHeading(
223
+ html_txt,
224
+ h_tags_to_search,
225
+ );
226
+ let doc_title = "";
227
+ // Add the title
228
+ if (html_heading?.[0]?.children?.[0]?.data) {
229
+ // We've found a heading tag, add that as a title to the frontmatter content
230
+ fm_headers.push({
231
+ id: "title",
232
+ value: html_heading[0].children[0].data,
233
+ });
234
+ doc_title = html_heading[0].children[0].data;
235
+ } else if (
236
+ file_path.name !== "description_ext.md" &&
237
+ file_path.name !== "article_ext.md"
238
+ ) {
239
+ // No header tag, no frontmatter title, output a warning
240
+ console.info(
241
+ `[WARNING] No frontmatter title property, or ${h_tags_to_search.join(
242
+ ", ",
243
+ )} tags detected in ${file_path.path}`,
244
+ );
245
+ }
246
+
247
+ // Add the reading time
248
+ const read_time_mins = hdoc.get_html_read_time(html_txt);
249
+ book_read_time += read_time_mins;
250
+ fm_headers.push({
251
+ id: "reading-time",
252
+ value: read_time_mins,
253
+ });
254
+
255
+ const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
256
+ if (html_p_tag?.[0]?.children?.[0]?.data) {
257
+ fm_headers.push({
258
+ id: "description",
259
+ value:
260
+ `${doc_title}: ${html_p_tag[0].children[0].data.split(".")[0]}.`.trim(),
261
+ });
262
+ }
263
+ }
264
+
265
+ // Add doc type
266
+ fm_headers.push({
267
+ id: "type",
268
+ value: doc_type,
269
+ });
270
+
271
+ const metadata = {};
272
+
273
+ // Remove the first <h1>title</h1> from the HTML as we'll add that in the document header
274
+ let html_h1 = h1_pattern.exec(html_txt);
275
+ if (html_h1?.[0])
276
+ html_h1 = html_h1[0].replace(/(<h1.*?>)/, "").replace(/(<\/h1>)/, "");
277
+
278
+ html_txt = html_txt.replace(h1_pattern, "");
279
+
280
+ // Get contributor data from Github, if exists
281
+ let contribs = [];
282
+ let last_commit = null;
283
+ if (
284
+ hdocbook_config.publicSource &&
285
+ hdocbook_config.publicSource !== "" &&
286
+ hdocbook_config.publicSource.includes("github.com/Hornbill-Docs")
287
+ ) {
288
+ const github_paths = hdoc.get_github_api_path(
289
+ hdocbook_config.publicSource,
290
+ file_path.relativePath,
291
+ );
292
+ const contributors = await hdoc.get_github_contributors(
293
+ github_paths.api_path,
294
+ git_token,
295
+ );
296
+
297
+ if (!contributors.success) {
298
+ console.error(
299
+ `Error retrieving contributors from Github: ${contributors.error}`,
300
+ );
301
+ } else {
302
+ last_commit = contributors.last_commit_date;
303
+ metadata.last_commit = contributors.last_commit_date;
304
+ metadata.contributor_count = contributors.contributor_count;
305
+ metadata.edit_url = github_paths.edit_path;
306
+ contribs = contributors.contributors;
307
+ contributors.editPath = github_paths.edit_path;
308
+ fm_headers.push({
309
+ id: "contributor-count",
310
+ value: contributors.contributor_count,
311
+ });
312
+ fm_headers.push({
313
+ id: "last-commit",
314
+ value: contributors.last_commit_date,
315
+ });
316
+ const target_file = file_path.path.replace(
317
+ path.extname(file_path.path),
318
+ "._info.json",
319
+ );
320
+ contributors.success = undefined;
321
+ contributors.error = undefined;
322
+ contributors.editPath = github_paths.edit_path;
323
+ }
324
+ fm_headers.push({
325
+ id: "edit-path",
326
+ value: github_paths.edit_path,
327
+ });
328
+ }
329
+
330
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
331
+ fm_headers.push({
332
+ id: "pdf-path",
333
+ value: file_path.relativePath.replace(
334
+ path.extname(file_path.relativePath),
335
+ ".pdf",
336
+ ),
337
+ });
338
+ }
339
+
340
+ let fm_header_content = "<!--[[FRONTMATTER\n";
341
+ if (fm_headers.length > 0) {
342
+ for (let i = 0; i < fm_headers.length; i++) {
343
+ fm_header_content += `${fm_headers[i].id}: ${fm_headers[i].value}\n`;
344
+ }
345
+ fm_header_content += "]]-->";
346
+
347
+ if (existing_fm_headers) {
348
+ html_txt = html_txt.replace(`<!--${fm_header.fm_header}-->`, "");
349
+ }
350
+ }
351
+
352
+ let doc_header = "";
353
+ let pdf_header = "";
354
+ const inline_content = file_path.relativePath.startsWith(
355
+ `${hdocbook_config.docId}/_inline/`,
356
+ );
357
+ if (hdocbook_config.publicSource?.includes("github.com/Hornbill-Docs")) {
358
+ // Build doc header from template and frontmatter tags
359
+ if (!inline_content)
360
+ doc_header = process_doc_header(
361
+ fm_headers,
362
+ file_path.relativePath,
363
+ doc_header_template,
364
+ html_h1,
365
+ );
366
+
367
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
368
+ pdf_header = process_doc_header(
369
+ fm_headers,
370
+ file_path.relativePath,
371
+ pdf_header_template,
372
+ html_h1,
373
+ );
374
+ } else {
375
+ if (!inline_content)
376
+ doc_header = process_doc_header(
377
+ fm_headers,
378
+ file_path.relativePath,
379
+ doc_header_template_non_git,
380
+ html_h1,
381
+ );
382
+
383
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
384
+ pdf_header = process_doc_header(
385
+ fm_headers,
386
+ file_path.relativePath,
387
+ pdf_header_template_non_git,
388
+ html_h1,
389
+ );
390
+ }
391
+
392
+ let pdf_size = 0;
393
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
394
+ let pdf_txt = await hdoc_build_pdf.process_images(file_path, html_txt);
395
+ pdf_txt = `${pdf_header}\n${pdf_txt}`;
396
+
397
+ // Generate PDF file from HTML
398
+ const pdf_file_path = file_path.path.replace(
399
+ path.extname(file_path.path),
400
+ ".pdf",
401
+ );
402
+ pdf_size = await hdoc_build_pdf.generate_pdf(
403
+ browser,
404
+ pdf_template_path,
405
+ pdf_template,
406
+ hdocbook_config,
407
+ pdf_txt,
408
+ pdf_file_path,
409
+ css_templates,
410
+ verbose,
411
+ );
412
+ }
413
+ if (pdf_size > 0) pdf_created++;
414
+
415
+ // Wrap h2 and h3 tags, plus content, in id'd divs
416
+ html_txt = hdoc.wrapHContent(html_txt);
417
+
418
+ if (inline_content) html_txt = `${fm_header_content}\n${html_txt}`;
419
+ else html_txt = `${fm_header_content}\n${doc_header}\n${html_txt}`;
420
+
421
+ let relative_path = file_path.relativePath;
422
+ if (
423
+ !bc[relative_path.replace(".html", "")] &&
424
+ bc[relative_path.replace("/index.html", "")]
425
+ ) {
426
+ relative_path = relative_path.replace("/index.html", "");
427
+ }
428
+
429
+ const index_data = hdoc_index.transform_html_for_index(html_txt);
430
+
431
+ for (const section of index_data.sections) {
432
+ index_records.push({
433
+ relative_path: relative_path,
434
+ index_html: {
435
+ fm_props: index_data.fm_props,
436
+ text: section.text,
437
+ preview: section.preview,
438
+ id: section.id ? section.id : null,
439
+ },
440
+ metadata: metadata,
441
+ contributors: contribs,
442
+ pdf_size: pdf_size,
443
+ md5: file_path.hash,
444
+ lastmod: last_commit !== null ? last_commit : file_path.hb_lastmod,
445
+ inline: inline_content,
446
+ });
447
+ }
448
+
449
+ // Save HTML into HTML file
450
+ try {
451
+ fs.writeFileSync(file_path.path, html_txt);
452
+ } catch (err) {
453
+ console.error("Error writing:", target_file, "\n", err);
454
+ }
455
+ }
456
+ };
457
+
458
+ const transform_markdown_and_save_html = async (file_path) => {
459
+ conversion_attempted++;
460
+
461
+ if (fs.existsSync(file_path.path)) {
462
+ // Load markdown file
463
+ let md_txt = hdoc.expand_variables(
464
+ fs.readFileSync(file_path.path, "utf8"),
465
+ );
466
+
467
+ // Pull in external includes
468
+ const includes_processed = await hdoc.process_includes(
469
+ file_path.path,
470
+ md_txt,
471
+ global_source_path,
472
+ );
473
+ md_txt = includes_processed.body.toString();
474
+ includes_found += includes_processed.found;
475
+ includes_success += includes_processed.success;
476
+ includes_failed += includes_processed.failed;
477
+ if (includes_processed.errors.length > 0) {
478
+ for (let i = 0; i < includes_processed.errors.length; i++) {
479
+ console.error(includes_processed.errors[i]);
480
+ }
481
+ }
482
+
483
+ // One markdown-it parser instance per file. Reusing a single global parser stops rendering correctly after the first md file, so a fresh one is created here
484
+ // Steve - revisit this
485
+ const md = require("markdown-it")({
486
+ html: true,
487
+ linkify: true,
488
+ typographer: true,
489
+ });
490
+ md.linkify.set({
491
+ fuzzyEmail: false,
492
+ fuzzyLink: false,
493
+ fuzzyIP: false,
494
+ });
495
+
496
+ // Process Frontmatter tags
497
+ let frontmatter_content = "";
498
+ md.use(mdfm, (fm) => {
499
+ frontmatter_content = fm;
500
+ });
501
+
502
+ // Process tips
503
+ const tips = require(`${__dirname}/custom_modules/tips.js`);
504
+ md.use(tips, {
505
+ links: true,
506
+ });
507
+
508
+ // Tidy up ```json and ```xml code tags
509
+
510
+ if (md_txt.includes("```json") || md_txt.includes("```xml"))
511
+ md_txt = tidy_code_tags(md_txt, file_path.relativePath);
512
+
513
+ // Render markdown into HTML
514
+ let html_txt = md.render(md_txt);
515
+
516
+ // Prepare frontmatter headers
517
+ const fm_headers = [];
518
+ const fm_content = frontmatter_content.split(/\r?\n/);
519
+
520
+ let fm_contains_title = false;
521
+ let fm_contains_reading_time = false;
522
+ let fm_contains_description = false;
523
+ let doc_title = "";
524
+ let doc_type = "Article";
525
+
526
+ if (fm_content.length > 0) {
527
+ for (const fm_prop of fm_content) {
528
+ const fm_id = fm_prop.slice(0, fm_prop.indexOf(":"));
529
+ const fm_val = fm_prop.slice(fm_prop.indexOf(":") + 1);
530
+
531
+ if (
532
+ fm_id &&
533
+ fm_id.trim().length > 0 &&
534
+ fm_val &&
535
+ fm_val.trim().length > 0
536
+ ) {
537
+ fm_headers.push({
538
+ id: fm_id.trim(),
539
+ value: fm_val.trim(),
540
+ });
541
+
542
+ if (fm_id.trim() === "title") {
543
+ fm_contains_title = true;
544
+ doc_title = fm_val.trim();
545
+ }
546
+ if (fm_id.trim() === "type") {
547
+ doc_type = fm_val.trim();
548
+ }
549
+ if (fm_id.trim() === "reading-time") {
550
+ book_read_time += Number.parseInt(fm_val.trim(), 10);
551
+ fm_contains_reading_time = true;
552
+ }
553
+ if (fm_id.trim() === "description") {
554
+ fm_contains_description = true;
555
+ }
556
+ }
557
+ }
558
+ }
559
+
560
+ // Add doc type
561
+ fm_headers.push({
562
+ id: "type",
563
+ value: doc_type,
564
+ });
565
+
566
+ // Does frontmatter tag contain a title property
567
+ if (!fm_contains_title) {
568
+ // Frontmatter tags don't contain a title property - go pull the first one from the html heading tags
569
+ const html_heading = hdoc.getFirstHTMLHeading(
570
+ html_txt,
571
+ h_tags_to_search,
572
+ );
573
+
574
+ if (html_heading?.[0]?.children?.[0]?.data) {
575
+ // We've found a heading tag, add that as a title to the frontmatter content
576
+ fm_headers.push({
577
+ id: "title",
578
+ value: html_heading[0].children[0].data.trim(),
579
+ });
580
+ doc_title = html_heading[0].children[0].data.trim();
581
+ } else if (
582
+ file_path.name !== "description_ext.md" &&
583
+ file_path.name !== "article_ext.md"
584
+ ) {
585
+ // No header tag, no frontmatter title, output a warning
586
+ console.info(
587
+ `[WARNING] No frontmatter title property, or h1, h2 or h3 header tags detected in ${file_path.path}`,
588
+ );
589
+ }
590
+ }
591
+
592
+ // Does frontmatter contain a description header, generate one if not
593
+ if (!fm_contains_description) {
594
+ const html_p_tag = hdoc.getFirstHTMLHeading(html_txt, ["p"]);
595
+ if (html_p_tag?.[0]?.children?.[0]?.data) {
596
+ fm_headers.push({
597
+ id: "description",
598
+ value:
599
+ `${doc_title}: ${html_p_tag[0].children[0].data.split(".")[0]}.`.trim(),
600
+ });
601
+ }
602
+ }
603
+
604
+ // Does frontmatter tag contain a reading-time property
605
+ if (!fm_contains_reading_time) {
606
+ const read_time_mins = hdoc.get_html_read_time(html_txt);
607
+ book_read_time += read_time_mins;
608
+ fm_headers.push({
609
+ id: "reading-time",
610
+ value: read_time_mins,
611
+ });
612
+ }
613
+ const metadata = {};
614
+
615
+ // Remove the first <h1>title</h1> from the HTML as we'll add that in the document header
616
+ let html_h1 = h1_pattern.exec(html_txt);
617
+ if (html_h1?.[0])
618
+ html_h1 = html_h1[0].replace(/(<h1.*?>)/, "").replace(/(<\/h1>)/, "");
619
+
620
+ html_txt = html_txt.replace(h1_pattern, "");
621
+
622
+ // Get contributor data from Github, if exists
623
+ let contribs = [];
624
+ let last_commit = null;
625
+ if (
626
+ hdocbook_config.publicSource &&
627
+ hdocbook_config.publicSource !== "" &&
628
+ hdocbook_config.publicSource.includes("github.com/Hornbill-Docs")
629
+ ) {
630
+ const github_paths = hdoc.get_github_api_path(
631
+ hdocbook_config.publicSource,
632
+ file_path.relativePath,
633
+ );
634
+ const contributors = await hdoc.get_github_contributors(
635
+ github_paths.api_path,
636
+ git_token,
637
+ hdocbook_config.publicSource,
638
+ );
639
+
640
+ if (!contributors.success) {
641
+ console.error(
642
+ `Error retrieving contributors from Github: ${contributors.error}`,
643
+ );
644
+ } else {
645
+ last_commit = contributors.last_commit_date;
646
+ metadata.last_commit = contributors.last_commit_date;
647
+ metadata.contributor_count = contributors.contributor_count;
648
+ metadata.edit_url = github_paths.edit_path;
649
+ contribs = contributors.contributors;
650
+ contributors.editPath = github_paths.edit_path;
651
+
652
+ fm_headers.push({
653
+ id: "contributor-count",
654
+ value: contributors.contributor_count,
655
+ });
656
+ fm_headers.push({
657
+ id: "last-commit",
658
+ value: contributors.last_commit_date,
659
+ });
660
+ const target_file = file_path.path.replace(
661
+ path.extname(file_path.path),
662
+ "._info.json",
663
+ );
664
+ contributors.success = undefined;
665
+ contributors.error = undefined;
666
+ contributors.editPath = github_paths.edit_path;
667
+ try {
668
+ fs.writeFileSync(
669
+ target_file,
670
+ JSON.stringify(contributors, null, 2),
671
+ );
672
+ } catch (err) {
673
+ console.error("Error writing:", target_file, "\n", err);
674
+ }
675
+ }
676
+ fm_headers.push({
677
+ id: "edit-path",
678
+ value: github_paths.edit_path,
679
+ });
680
+ }
681
+
682
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
683
+ fm_headers.push({
684
+ id: "pdf-path",
685
+ value: file_path.relativePath.replace(
686
+ path.extname(file_path.relativePath),
687
+ ".pdf",
688
+ ),
689
+ });
690
+ }
691
+
692
+ // Add frontmatter tags as comment
693
+ let fm_header = "<!--[[FRONTMATTER\n";
694
+ if (fm_headers.length > 0) {
695
+ for (let i = 0; i < fm_headers.length; i++) {
696
+ fm_header += `${fm_headers[i].id}: ${fm_headers[i].value}\n`;
697
+ }
698
+ }
699
+ fm_header += "]]-->";
700
+
701
+ let doc_header = "";
702
+ let pdf_header = "";
703
+ const inline_content = file_path.relativePath.startsWith(
704
+ `${hdocbook_config.docId}/_inline/`,
705
+ );
706
+ if (hdocbook_config.publicSource?.includes("github.com/Hornbill-Docs")) {
707
+ // Build doc header from template and frontmatter tags
708
+ if (!inline_content)
709
+ doc_header = process_doc_header(
710
+ fm_headers,
711
+ file_path.relativePath,
712
+ doc_header_template,
713
+ html_h1,
714
+ );
715
+
716
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
717
+ pdf_header = process_doc_header(
718
+ fm_headers,
719
+ file_path.relativePath,
720
+ pdf_header_template,
721
+ html_h1,
722
+ );
723
+ } else {
724
+ // Build doc header from template and frontmatter tags
725
+ if (!inline_content)
726
+ doc_header = process_doc_header(
727
+ fm_headers,
728
+ file_path.relativePath,
729
+ doc_header_template_non_git,
730
+ html_h1,
731
+ );
732
+
733
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath))
734
+ pdf_header = process_doc_header(
735
+ fm_headers,
736
+ file_path.relativePath,
737
+ pdf_header_template_non_git,
738
+ html_h1,
739
+ );
740
+ }
741
+
742
+ let pdf_size = 0;
743
+ if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
744
+ let pdf_txt = await hdoc_build_pdf.process_images(file_path, html_txt);
745
+ pdf_txt = `${pdf_header}\n${pdf_txt}`;
746
+
747
+ // Generate PDF file from HTML
748
+ const pdf_file_path = file_path.path.replace(
749
+ path.extname(file_path.path),
750
+ ".pdf",
751
+ );
752
+ pdf_size = await hdoc_build_pdf.generate_pdf(
753
+ browser,
754
+ pdf_template_path,
755
+ pdf_template,
756
+ hdocbook_config,
757
+ pdf_txt,
758
+ pdf_file_path,
759
+ css_templates,
760
+ verbose,
761
+ );
762
+ }
763
+ if (pdf_size > 0) pdf_created++;
764
+
765
+ // Wrap h2 and h3 tags, plus content, in id'd divs
766
+ html_txt = hdoc.wrapHContent(html_txt);
767
+
768
+ if (inline_content) html_txt = `${fm_header}\n${html_txt}`;
769
+ else html_txt = `${fm_header}\n${doc_header}\n${html_txt}`;
770
+
771
+ // Save HTML into HTML file
772
+ const target_file = file_path.path.replace(
773
+ path.extname(file_path.path),
774
+ ".html",
775
+ );
776
+ let relative_path = file_path.relativePath.replace(
777
+ path.extname(file_path.path),
778
+ ".html",
779
+ );
780
+ try {
781
+ fs.writeFileSync(target_file, html_txt);
782
+ } catch (err) {
783
+ console.error("Error writing:", target_file, "\n", err);
784
+ }
785
+
786
+ if (
787
+ !bc[relative_path.replace(".html", "")] &&
788
+ bc[relative_path.replace("/index.html", "")]
789
+ ) {
790
+ relative_path = relative_path.replace("/index.html", "");
791
+ }
792
+
793
+ const index_data = hdoc_index.transform_html_for_index(html_txt);
794
+
795
+ for (const section of index_data.sections) {
796
+ index_records.push({
797
+ relative_path: relative_path,
798
+ index_html: {
799
+ fm_props: index_data.fm_props,
800
+ text: section.text,
801
+ preview: section.preview,
802
+ id: section.id ? section.id : null,
803
+ },
804
+ metadata: metadata,
805
+ contributors: contribs,
806
+ pdf_size: pdf_size,
807
+ md5: file_path.hash,
808
+ lastmod: last_commit !== null ? last_commit : file_path.hb_lastmod,
809
+ inline: inline_content,
810
+ });
811
+ }
812
+
813
+ // Add MD file to delete queue
814
+ md_files_delete.push(file_path.path);
815
+
816
+ conversion_success++;
817
+ return true;
818
+ }
819
+ conversion_failed++;
820
+ console.error("MD file does not exist:", file_path.path);
821
+ return false;
822
+ };
823
+
824
+ const tidy_code_tags = (markdown, file) => {
825
+ let clean_markdown = markdown;
826
+ const json_to_tidy = clean_markdown.match(/```json[\r\n](\s|.)*?```/g);
827
+ if (json_to_tidy && json_to_tidy.length > 0) {
828
+ for (let i = 0; i < json_to_tidy.length; i++) {
829
+ if (json_to_tidy[i] !== "") {
830
+ let json_tidy = json_to_tidy[i]
831
+ .replace("```json", "")
832
+ .replace("```", "");
833
+ try {
834
+ json_tidy = JSON.stringify(JSON.parse(json_tidy), null, 2);
835
+ } catch (e) {
836
+ console.info(
837
+ `[WARNING] Could not tidy JSON in file [${file}]: ${e}`,
838
+ );
839
+ }
840
+ clean_markdown = clean_markdown.replace(
841
+ json_to_tidy[i],
842
+ `\`\`\`json\n${json_tidy}\n\`\`\``,
843
+ );
844
+ }
845
+ }
846
+ }
847
+
848
+ const xml_to_tidy = clean_markdown.match(/```xml[\r\n](\s|.)*?```/g);
849
+ if (xml_to_tidy && xml_to_tidy.length > 0) {
850
+ for (let i = 0; i < xml_to_tidy.length; i++) {
851
+ if (xml_to_tidy[i] !== "") {
852
+ const xml_tidy = xml_to_tidy[i]
853
+ .replace("```xml", "")
854
+ .replace("```", "");
855
+ let new_xml_string = xml_tidy;
856
+ try {
857
+ new_xml_string = xmlFormat(xml_tidy, {
858
+ indentation: " ",
859
+ collapseContent: true,
860
+ lineSeparator: "\n",
861
+ });
862
+ } catch (e) {
863
+ console.info(
864
+ `[WARNING] Could not tidy XML in file [${file}]: ${e}`,
865
+ );
866
+ }
867
+ clean_markdown = clean_markdown.replace(
868
+ xml_to_tidy[i],
869
+ `\`\`\`xml\n${new_xml_string}\n\`\`\``,
870
+ );
871
+ }
872
+ }
873
+ }
874
+ return clean_markdown;
875
+ };
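// A minimal usage sketch for tidy_code_tags, with a made-up markdown snippet
// (the second argument is only used to label any warning message):
const example_md = "Intro\n```json\n{\"a\":1,\"b\":[2,3]}\n```\n";
const example_tidied = tidy_code_tags(example_md, "docs/example.md");
// The fence in example_tidied is re-emitted pretty-printed:
// ```json
// {
//   "a": 1,
//   "b": [
//     2,
//     3
//   ]
// }
// ```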
876
+
877
+ const process_doc_header = (fm_headers, doc_path, template, h1) => {
878
+ let wip_doc_header = template;
879
+ let used_h1 = false;
880
+ if (h1 && h1 !== "") {
881
+ wip_doc_header = wip_doc_header.replaceAll("{{title}}", h1);
882
+ used_h1 = true;
883
+ }
884
+ // Process fm_headers properties first
885
+ for (let i = 0; i < fm_headers.length; i++) {
886
+ switch (fm_headers[i].id) {
887
+ case "title":
888
+ if (!used_h1)
889
+ wip_doc_header = wip_doc_header.replaceAll(
890
+ "{{title}}",
891
+ fm_headers[i].value,
892
+ );
893
+ break;
894
+ case "reading-time":
895
+ wip_doc_header = wip_doc_header.replaceAll(
896
+ "{{reading-time}}",
897
+ fm_headers[i].value,
898
+ );
899
+ break;
900
+ case "contributor-count":
901
+ wip_doc_header = wip_doc_header.replaceAll(
902
+ "{{contributor-count}}",
903
+ fm_headers[i].value,
904
+ );
905
+ break;
906
+ case "type":
907
+ wip_doc_header = wip_doc_header.replaceAll(
908
+ "{{doc-type}}",
909
+ fm_headers[i].value,
910
+ );
911
+ break;
912
+ case "edit-path":
913
+ wip_doc_header = wip_doc_header.replaceAll(
914
+ "{{edit-url}}",
915
+ fm_headers[i].value,
916
+ );
917
+ break;
918
+ case "last-commit": {
919
+ let last_commit_date = fm_headers[i].value;
920
+ if (last_commit_date !== "No Commit Date Available") {
921
+ last_commit_date = new Date(fm_headers[i].value).toDateString();
922
+ }
923
+ wip_doc_header = wip_doc_header.replaceAll(
924
+ "{{last-update}}",
925
+ last_commit_date,
926
+ );
927
+ break;
928
+ }
929
+ default:
930
+ break;
931
+ }
932
+ }
933
+
934
+ // Now sort out breadcrumbs
935
+ const logical_path = doc_path.replace(path.extname(doc_path), "");
936
+ const bc_for_path = bc[logical_path];
937
+ let bc_tags = "\n";
938
+ if (bc_for_path) {
939
+ for (let i = 0; i < bc_for_path.length - 1; i++) {
940
+ let bc_link = "/";
941
+ if (redirects[bc_for_path[i].link]) {
942
+ if (redirects[bc_for_path[i].link].location) {
943
+ bc_link += redirects[bc_for_path[i].link].location;
944
+ }
945
+ } else {
946
+ if (bc_for_path[i].link) {
947
+ bc_link = bc_for_path[i].link.startsWith("/")
948
+ ? bc_for_path[i].link
949
+ : `/${bc_for_path[i].link}`;
950
+ } else {
951
+ bc_link = "";
952
+ }
953
+ }
954
+ if (bc_link !== "") {
955
+ bc_tags += `\t\t\t\t<li class="mt-0 nav-bar-item"><a href="${bc_link}" class="ps-0 pe-0 text-decoration-none">${bc_for_path[i].text}</a></li>\n`;
956
+ } else {
957
+ bc_tags += `\t\t\t\t<li class="mt-0 nav-bar-item">${bc_for_path[i].text}</li>\n`;
958
+ }
959
+ }
960
+ } else {
961
+ if (verbose) {
962
+ console.info(
963
+ `[WARNING] Path is not present in navigation items: ${logical_path}`,
964
+ );
965
+ }
966
+ }
967
+ bc_tags += "\t\t\t";
968
+ wip_doc_header = wip_doc_header.replaceAll("{{breadcrumbs}}", bc_tags);
969
+ return wip_doc_header;
970
+ };
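// The header templates consumed above are expected to expose these tokens,
// matching the replaceAll calls in process_doc_header: {{title}},
// {{reading-time}}, {{contributor-count}}, {{doc-type}}, {{edit-url}},
// {{last-update}} and {{breadcrumbs}}. A minimal substitution sketch with a
// made-up one-line template (the real templates live under templates/):
const example_header_template =
  "<h1>{{title}}</h1><span>{{reading-time}} min read</span>";
const example_header_html = example_header_template
  .replaceAll("{{title}}", "Getting Started")
  .replaceAll("{{reading-time}}", "3");
// example_header_html === "<h1>Getting Started</h1><span>3 min read</span>"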
971
+
972
+ // File callback for build scan
973
+ const build_file_callback = (element) => {
974
+ if (element.extension === "md") {
975
+ element.hb_source_path = path.join(
976
+ global_source_path,
977
+ element.relativePath,
978
+ );
979
+ const fstats = fs.statSync(element.hb_source_path);
980
+ element.hb_lastmod = `${fstats.mtime.toISOString().slice(0, 19)}Z`;
981
+ md_files.push(element);
982
+ } else {
983
+ // File is html, see if there's a matching md file and if there is then ignore the html
984
+ const md_path = element.path.replace(path.extname(element.path), ".md");
985
+ if (fs.existsSync(md_path)) {
986
+ return;
987
+ }
988
+ element.hb_source_path = path.join(
989
+ global_source_path,
990
+ element.relativePath,
991
+ );
992
+ const fstats = fs.statSync(element.hb_source_path);
993
+ element.hb_lastmod = `${fstats.mtime.toISOString().slice(0, 19)}Z`;
994
+ static_html_files.push(element);
995
+ }
996
+ };
997
+
998
+ // File & folder callback for MD5 hash of built content
999
+ const hash_callback = (element) => {
1000
+ if (element.extension !== "db") {
1001
+ built_file_hashes.push({
1002
+ path: element.relativePath,
1003
+ hash: element.hash,
1004
+ });
1005
+ }
1006
+ };
1007
+
1008
+ // File scan callback for filename validation
1009
+ const filename_validation_callback = (element) => {
1010
+ if (element.relativePath.startsWith("_inline/")) return;
1011
+ if (element.name.toLowerCase() === ".ds_store") return;
1012
+ if (
1013
+ element.name === "article_ext.md" ||
1014
+ element.name === "description_ext.md"
1015
+ )
1016
+ return;
1017
+ if (image_extensions.includes(element.extension)) return;
1018
+ const file_no_ext = element.name.replace(`.${element.extension}`, "");
1019
+ if (!file_no_ext.match(regex_filename))
1020
+ errors_filename.push(element.relativePath);
1021
+ };
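// A quick sketch of the kebab-case rule applied above, using made-up names.
// regex_filename accepts lower-case letters, digits and hyphens, and must
// start with a letter:
const example_names = [
  "getting-started",
  "release-notes-2",
  "Getting-Started",
  "release_notes",
];
const example_results = example_names.map((name) => regex_filename.test(name));
// example_results === [true, true, false, false]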
1022
+
1023
+ const dreeOptions = {
1024
+ hash: true,
1025
+ extensions: ["md", "html", "htm"],
1026
+ normalize: true,
1027
+ stat: true,
1028
+ };
1029
+
1030
+ const dreeOptionsAllFiles = {
1031
+ descendants: true,
1032
+ excludeEmptyDirectories: true,
1033
+ hash: false,
1034
+ normalize: true,
1035
+ size: false,
1036
+ sizeInBytes: false,
1037
+ symbolicLinks: false,
1038
+ };
1039
+
1040
+ const md5DreeOptions = {
1041
+ hash: true,
1042
+ normalize: true,
1043
+ sorted: true,
1044
+ };
1045
+
1046
+ exports.run = async (
1047
+ source_path,
1048
+ verbose_output,
1049
+ github_api_token,
1050
+ validate,
1051
+ gen_exclude,
1052
+ build_version = "",
1053
+ ) => {
1054
+ if (github_api_token !== "") {
1055
+ git_token = github_api_token;
1056
+ }
1057
+ global_source_path = source_path;
1058
+ verbose = verbose_output;
1059
+
1060
+ const start_time = Date.now();
1061
+ // GERRY: The purpose of this function is to create a zip file containing the hdocbook content,
1062
+ // * Create a _work folder
1063
+ // * copy the hdocbook content to the work folder
1064
+ // * Render all markdown into side-by-side HTML files
1065
+ // * Replace SERVER_VARS embedded in documents with the right version information etc.
1066
+ // * Build an index (sqlite FTS5) by extracting text from all HTML content in the work
1067
+ // folder, conceptually we are making a little mini website crawler to index all of the content
1068
+ // within the book.
1069
+ // * Package everything up into a ZIP file, ready for the build controller to package and publish
1070
+
1071
+ console.log("Hornbill HDocBook Build", "\n");
1072
+ console.log(" Document Path:", source_path, "\n");
1073
+ const build_start_dt = new Date().toLocaleString();
1074
+
1075
+ // Load the hdocbook-project.json file to get the docId
1076
+ // use the docId to get the book config
1077
+ const hdocbook_project_config_path = path.join(
1078
+ source_path,
1079
+ "hdocbook-project.json",
1080
+ );
1081
+ try {
1082
+ hdocbook_project = require(hdocbook_project_config_path);
1083
+ } catch (e) {
1084
+ console.error("File not found: hdocbook-project.json\n");
1085
+ console.error(
1086
+ "hdoc build/validate needs to be run in the root of a HDoc Book.\n",
1087
+ );
1088
+ process.exit(1);
1089
+ }
1090
+ doc_id = hdocbook_project.docId;
1091
+
1092
+ if (
1093
+ !validate &&
1094
+ hdocbook_project.pdfGeneration !== undefined &&
1095
+ hdocbook_project.pdfGeneration.enable !== undefined
1096
+ ) {
1097
+ pdf_enable = hdocbook_project.pdfGeneration.enable;
1098
+ }
1099
+
1100
+ if (
1101
+ hdocbook_project.redirects &&
1102
+ Array.isArray(hdocbook_project.redirects)
1103
+ ) {
1104
+ for (let i = 0; i < hdocbook_project.redirects.length; i++) {
1105
+ const redirect_key =
1106
+ hdocbook_project.redirects[i].url.indexOf("/") === 0
1107
+ ? hdocbook_project.redirects[i].url.substring(1)
1108
+ : hdocbook_project.redirects[i].url;
1109
+ redirects[redirect_key] = hdocbook_project.redirects[i];
1110
+ }
1111
+ }
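// Sketch of the lookup keying used above, for a made-up redirect entry:
const example_redirect = { url: "/old/page", location: "new/page" };
const example_redirect_key =
  example_redirect.url.indexOf("/") === 0
    ? example_redirect.url.substring(1)
    : example_redirect.url;
// example_redirect_key === "old/page", so redirects["old/page"] would resolve
// to the entry, and its .location is what the breadcrumb builder links to.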
1112
+
1113
+ console.log("Loading hdocbook config...");
1114
+
1115
+ const book_path = path.join(source_path, doc_id);
1116
+ const hdocbook_path = path.join(book_path, "hdocbook.json");
1117
+ const work_path = path.join(source_path, "_work");
1118
+ const work_hdocbook_path = path.join(work_path, doc_id, "hdocbook.json");
1119
+
1120
+ hdocbook_config = require(hdocbook_path);
1121
+ if (build_version !== "") {
1122
+ if (build_version.match(regex_version)) {
1123
+ hdocbook_config.version = build_version;
1124
+ } else {
1125
+ console.info(
1126
+ `\n[WARNING] Argument build version [${build_version}] does not match expected pattern, defaulting to version specified in book [${hdocbook_config.version}]\n`,
1127
+ );
1128
+ }
1129
+ }
1130
+
1131
+ if (!hdocbook_config.version.match(regex_version)) {
1132
+ console.error(
1133
+ `ERROR: Version number does not match required format - ${hdocbook_config.version}\n`,
1134
+ );
1135
+ process.exit(1);
1136
+ }
1137
+
1138
+ if (hdocbook_config.publicSource?.endsWith(".git"))
1139
+ hdocbook_config.publicSource = hdocbook_config.publicSource.substring(
1140
+ 0,
1141
+ hdocbook_config.publicSource.length - 4,
1142
+ );
1143
+
1144
+ console.log("Loading product families...");
1145
+ const prods = await hdoc.load_product_families();
1146
+ if (!prods.success) {
1147
+ console.error(`${prods.errors}\n`);
1148
+ process.exit(1);
1149
+ } else {
1150
+ prod_families = prods.prod_families;
1151
+ prods_supported = prods.prods_supported;
1152
+ }
1153
+
1154
+ if (!validate) {
1155
+ console.log("Caching CSS for PDF generation...");
1156
+ const css_files = [
1157
+ path.join(pdf_template_path, "css", "custom-block.css"),
1158
+ path.join(pdf_template_path, "css", "hdocs-pdf.css"),
1159
+ path.join(pdf_template_path, "css", "vars.css"),
1160
+ path.join(
1161
+ ui_css_path,
1162
+ "theme-default",
1163
+ "styles",
1164
+ "components",
1165
+ "api-doc.css",
1166
+ ),
1167
+ ];
1168
+ for (let i = 0; i < css_files.length; i++) {
1169
+ try {
1170
+ css_templates.push(fs.readFileSync(css_files[i], "utf8"));
1171
+ } catch (e) {
1172
+ console.error(`Error reading file[${css_files[i]}]: ${e}`);
1173
+ }
1174
+ }
1175
+ }
1176
+
1177
+ // Validate all filenames first
1178
+ console.log("Validating book filenames meet kebab-case requirements...");
1179
+ dree.scan(book_path, dreeOptionsAllFiles, filename_validation_callback);
1180
+ if (errors_filename.length > 0) {
1181
+ console.log("\r\n-----------------------");
1182
+ console.log(" Validation Output ");
1183
+ console.log("-----------------------");
1184
+ console.error(
1185
+ `${errors_filename.length} files do not meet filename requirements:`,
1186
+ );
1187
+ console.error(` - ${errors_filename.join("\n - ")}`);
1188
+ console.log();
1189
+ process.exit(1);
1190
+ }
1191
+
1192
+ console.log(`Building: ${doc_id} v${hdocbook_config.version}...\n`);
1193
+
1194
+ // Make _work folder to copy everything into
1195
+ work_path_content = path.join(work_path, doc_id);
1196
+ if (fs.existsSync(work_path)) {
1197
+ fs.rmSync(work_path, {
1198
+ recursive: true,
1199
+ force: true,
1200
+ });
1201
+ }
1202
+ fs.mkdirSync(work_path);
1203
+
1204
+ const file_filter = (src) => {
1205
+ return !src.toLowerCase().endsWith(".ds_store");
1206
+ };
1207
+
1208
+ // Copy files from book into _work-doc_id folder
1209
+ console.log("Copying content into work folder...");
1210
+ try {
1211
+ fs.copySync(path.join(source_path, doc_id), work_path_content, {
1212
+ filter: file_filter,
1213
+ });
1214
+ } catch (e) {
1215
+ console.error("Error copying from source_path:\n", e);
1216
+ process.exit(1);
1217
+ }
1218
+
1219
+ // Create MD5 hash of content before build
1220
+ console.log("Creating Hash...");
1221
+
1222
+ dree.scan(work_path_content, md5DreeOptions, hash_callback);
1223
+ let concat_hash = "|";
1224
+ for (let i = 0; i < built_file_hashes.length; i++) {
1225
+ concat_hash += `${built_file_hashes[i].path}:${built_file_hashes[i].hash}|`;
1226
+ }
1227
+ if (concat_hash === "|") {
1228
+ console.error("No hash of content has been returned.");
1229
+ process.exit(1);
1230
+ }
1231
+
1232
+ // Create hash and write file
1233
+ const hash = crypto.createHash("md5").update(concat_hash).digest("hex");
1234
+ const checksum_path = path.join(work_path_content, "checksum.md5");
1235
+ try {
1236
+ fs.writeFileSync(checksum_path, hash);
1237
+ console.log("Hash file creation success:", checksum_path);
1238
+ } catch (e) {
1239
+ console.error("\nError creating", checksum_path, ":", e);
1240
+ process.exit(1);
1241
+ }
1242
+
1243
+ // Load document header templates
1244
+ console.log("Loading templates...");
1245
+ try {
1246
+ doc_header_template = fs.readFileSync(doc_header_template_path, "utf8");
1247
+ doc_header_template_non_git = fs.readFileSync(
1248
+ non_git_doc_header_template_path,
1249
+ "utf8",
1250
+ );
1251
+ pdf_header_template = fs.readFileSync(pdf_header_template_path, "utf8");
1252
+ pdf_header_template_non_git = fs.readFileSync(
1253
+ non_git_pdf_header_template_path,
1254
+ "utf8",
1255
+ );
1256
+ } catch (err) {
1257
+ console.error(`Error reading document header template: ${err}`);
1258
+ process.exit(1);
1259
+ }
1260
+
1261
+ if (pdf_enable) {
1262
+ // Load PDF templates
1263
+ try {
1264
+ pdf_template = fs.readFileSync(pdf_template_file_path, "utf8");
1265
+ } catch (err) {
1266
+ console.error(`Error reading PDF template: ${err}`);
1267
+ process.exit(1);
1268
+ }
1269
+ }
1270
+ console.log("Processing navigation breadcrumbs...");
1271
+ const bc_build = hdoc.build_breadcrumbs(hdocbook_config.navigation.items);
1272
+ if (bc_build.errors.length > 0) {
1273
+ console.log("\r\n-----------------------");
1274
+ console.log(" Validation Output ");
1275
+ console.log("-----------------------");
1276
+ console.log(
1277
+ `\n${bc_build.errors.length} errors found when processing navigation:\n`,
1278
+ );
1279
+ console.error(` - ${bc_build.errors.join("\n\n - ")}`);
1280
+ console.log("\n");
1281
+ process.exit(1);
1282
+ }
1283
+ bc = bc_build.bc;
1284
+ console.log("Processing content...");
1285
+ // Get a list of MD files in work_path
1286
+ dree.scan(work_path, dreeOptions, build_file_callback);
1287
+
1288
+ if (pdf_enable) {
1289
+ // Create a Chromium browser instance generate PDFs with
1290
+ browser = await puppeteer.launch({ headless: "shell" });
1291
+ }
1292
+
1293
+ // Work through MD files and convert to HTML
1294
+ const mdPromiseArray = [];
1295
+ for (let i = 0; i < md_files.length; i++) {
1296
+ mdPromiseArray.push(md_files[i]);
1297
+ }
1298
+ const chunkSize = 8;
1299
+ for (let i = 0; i < mdPromiseArray.length; i += chunkSize) {
1300
+ const chunk = mdPromiseArray.slice(i, i + chunkSize);
1301
+ // Convert this chunk of markdown files to HTML concurrently
1302
+ await Promise.all(
1303
+ chunk.map(async (file) => {
1304
+ await transform_markdown_and_save_html(file);
1305
+ }),
1306
+ );
1307
+ }
1308
+
1309
+ // Work through Static HTML files and add Frontmatter tags
1310
+ const htmlPromiseArray = [];
1311
+ for (let i = 0; i < static_html_files.length; i++) {
1312
+ htmlPromiseArray.push(static_html_files[i]);
1313
+ }
1314
+ for (let i = 0; i < htmlPromiseArray.length; i += chunkSize) {
1315
+ const chunk = htmlPromiseArray.slice(i, i + chunkSize);
1316
+ await Promise.all(
1317
+ chunk.map(async (file) => {
1318
+ await transform_static_html(file);
1319
+ }),
1320
+ );
1321
+ }
1322
+
1323
+ if (pdf_enable) {
1324
+ // Close the Chromium browser instance
1325
+ await browser.close();
1326
+ }
1327
+
1328
+ // Output to console
1329
+ console.log(`\n MD files found: ${conversion_attempted}`);
1330
+ console.log(`Successfully converted to HTML: ${conversion_success}`);
1331
+ if (conversion_failed > 0)
1332
+ console.error(` Failed to convert: ${conversion_failed}\n`);
1333
+ else console.log(` Failed to convert: ${conversion_failed}\n`);
1334
+ console.log(` Includes Found: ${includes_found}`);
1335
+ console.log(` Includes Success: ${includes_success}`);
1336
+ if (includes_failed > 0)
1337
+ console.error(` Includes Failed: ${includes_failed}\n`);
1338
+ else console.log(` Includes Failed: ${includes_failed}\n`);
1339
+ console.log(
1340
+ ` Static HTML Files Found: ${static_html_files.length}\n`,
1341
+ );
1342
+ if (!validate) {
1343
+ console.log(` PDF Files Created: ${pdf_created}\n`);
1344
+ }
1345
+
1346
+ // Validate content
1347
+ const validation_success = await hdoc_validate.run(
1348
+ work_path,
1349
+ doc_id,
1350
+ verbose,
1351
+ hdocbook_config,
1352
+ hdocbook_project,
1353
+ bc,
1354
+ prod_families,
1355
+ prods_supported,
1356
+ gen_exclude,
1357
+ redirects,
1358
+ );
1359
+ if (!validation_success) {
1360
+ const end_time = Date.now();
1361
+ console.log(`\nTime Taken: ${get_duration(start_time, end_time)}\n`);
1362
+ process.exit(1);
1363
+ }
1364
+
1365
+ // Delete markdown files
1366
+ console.log("Performing Markdown Cleanup");
1367
+
1368
+ const filePromiseArray = [];
1369
+ for (let i = 0; i < md_files_delete.length; i++) {
1370
+ filePromiseArray.push(md_files_delete[i]);
1371
+ }
1372
+ await Promise.all(
1373
+ filePromiseArray.map(async (file) => {
1374
+ fs.unlink(file, (err) => {
1375
+ if (err) console.error(`Error deleting ${file}: ${err}`);
1376
+ });
1377
+ }),
1378
+ );
1379
+
1380
+ // Add book read timing to the hdocbook.json
1381
+ hdocbook_config.readingTime = Math.ceil(
1382
+ book_read_time + (book_read_time / 100) * 10,
1383
+ );
1384
+ hdocbook_config.navigation.items = hdoc.strip_drafts(
1385
+ hdocbook_config.navigation.items,
1386
+ );
1387
+ try {
1388
+ fs.writeFileSync(
1389
+ work_hdocbook_path,
1390
+ JSON.stringify(hdocbook_config, null, 2),
1391
+ );
1392
+ console.log("\nhdocbook.json update success:", work_hdocbook_path);
1393
+ } catch (e) {
1394
+ console.error("\nError creating", work_hdocbook_path, ":", e);
1395
+ process.exit(1);
1396
+ }
1397
+
1398
+ // Build the index
1399
+ // Create the DB and tables
1400
+ console.log("Building the Index");
1401
+ const db = hdoc_build_db.create_db(work_path, doc_id);
1402
+ if (db.error && db.error !== null) {
1403
+ console.error(db.error);
1404
+ process.exit(1);
1405
+ }
1406
+ // Populate primary index tables
1407
+ const index = await hdoc_build_db.populate_index(
1408
+ db.db,
1409
+ doc_id,
1410
+ hdocbook_config,
1411
+ index_records,
1412
+ verbose,
1413
+ );
1414
+ if (!index.success) {
1415
+ console.error(index.error);
1416
+ process.exit(1);
1417
+ }
1418
+
1419
+ // Populate redirect index table records
1420
+ if (
1421
+ hdocbook_project.redirects &&
1422
+ Array.isArray(hdocbook_project.redirects) &&
1423
+ hdocbook_project.redirects.length > 0
1424
+ ) {
1425
+ const redirects_index = hdoc_build_db.populate_redirects(
1426
+ db.db,
1427
+ hdocbook_project.redirects,
1428
+ verbose,
1429
+ );
1430
+ if (!redirects_index.success) {
1431
+ for (let i = 0; i < redirects_index.errors.length; i++) {
1432
+ console.error(redirects_index.errors[i]);
1433
+ }
1434
+ process.exit(1);
1435
+ }
1436
+ }
1437
+
1438
+ if (!validate) {
1439
+ try {
1440
+ const zip_path = path.join(work_path, `${doc_id}.zip`);
1441
+
1442
+ const output = fs.createWriteStream(zip_path);
1443
+ const archive = archiver("zip");
1444
+ archive.on("error", (err) => {
1445
+ throw err;
1446
+ });
1447
+ archive.pipe(output);
1448
+
1449
+ // Append files from a sub-directory, putting its contents at the root of the archive
1450
+ archive.directory(work_path_content, false);
1451
+ archive.finalize();
1452
+
1453
+ //await zip(work_path_content, zip_path);
1454
+ console.log(`\nZIP Creation Success: ${zip_path}\n`);
1455
+ console.log(" Build Started:", build_start_dt);
1456
+ console.log(`Build Completed: ${new Date().toLocaleString()}\n`);
1457
+ } catch (e) {
1458
+ console.error(`\nError creating ZIP: ${e}`);
1459
+ }
1460
+ } else {
1461
+ console.log("\nValidation Complete\n");
1462
+ }
1463
+ const end_time = Date.now();
1464
+ console.log(`Time Taken: ${get_duration(start_time, end_time)}\n`);
1465
+ };
1466
+
1467
+ const get_duration = (start, end) => {
1468
+ const total_time = new Date(end - start).toISOString().slice(11, 19);
1469
+ const duration_arr = total_time.split(":");
1470
+ let duration = "";
1471
+ if (Number.parseInt(duration_arr[0], 10) > 0) {
1472
+ duration += `${Number.parseInt(duration_arr[0], 10)}h `;
1473
+ }
1474
+ if (duration !== "" || Number.parseInt(duration_arr[1], 10)) {
1475
+ duration += `${Number.parseInt(duration_arr[1], 10)}m `;
1476
+ }
1477
+ duration += `${Number.parseInt(duration_arr[2], 10)}s`;
1478
+ return duration;
1479
+ };
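// Usage sketch for get_duration (made-up timestamps); zero-valued leading
// components are dropped from the formatted string:
//   get_duration(0, 3723000) -> "1h 2m 3s"
//   get_duration(0, 45000)   -> "45s"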
1480
+ })();