hdoc-tools 0.20.0 → 0.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/hdoc-module.js CHANGED
@@ -1,708 +1,728 @@
1
- (() => {
2
- const axios = require("axios");
3
- const axiosRetry = require("axios-retry").default;
4
- const cheerio = require("cheerio");
5
- const fs = require("node:fs");
6
- const https = require("node:https");
7
- const htmlentities = require("html-entities");
8
- const html2text = require("html-to-text");
9
- const { JSDOM } = require("jsdom");
10
- const path = require("node:path");
11
- const wordsCount = require("words-count").default;
12
-
13
- const includesCache = {};
14
- const agent = new https.Agent({
15
- rejectUnauthorized: false,
16
- });
17
-
18
- let retried = false;
19
-
20
- axiosRetry(axios, {
21
- retries: 5,
22
- shouldResetTimeout: true,
23
- retryCondition: (error) => {
24
-       return !error.response?.status;
25
- },
26
- onRetry: (retryCount, error, requestConfig) => {
27
- retried = true;
28
- console.info(
29
- `\n[WARNING] API call failed - ${error.message}\nEndpoint: ${requestConfig.url}\nRetrying: ${retryCount}`,
30
- );
31
- },
32
- });
33
-
34
- exports.content_type_for_ext = (ext) => {
35
- switch (ext) {
36
- case ".z":
37
- return "application/x-compress";
38
- case ".tgz":
39
- return "application/x-compressed";
40
- case ".gz":
41
- return "application/x-gzip";
42
- case ".zip":
43
- return "application/x-zip-compressed";
44
- case ".xml":
45
- return "application/xml";
46
- case ".bmp":
47
- return "image/bmp";
48
- case ".gif":
49
- return "image/gif";
50
- case ".jpg":
51
- return "image/jpeg";
52
- case ".png":
53
- return "image/png";
54
- case ".tiff":
55
- return "image/tiff";
56
- case ".ico":
57
- return "image/x-icon";
58
- case ".svg":
59
- return "image/svg+xml";
60
- case ".css":
61
- return "text/css";
62
- case ".htm":
63
- case ".html":
64
- return "text/html";
65
- case ".txt":
66
- return "text/plain";
67
- case ".md":
68
- return "text/plain";
69
- case ".json":
70
- return "application/json";
71
- case ".js":
72
- return "application/javascript";
73
- default:
74
- return "application/octet-stream";
75
- }
76
- };
77
-
78
- exports.valid_url = (url) => {
79
- const stringIsAValidUrl = (s) => {
80
- try {
81
- const url_obj = new URL(s);
82
- return url_obj;
83
- } catch (err) {
84
- return false;
85
- }
86
- };
87
- return stringIsAValidUrl(url);
88
- };
89
-
90
- exports.expand_variables = (text, docId = "") => {
91
- let clean_text = text;
92
- if (docId !== "") {
93
- clean_text = clean_text.replaceAll("{{DOC_ID}}", docId);
94
- }
95
- clean_text = clean_text.replaceAll("{{BUILD_NUMBER}}", "0");
96
-
97
- let build_date = new Date().toISOString();
98
- build_date = build_date.replace("T", " ");
99
- build_date = build_date.substring(0, 19);
100
- clean_text = clean_text.replaceAll("{{BUILD_DATE}}", build_date);
101
- return clean_text;
102
- };
103
-
104
- exports.process_includes = async (file_path, body, source_path) => {
105
- const response = {
106
- body: body,
107
- found: 0,
108
- success: 0,
109
- failed: 0,
110
- included: [],
111
- errors: [],
112
- };
113
-
114
- // Search body for INCLUDEs
115
- const regexp = /\[\[INCLUDE .*]]/g;
116
- const body_array = [...response.body.matchAll(regexp)];
117
-
118
- for (let i = 0; i < body_array.length; i++) {
119
- response.found++;
120
-
121
- // Extract include data from array
122
- const include_value = body_array[i][0];
123
-
124
- let link;
125
- try {
126
- link = include_value.split(" ")[1];
127
- link = link.substring(0, link.length - 2);
128
- } catch (e) {
129
- response.failed++;
130
- response.errors.push(
131
-             `Error parsing INCLUDE [${include_value}] from [${file_path}]: ${e}`,
132
- );
133
- continue;
134
- }
135
-
136
- if (
137
- (link.startsWith("http://") || link.startsWith("https://")) &&
138
- includesCache[link] !== undefined
139
- ) {
140
- console.log(`Serving From Cache: ${link}`);
141
- response.body = response.body.replace(
142
- include_value,
143
- includesCache[link],
144
- );
145
- response.success++;
146
- continue;
147
- }
148
-
149
- // Validate link in INCLUDE
150
- let file_content;
151
- if (link.startsWith("http://") || link.startsWith("https://")) {
152
- // Remote content to include
153
- try {
154
- new URL(link);
155
- } catch (e) {
156
- response.failed++;
157
- response.errors.push(
158
- `Error validating INCLUDE link [${link}] from [${file_path}]: ${e}`,
159
- );
160
- continue;
161
- }
162
-
163
- try {
164
- const file_response = await axios.get(link);
165
- if (retried) {
166
- retried = false;
167
- console.log("API call retry success!");
168
- }
169
- if (file_response.status === 200) {
170
- file_content = file_response.data;
171
- } else {
172
- throw `Unexpected Status ${file_response.status}`;
173
- }
174
- } catch (e) {
175
- response.failed++;
176
- response.errors.push(
177
- `Error getting INCLUDE link content [${link}] from [${file_path}]: ${e}`,
178
- );
179
- continue;
180
- }
181
- console.log(`Included From Remote Source: ${link}`);
182
- } else {
183
- // Local content to include
184
- try {
185
- file_content = fs.readFileSync(path.join(source_path, link), "utf8");
186
- } catch (e) {
187
- response.failed++;
188
- response.errors.push(
189
- `Error getting INCLUDE file [${link}] from [${file_path}]: ${e}`,
190
- );
191
- continue;
192
- }
193
- console.log(`Included From Local Source: ${link}`);
194
- }
195
- response.success++;
196
- includesCache[link] = file_content;
197
- response.body = response.body.replace(include_value, file_content);
198
- }
199
- return response;
200
- };
201
-
202
- // Takes html, returns the first heading detected in the order provided in h_to_search
203
-   // Defaults to h1 only; pass additional tag names to widen the search
204
- exports.getFirstHTMLHeading = (html_body, h_to_search = ["h1"]) => {
205
- const $ = cheerio.load(html_body);
206
- for (let i = 0; i < h_to_search.length; i++) {
207
- const heading = $(h_to_search[i])
208
- .map(function (i) {
209
- return $(this);
210
- })
211
- .get();
212
- if (heading.length > 0) {
213
- return heading[0];
214
- }
215
- }
216
- return false;
217
- };
218
-
219
- const makeAnchorIdFriendly = (str) => {
220
- return `hb-doc-anchor-${str // Add prefix
221
- .toLowerCase() // Convert to lowercase
222
- .trim() // Trim leading and trailing spaces
223
- .replace(/[^a-z0-9\s-]/g, "") // Remove all non-alphanumeric characters except spaces and hyphens
224
- .replace(/\s+/g, "-") // Replace spaces with hyphens
225
- .replace(/-+/g, "-")}`; // Replace multiple hyphens with a single hyphen
226
- };
227
-
228
- // Processes HTML, wraps h2 and h3 tags and their content in divs with an id matching that of the h text
229
- exports.wrapHContent = (htmlContent) => {
230
- const dom = new JSDOM(htmlContent);
231
- const document = dom.window.document;
232
-
233
- const nodes = Array.from(document.body.childNodes); // Convert NodeList to Array for easier manipulation
234
- const newContent = document.createDocumentFragment(); // Create a document fragment to hold the new structure
235
-
236
- let currentH2Div = null;
237
- let currentH3Div = null;
238
-
239
- for (const node of nodes) {
240
- if (node.nodeType === dom.window.Node.ELEMENT_NODE) {
241
- if (node.tagName.toLowerCase() === "h2") {
242
- // When an <h2> is found, close the current <h2> div (if any) and start a new one
243
- if (currentH2Div) {
244
- if (currentH3Div) {
245
- currentH2Div.appendChild(currentH3Div);
246
- currentH3Div = null;
247
- }
248
- newContent.appendChild(currentH2Div);
249
- }
250
- currentH2Div = document.createElement("div");
251
- currentH2Div.id = makeAnchorIdFriendly(node.textContent.trim());
252
- currentH2Div.appendChild(node);
253
- } else if (node.tagName.toLowerCase() === "h3") {
254
- // When an <h3> is found, close the current <h3> div (if any) and start a new one
255
- if (currentH3Div) {
256
- if (currentH2Div) {
257
- currentH2Div.appendChild(currentH3Div);
258
- } else {
259
- newContent.appendChild(currentH3Div);
260
- }
261
- }
262
- currentH3Div = document.createElement("div");
263
- currentH3Div.id = makeAnchorIdFriendly(node.textContent.trim());
264
- currentH3Div.appendChild(node);
265
- } else {
266
- if (currentH3Div) {
267
- currentH3Div.appendChild(node);
268
- } else if (currentH2Div) {
269
- currentH2Div.appendChild(node);
270
- } else {
271
- newContent.appendChild(node);
272
- }
273
- }
274
- } else {
275
- if (currentH3Div) {
276
- currentH3Div.appendChild(node);
277
- } else if (currentH2Div) {
278
- currentH2Div.appendChild(node);
279
- } else {
280
- newContent.appendChild(node);
281
- }
282
- }
283
- }
284
-
285
- // Append the last <h3> div if any
286
- if (currentH3Div) {
287
- if (currentH2Div) {
288
- currentH2Div.appendChild(currentH3Div);
289
- } else {
290
- newContent.appendChild(currentH3Div);
291
- }
292
- }
293
-
294
- // Append the last <h2> div if any
295
- if (currentH2Div) {
296
- newContent.appendChild(currentH2Div);
297
- }
298
-
299
- // Replace the old body content with the new content
300
- document.body.innerHTML = "";
301
- document.body.appendChild(newContent);
302
-
303
-     // Serialize the document back to HTML and return it to the caller
304
- const outputHtml = dom.serialize();
305
- return outputHtml;
306
- };
307
-
308
- exports.getIDDivs = (html_body) => {
309
- const $ = cheerio.load(html_body, {
310
- decodeEntities: false,
311
- });
312
-
313
- const divs = [];
314
-
315
- $("div").each(function (i, element) {
316
- if ($(this).attr("id")?.startsWith("hb-doc-anchor-")) {
317
- divs.push({
318
- id: $(this).attr("id"),
319
- html: $(this).html(),
320
- text: $(this).text(),
321
- });
322
- }
323
- });
324
- return divs;
325
- };
326
-
327
- exports.getHTMLFrontmatterHeader = (html_body) => {
328
- const response = {
329
- fm_header: "",
330
- fm_properties: {},
331
- };
332
- const $ = cheerio.load(html_body, {
333
- decodeEntities: false,
334
- });
335
- if (
336
- $._root?.children &&
337
- Array.isArray($._root.children) &&
338
- $._root.children.length > 0
339
- ) {
340
- for (const child of $._root.children) {
341
- if (
342
- child.type === "comment" &&
343
- child.data &&
344
- child.data.startsWith("[[FRONTMATTER")
345
- ) {
346
- // We have a Frontmatter header - return each property in an array
347
- const fm_properties = child.data.split(/\r?\n/);
348
- for (let i = 0; i < fm_properties.length; i++) {
349
- if (fm_properties[i].includes(":")) {
350
- const property_details = fm_properties[i].split(/:(.*)/s);
351
- if (property_details.length > 1) {
352
- let prop_val = property_details[1].trim();
353
- if (/^".*"$/.test(prop_val)) {
354
- prop_val = prop_val.substring(1, prop_val.length - 1);
355
- }
356
- if (property_details[0].trim().toLowerCase() === "title") {
357
- prop_val = htmlentities.decode(prop_val);
358
- }
359
- response.fm_properties[
360
- property_details[0].trim().toLowerCase()
361
- ] = prop_val;
362
- }
363
- }
364
- }
365
-
366
- // And return the header as a whole so it can be easily replaced
367
- response.fm_header = child.data;
368
- }
369
- }
370
- }
371
-
372
- return response;
373
- };
374
-
375
- exports.truncate_string = (str, n, useWordBoundary) => {
376
- if (str.length <= n) {
377
- return str;
378
- }
379
- const subString = str.slice(0, n - 1);
380
- return `${
381
- useWordBoundary
382
- ? subString.slice(0, subString.lastIndexOf(" "))
383
- : subString
384
- }…`;
385
- };
386
-
387
- exports.get_html_read_time = (html) => {
388
- // Get word count
389
- const text = html2text.convert(html, {
390
- wordwrap: null,
391
- });
392
- const word_count = wordsCount(text);
393
- if (word_count === 0) return 0;
394
-
395
- // Calculate the read time - divide the word count by 200
396
- let read_time = Math.round(word_count / 200);
397
- if (read_time === 0) read_time = 1;
398
- return read_time;
399
- };
400
-
401
- exports.get_github_api_path = (repo, relative_path) => {
402
- if (repo) {
403
- const clean_repo = repo.endsWith("/") ? repo.slice(0, -1) : repo;
404
- const github_paths = {};
405
- github_paths.api_path = clean_repo.replace(
406
- "https://github.com/",
407
- "https://api.github.com/repos/",
408
- );
409
- github_paths.api_path += `/commits?path=${encodeURIComponent(
410
- `/${relative_path.replace("\\\\", "/").replace("\\", "/")}`,
411
- )}`;
412
- github_paths.edit_path = `${repo}/blob/main/${relative_path.replace("\\\\", "/").replace("\\", "/")}`;
413
- return github_paths;
414
- }
415
- return "";
416
- };
417
-
418
- const get_github_contributors_path = (repo) => {
419
- const clean_repo = repo.endsWith("/") ? repo.slice(0, -1) : repo;
420
- const github_paths = {};
421
- github_paths.api_path = clean_repo.replace(
422
- "https://github.com/",
423
- "https://api.github.com/repos/",
424
- );
425
- github_paths.api_path += "/contributors";
426
- return github_paths;
427
- };
428
-
429
- exports.get_github_contributors = async (
430
- github_url,
431
- github_api_token,
432
- repo,
433
- ) => {
434
- const response = {
435
- success: false,
436
- error: "",
437
- contributors: [],
438
- contributor_count: 0,
439
- last_commit_date: "",
440
- };
441
- const contributors = {};
442
-
443
- const request_options = {
444
- headers: {
445
- "User-Agent": "HornbillDocsBuild",
446
- "Cache-Control": "no-cache",
447
- Host: "api.github.com",
448
- Accept: "application/json",
449
- },
450
- timeout: 5000,
451
- };
452
- if (github_api_token !== "") {
453
- request_options.headers.authorization = `Bearer ${github_api_token}`;
454
- }
455
- let github_response;
456
- try {
457
- github_response = await axios.get(github_url, request_options);
458
- if (retried) {
459
- retried = false;
460
- console.log("API call retry success!");
461
- }
462
- } catch (err) {
463
- if (err.response) {
464
- if (err.response.status !== 403 && err.response.status !== 401) {
465
- response.error = err;
466
- return response;
467
- }
468
- github_response = err.response;
469
- } else {
470
- response.error = `Unexpected response from GitHub for [${github_url}:\n${JSON.stringify(
471
- err,
472
- )}]`;
473
- }
474
- }
475
- if (github_response.status === 200) {
476
- response.success = true;
477
- const commits = github_response.data;
478
- for (const commit of commits) {
479
- if (
480
- commit.committer?.type &&
481
- commit.committer.type.toLowerCase() === "user" &&
482
- commit.committer.login.toLowerCase() !== "web-flow"
483
- ) {
484
- if (!contributors[commit.committer.id]) {
485
- response.contributor_count++;
486
- contributors[commit.committer.id] = {
487
- login: commit.committer.login,
488
- avatar_url: commit.committer.avatar_url,
489
- html_url: commit.committer.html_url,
490
- name: commit.commit.committer.name,
491
- };
492
- }
493
- if (response.last_commit_date !== "") {
494
- const new_commit_date = new Date(commit.commit.committer.date);
495
- const exist_commit_date = new Date(response.last_commit_date);
496
- if (new_commit_date > exist_commit_date)
497
- response.last_commit_date = commit.commit.committer.date;
498
- } else {
499
- response.last_commit_date = commit.commit.committer.date;
500
- }
501
- } else if (commit.author?.id) {
502
- if (!contributors[commit.author.id]) {
503
- response.contributor_count++;
504
- contributors[commit.author.id] = {
505
- login: commit.author.login,
506
- avatar_url: commit.author.avatar_url,
507
- html_url: commit.author.html_url,
508
- name: commit.commit.author.name,
509
- };
510
- }
511
- if (response.last_commit_date !== "") {
512
- const new_commit_date = new Date(commit.commit.author.date);
513
- const exist_commit_date = new Date(response.last_commit_date);
514
- if (new_commit_date > exist_commit_date)
515
- response.last_commit_date = commit.commit.author.date;
516
- } else {
517
- response.last_commit_date = commit.commit.author.date;
518
- }
519
- }
520
- }
521
- for (const key in contributors) {
522
- if (Object.hasOwn(contributors, key)) {
523
- response.contributors.push(contributors[key]);
524
- }
525
- }
526
- } else if (github_response.status === 403) {
527
-       // Private repo: fine-grained tokens can't yet list commits without content access, so fall back to the /contributors endpoint (metadata permission)
528
- const contrib_url = get_github_contributors_path(repo).api_path;
529
- try {
530
- github_response = await axios.get(contrib_url, request_options);
531
- if (retried) {
532
- retried = false;
533
- console.log("API call retry success!");
534
- }
535
- } catch (err) {
536
- if (err.response?.status) {
537
- if (err.response.status !== 200) {
538
- response.error = err;
539
- return response;
540
- }
541
- } else {
542
- response.error = `Unexpected response from GitHub for [${contrib_url}:\n${JSON.stringify(
543
- err,
544
- )}]`;
545
- }
546
- }
547
- if (github_response.status === 200) {
548
- response.success = true;
549
- const commits = github_response.data;
550
- for (const commit of commits) {
551
- if (
552
- commit.type &&
553
- commit.type.toLowerCase() === "user" &&
554
- commit.login.toLowerCase() !== "web-flow"
555
- ) {
556
- if (!contributors[commit.id]) {
557
- response.contributor_count++;
558
- contributors[commit.id] = {
559
- login: commit.login,
560
- avatar_url: commit.avatar_url,
561
- html_url: commit.html_url,
562
- name: commit.name ? commit.name : commit.login,
563
- };
564
- }
565
- if (
566
- response.last_commit_date !== "" &&
567
- response.last_commit_date !== "No Commit Date Available"
568
- ) {
569
- const new_commit_date = new Date(commit.date);
570
- const exist_commit_date = new Date(response.last_commit_date);
571
- if (new_commit_date > exist_commit_date)
572
- response.last_commit_date = commit.date;
573
- } else {
574
- response.last_commit_date = commit.date
575
- ? commit.date
576
- : "No Commit Date Available";
577
- }
578
- }
579
- }
580
- for (const key in contributors) {
581
- if (Object.hasOwn(contributors, key)) {
582
- response.contributors.push(contributors[key]);
583
- }
584
- }
585
- }
586
- } else {
587
- response.error = `Unexpected Status: ${github_response.status}.`;
588
- }
589
- return response;
590
- };
591
-
592
- exports.strip_drafts = (nav_items) => {
593
- const return_nav = nav_items;
594
- recurse_nav(return_nav);
595
- return return_nav;
596
- };
597
-
598
- const recurse_nav = (nav_items) => {
599
- for (const key in nav_items) {
600
- if (nav_items[key].draft) {
601
- nav_items.splice(key, 1);
602
- recurse_nav(nav_items);
603
- } else if (nav_items[key].items) {
604
- recurse_nav(nav_items[key].items);
605
- }
606
- }
607
- };
608
-
609
- exports.build_breadcrumbs = (nav_items) => {
610
- const response = {
611
- bc: {},
612
- errors: [],
613
- };
614
- const buildBreadcrumb = (items, parentLinks) => {
615
- // Process parent links
616
- let parentlink = true;
617
- if (parentLinks.length > 0) {
618
- if (parentLinks[0].link === undefined || parentLinks[0].link === "")
619
- parentlink = false;
620
-
621
- for (let i = 1; i < 10; i++) {
622
- if (
623
- parentLinks[i] &&
624
- parentLinks[i].link === undefined &&
625
- items.length > 0 &&
626
- items[0].link
627
- ) {
628
- parentLinks[i].link = items[0].link;
629
- }
630
- }
631
- }
632
-
633
- // Loop through items, build breadcrumb
634
- for (let i = 0; i < items.length; i++) {
635
- if (!items[i].text) {
636
- response.errors.push(
637
- `The following Nav Item is missing its text property: ${JSON.stringify(
638
- items[i],
639
- )}`,
640
- );
641
- }
642
-
643
- if (!items[i].link && !items[i].items) {
644
- response.errors.push(
645
- `The following Nav Item has no link or items property: ${JSON.stringify(
646
- items[i],
647
- )}`,
648
- );
649
- }
650
- const item = items[i];
651
- if (!parentlink && item.link) {
652
- parentLinks[0].link = item.link;
653
- parentlink = true;
654
- }
655
- const { text, link, items: subItems } = item;
656
- const breadcrumb = [...parentLinks, { text, link }];
657
-
658
- if (link) {
659
- response.bc[link] = breadcrumb;
660
- }
661
-
662
- if (subItems) {
663
- buildBreadcrumb(subItems, breadcrumb);
664
- }
665
- }
666
- };
667
-
668
- buildBreadcrumb(nav_items, []);
669
- return response;
670
- };
671
-
672
- exports.load_product_families = async () => {
673
- const response = {
674
- success: false,
675
- prod_families: {},
676
- prods_supported: [],
677
- errors: "",
678
- };
679
- const prod_families_url = "https://docs.hornbill.com/_books/products.json";
680
- for (let i = 1; i < 4; i++) {
681
- try {
682
- const prods = await axios.get(prod_families_url, {
683
- httpsAgent: agent,
684
- timeout: 5000,
685
- });
686
- if (prods.status === 200) {
687
- response.prod_families = prods.data;
688
- response.prods_supported = [];
689
- for (let i = 0; i < response.prod_families.products.length; i++) {
690
- response.prods_supported.push(
691
- response.prod_families.products[i].id,
692
- );
693
- }
694
- response.success = true;
695
- break;
696
- }
697
- throw `Unexpected status - ${prods.status} ${prods.statusText}`;
698
- } catch (e) {
699
- if (response.errors === "")
700
- response.errors = `Request to ${prod_families_url} failed:`;
701
- response.errors += `\nAttempt ${i} - Error returning product families: ${e}`;
702
- // Wait 2 seconds and try again
703
- await new Promise((r) => setTimeout(r, 2000));
704
- }
705
- }
706
- return response;
707
- };
708
- })();
1
+ (() => {
2
+ const axios = require("axios");
3
+ const axiosRetry = require("axios-retry").default;
4
+ const cheerio = require("cheerio");
5
+ const fs = require("node:fs");
6
+ const https = require("node:https");
7
+ const htmlentities = require("html-entities");
8
+ const html2text = require("html-to-text");
9
+ const { JSDOM } = require("jsdom");
10
+ const path = require("node:path");
11
+ const wordsCount = require("words-count").default;
12
+
13
+ const includesCache = {};
14
+ const agent = new https.Agent({
15
+ rejectUnauthorized: false,
16
+ });
17
+
18
+ let retried = false;
19
+
20
+ axiosRetry(axios, {
21
+ retries: 5,
22
+ shouldResetTimeout: true,
23
+ retryCondition: (error) => {
24
+       return !error.response?.status;
25
+ },
26
+ onRetry: (retryCount, error, requestConfig) => {
27
+ retried = true;
28
+ console.info(
29
+ `\n[WARNING] API call failed - ${error.message}\nEndpoint: ${requestConfig.url}\nRetrying: ${retryCount}`,
30
+ );
31
+ },
32
+ });
33
+
34
+ exports.content_type_for_ext = (ext) => {
35
+ switch (ext) {
36
+ case ".z":
37
+ return "application/x-compress";
38
+ case ".tgz":
39
+ return "application/x-compressed";
40
+ case ".gz":
41
+ return "application/x-gzip";
42
+ case ".zip":
43
+ return "application/x-zip-compressed";
44
+ case ".xml":
45
+ return "application/xml";
46
+ case ".bmp":
47
+ return "image/bmp";
48
+ case ".gif":
49
+ return "image/gif";
50
+ case ".jpg":
51
+ return "image/jpeg";
52
+ case ".png":
53
+ return "image/png";
54
+ case ".tiff":
55
+ return "image/tiff";
56
+ case ".ico":
57
+ return "image/x-icon";
58
+ case ".svg":
59
+ return "image/svg+xml";
60
+ case ".css":
61
+ return "text/css";
62
+ case ".htm":
63
+ case ".html":
64
+ return "text/html";
65
+ case ".txt":
66
+ return "text/plain";
67
+ case ".md":
68
+ return "text/plain";
69
+ case ".json":
70
+ return "application/json";
71
+ case ".js":
72
+ return "application/javascript";
73
+ default:
74
+ return "application/octet-stream";
75
+ }
76
+ };
77
+
78
+ exports.valid_url = (url) => {
79
+ const stringIsAValidUrl = (s) => {
80
+ try {
81
+ const url_obj = new URL(s);
82
+ return url_obj;
83
+ } catch (err) {
84
+ return false;
85
+ }
86
+ };
87
+ return stringIsAValidUrl(url);
88
+ };
89
+
90
+ exports.expand_variables = (text, docId = "") => {
91
+ let clean_text = text;
92
+ if (docId !== "") {
93
+ clean_text = clean_text.replaceAll("{{DOC_ID}}", docId);
94
+ }
95
+ clean_text = clean_text.replaceAll("{{BUILD_NUMBER}}", "0");
96
+
97
+ let build_date = new Date().toISOString();
98
+ build_date = build_date.replace("T", " ");
99
+ build_date = build_date.substring(0, 19);
100
+ clean_text = clean_text.replaceAll("{{BUILD_DATE}}", build_date);
101
+ return clean_text;
102
+ };
103
+
104
+ exports.process_includes = async (file_path, body, source_path) => {
105
+ const response = {
106
+ body: body,
107
+ found: 0,
108
+ success: 0,
109
+ failed: 0,
110
+ included: [],
111
+ errors: [],
112
+ };
113
+
114
+ // Search body for INCLUDEs
115
+ const regexp = /\[\[INCLUDE .*]]/g;
116
+ const body_array = [...response.body.matchAll(regexp)];
117
+
118
+ for (let i = 0; i < body_array.length; i++) {
119
+ response.found++;
120
+
121
+ // Extract include data from array
122
+ const include_value = body_array[i][0];
123
+
124
+ let link;
125
+ try {
126
+ link = include_value.split(" ")[1];
127
+ link = link.substring(0, link.length - 2);
128
+ } catch (e) {
129
+ response.failed++;
130
+ response.errors.push(
131
+             `Error parsing INCLUDE [${include_value}] from [${file_path}]: ${e}`,
132
+ );
133
+ continue;
134
+ }
135
+
136
+ if (
137
+ (link.startsWith("http://") || link.startsWith("https://")) &&
138
+ includesCache[link] !== undefined
139
+ ) {
140
+ console.log(`Serving From Cache: ${link}`);
141
+ response.body = response.body.replace(
142
+ include_value,
143
+ includesCache[link],
144
+ );
145
+ response.success++;
146
+ continue;
147
+ }
148
+
149
+ // Validate link in INCLUDE
150
+ let file_content;
151
+ if (link.startsWith("http://") || link.startsWith("https://")) {
152
+ // Remote content to include
153
+ try {
154
+ new URL(link);
155
+ } catch (e) {
156
+ response.failed++;
157
+ response.errors.push(
158
+ `Error validating INCLUDE link [${link}] from [${file_path}]: ${e}`,
159
+ );
160
+ continue;
161
+ }
162
+
163
+ try {
164
+ const file_response = await axios.get(link);
165
+ if (retried) {
166
+ retried = false;
167
+ console.log("API call retry success!");
168
+ }
169
+ if (file_response.status === 200) {
170
+ file_content = file_response.data;
171
+ } else {
172
+ throw `Unexpected Status ${file_response.status}`;
173
+ }
174
+ } catch (e) {
175
+ response.failed++;
176
+ response.errors.push(
177
+ `Error getting INCLUDE link content [${link}] from [${file_path}]: ${e}`,
178
+ );
179
+ continue;
180
+ }
181
+ console.log(`Included From Remote Source: ${link}`);
182
+ } else {
183
+ // Local content to include
184
+ try {
185
+ file_content = fs.readFileSync(path.join(source_path, link), "utf8");
186
+ } catch (e) {
187
+ response.failed++;
188
+ response.errors.push(
189
+ `Error getting INCLUDE file [${link}] from [${file_path}]: ${e}`,
190
+ );
191
+ continue;
192
+ }
193
+ console.log(`Included From Local Source: ${link}`);
194
+ }
195
+ response.success++;
196
+ includesCache[link] = file_content;
197
+ response.body = response.body.replace(include_value, file_content);
198
+ }
199
+ return response;
200
+ };
201
+
202
+ // Takes html, returns the first heading detected in the order provided in h_to_search
203
+   // Defaults to h1 only; pass additional tag names to widen the search
204
+ exports.getFirstHTMLHeading = (html_body, h_to_search = ["h1"]) => {
205
+ const $ = cheerio.load(html_body);
206
+ for (let i = 0; i < h_to_search.length; i++) {
207
+ const heading = $(h_to_search[i])
208
+ .map(function (i) {
209
+ return $(this);
210
+ })
211
+ .get();
212
+ if (heading.length > 0) {
213
+ return heading[0];
214
+ }
215
+ }
216
+ return false;
217
+ };
218
+
219
+ const makeAnchorIdFriendly = (str) => {
220
+ return `hb-doc-anchor-${str // Add prefix
221
+ .toLowerCase() // Convert to lowercase
222
+ .trim() // Trim leading and trailing spaces
223
+ .replace(/[^a-z0-9\s-]/g, "") // Remove all non-alphanumeric characters except spaces and hyphens
224
+ .replace(/\s+/g, "-") // Replace spaces with hyphens
225
+ .replace(/-+/g, "-")}`; // Replace multiple hyphens with a single hyphen
226
+ };
227
+
228
+ // Processes HTML, wraps h2 and h3 tags and their content in divs with an id matching that of the h text
229
+ exports.wrapHContent = (htmlContent) => {
230
+ const dom = new JSDOM(htmlContent);
231
+ const document = dom.window.document;
232
+
233
+ const nodes = Array.from(document.body.childNodes); // Convert NodeList to Array for easier manipulation
234
+ const newContent = document.createDocumentFragment(); // Create a document fragment to hold the new structure
235
+
236
+ let currentH2Div = null;
237
+ let currentH3Div = null;
238
+
239
+ for (const node of nodes) {
240
+ if (node.nodeType === dom.window.Node.ELEMENT_NODE) {
241
+ if (node.tagName.toLowerCase() === "h2") {
242
+ // When an <h2> is found, close the current <h2> div (if any) and start a new one
243
+ if (currentH2Div) {
244
+ if (currentH3Div) {
245
+ currentH2Div.appendChild(currentH3Div);
246
+ currentH3Div = null;
247
+ }
248
+ newContent.appendChild(currentH2Div);
249
+ }
250
+ currentH2Div = document.createElement("div");
251
+ currentH2Div.id = makeAnchorIdFriendly(node.textContent.trim());
252
+ currentH2Div.appendChild(node);
253
+ } else if (node.tagName.toLowerCase() === "h3") {
254
+ // When an <h3> is found, close the current <h3> div (if any) and start a new one
255
+ if (currentH3Div) {
256
+ if (currentH2Div) {
257
+ currentH2Div.appendChild(currentH3Div);
258
+ } else {
259
+ newContent.appendChild(currentH3Div);
260
+ }
261
+ }
262
+ currentH3Div = document.createElement("div");
263
+ currentH3Div.id = makeAnchorIdFriendly(node.textContent.trim());
264
+ currentH3Div.appendChild(node);
265
+ } else {
266
+ if (currentH3Div) {
267
+ currentH3Div.appendChild(node);
268
+ } else if (currentH2Div) {
269
+ currentH2Div.appendChild(node);
270
+ } else {
271
+ newContent.appendChild(node);
272
+ }
273
+ }
274
+ } else {
275
+ if (currentH3Div) {
276
+ currentH3Div.appendChild(node);
277
+ } else if (currentH2Div) {
278
+ currentH2Div.appendChild(node);
279
+ } else {
280
+ newContent.appendChild(node);
281
+ }
282
+ }
283
+ }
284
+
285
+ // Append the last <h3> div if any
286
+ if (currentH3Div) {
287
+ if (currentH2Div) {
288
+ currentH2Div.appendChild(currentH3Div);
289
+ } else {
290
+ newContent.appendChild(currentH3Div);
291
+ }
292
+ }
293
+
294
+ // Append the last <h2> div if any
295
+ if (currentH2Div) {
296
+ newContent.appendChild(currentH2Div);
297
+ }
298
+
299
+ // Replace the old body content with the new content
300
+ document.body.innerHTML = "";
301
+ document.body.appendChild(newContent);
302
+
303
+     // Serialize the document back to HTML and return it to the caller
304
+ const outputHtml = dom.serialize();
305
+ return outputHtml;
306
+ };
307
+
308
+ exports.getIDDivs = (html_body) => {
309
+ const $ = cheerio.load(html_body, {
310
+ decodeEntities: false,
311
+ });
312
+
313
+ const divs = [];
314
+
315
+ $("div").each(function (i, element) {
316
+ if ($(this).attr("id")?.startsWith("hb-doc-anchor-")) {
317
+ divs.push({
318
+ id: $(this).attr("id"),
319
+ html: $(this).html(),
320
+ text: $(this).text(),
321
+ });
322
+ }
323
+ });
324
+ return divs;
325
+ };
326
+
327
+ exports.getHTMLFrontmatterHeader = (html_body) => {
328
+ const response = {
329
+ fm_header: "",
330
+ fm_properties: {},
331
+ };
332
+ const $ = cheerio.load(html_body, {
333
+ decodeEntities: false,
334
+ });
335
+ if (
336
+ $._root?.children &&
337
+ Array.isArray($._root.children) &&
338
+ $._root.children.length > 0
339
+ ) {
340
+ for (const child of $._root.children) {
341
+ if (
342
+ child.type === "comment" &&
343
+ child.data &&
344
+ child.data.startsWith("[[FRONTMATTER")
345
+ ) {
346
+ // We have a Frontmatter header - return each property in an array
347
+ const fm_properties = child.data.split(/\r?\n/);
348
+ for (let i = 0; i < fm_properties.length; i++) {
349
+ if (fm_properties[i].includes(":")) {
350
+ const property_details = fm_properties[i].split(/:(.*)/s);
351
+ if (property_details.length > 1) {
352
+ let prop_val = property_details[1].trim();
353
+ if (/^".*"$/.test(prop_val)) {
354
+ prop_val = prop_val.substring(1, prop_val.length - 1);
355
+ }
356
+ if (property_details[0].trim().toLowerCase() === "title") {
357
+ prop_val = htmlentities.decode(prop_val);
358
+ }
359
+ response.fm_properties[
360
+ property_details[0].trim().toLowerCase()
361
+ ] = prop_val;
362
+ }
363
+ }
364
+ }
365
+
366
+ // And return the header as a whole so it can be easily replaced
367
+ response.fm_header = child.data;
368
+ }
369
+ }
370
+ }
371
+
372
+ return response;
373
+ };
374
+
375
+ exports.truncate_string = (str, n, useWordBoundary) => {
376
+ if (str.length <= n) {
377
+ return str;
378
+ }
379
+ const subString = str.slice(0, n - 1);
380
+ return `${
381
+ useWordBoundary
382
+ ? subString.slice(0, subString.lastIndexOf(" "))
383
+ : subString
384
+ }…`;
385
+ };
386
+
387
+ exports.get_html_read_time = (html) => {
388
+ // Get word count
389
+ const text = html2text.convert(html, {
390
+ wordwrap: null,
391
+ });
392
+ const word_count = wordsCount(text);
393
+ if (word_count === 0) return 0;
394
+
395
+ // Calculate the read time - divide the word count by 200
396
+ let read_time = Math.round(word_count / 200);
397
+ if (read_time === 0) read_time = 1;
398
+ return read_time;
399
+ };
400
+
401
+ exports.get_github_api_path = (repo, relative_path) => {
402
+ if (repo) {
403
+ const clean_repo = repo.endsWith("/") ? repo.slice(0, -1) : repo;
404
+ const github_paths = {};
405
+ github_paths.api_path = clean_repo.replace(
406
+ "https://github.com/",
407
+ "https://api.github.com/repos/",
408
+ );
409
+ github_paths.api_path += `/commits?path=${encodeURIComponent(
410
+ `/${relative_path.replace("\\\\", "/").replace("\\", "/")}`,
411
+ )}`;
412
+ github_paths.edit_path = `${repo}/blob/main/${relative_path.replace("\\\\", "/").replace("\\", "/")}`;
413
+ return github_paths;
414
+ }
415
+ return "";
416
+ };
417
+
418
+ const get_github_contributors_path = (repo) => {
419
+ const clean_repo = repo.endsWith("/") ? repo.slice(0, -1) : repo;
420
+ const github_paths = {};
421
+ github_paths.api_path = clean_repo.replace(
422
+ "https://github.com/",
423
+ "https://api.github.com/repos/",
424
+ );
425
+ github_paths.api_path += "/contributors";
426
+ return github_paths;
427
+ };
428
+
429
+ exports.get_github_contributors = async (
430
+ github_url,
431
+ github_api_token,
432
+ repo,
433
+ ) => {
434
+ const response = {
435
+ success: false,
436
+ error: "",
437
+ contributors: [],
438
+ contributor_count: 0,
439
+ last_commit_date: "",
440
+ };
441
+ const contributors = {};
442
+
443
+ const request_options = {
444
+ headers: {
445
+ "User-Agent": "HornbillDocsBuild",
446
+ "Cache-Control": "no-cache",
447
+ Host: "api.github.com",
448
+ Accept: "application/json",
449
+ },
450
+ timeout: 5000,
451
+ };
452
+ if (github_api_token !== "") {
453
+ request_options.headers.authorization = `Bearer ${github_api_token}`;
454
+ }
455
+ let github_response;
456
+ try {
457
+ github_response = await axios.get(github_url, request_options);
458
+ if (retried) {
459
+ retried = false;
460
+ console.log("API call retry success!");
461
+ }
462
+ } catch (err) {
463
+ if (err.response) {
464
+ if (err.response.status !== 403 && err.response.status !== 401) {
465
+ response.error = err;
466
+ return response;
467
+ }
468
+ github_response = err.response;
469
+ } else {
470
+ response.error = `Unexpected response from GitHub for [${github_url}:\n${JSON.stringify(
471
+ err,
472
+ )}]`;
473
+ }
474
+ }
475
+ if (github_response.status === 200) {
476
+ response.success = true;
477
+ const commits = github_response.data;
478
+ for (const commit of commits) {
479
+ if (
480
+ commit.committer?.type &&
481
+ commit.committer.type.toLowerCase() === "user" &&
482
+ commit.committer.login.toLowerCase() !== "web-flow"
483
+ ) {
484
+ if (!contributors[commit.committer.id]) {
485
+ response.contributor_count++;
486
+ contributors[commit.committer.id] = {
487
+ login: commit.committer.login,
488
+ avatar_url: commit.committer.avatar_url,
489
+ html_url: commit.committer.html_url,
490
+ name: commit.commit.committer.name,
491
+ };
492
+ }
493
+ if (response.last_commit_date !== "") {
494
+ const new_commit_date = new Date(commit.commit.committer.date);
495
+ const exist_commit_date = new Date(response.last_commit_date);
496
+ if (new_commit_date > exist_commit_date)
497
+ response.last_commit_date = commit.commit.committer.date;
498
+ } else {
499
+ response.last_commit_date = commit.commit.committer.date;
500
+ }
501
+ } else if (commit.author?.id) {
502
+ if (!contributors[commit.author.id]) {
503
+ response.contributor_count++;
504
+ contributors[commit.author.id] = {
505
+ login: commit.author.login,
506
+ avatar_url: commit.author.avatar_url,
507
+ html_url: commit.author.html_url,
508
+ name: commit.commit.author.name,
509
+ };
510
+ }
511
+ if (response.last_commit_date !== "") {
512
+ const new_commit_date = new Date(commit.commit.author.date);
513
+ const exist_commit_date = new Date(response.last_commit_date);
514
+ if (new_commit_date > exist_commit_date)
515
+ response.last_commit_date = commit.commit.author.date;
516
+ } else {
517
+ response.last_commit_date = commit.commit.author.date;
518
+ }
519
+ }
520
+ }
521
+ for (const key in contributors) {
522
+ if (Object.hasOwn(contributors, key)) {
523
+ response.contributors.push(contributors[key]);
524
+ }
525
+ }
526
+ } else if (github_response.status === 403) {
527
+       // Private repo: fine-grained tokens can't yet list commits without content access, so fall back to the /contributors endpoint (metadata permission)
528
+ const contrib_url = get_github_contributors_path(repo).api_path;
529
+ try {
530
+ github_response = await axios.get(contrib_url, request_options);
531
+ if (retried) {
532
+ retried = false;
533
+ console.log("API call retry success!");
534
+ }
535
+ } catch (err) {
536
+ if (err.response?.status) {
537
+ if (err.response.status !== 200) {
538
+ response.error = err;
539
+ return response;
540
+ }
541
+ } else {
542
+ response.error = `Unexpected response from GitHub for [${contrib_url}:\n${JSON.stringify(
543
+ err,
544
+ )}]`;
545
+ }
546
+ }
547
+ if (github_response.status === 200) {
548
+ response.success = true;
549
+ const commits = github_response.data;
550
+ for (const commit of commits) {
551
+ if (
552
+ commit.type &&
553
+ commit.type.toLowerCase() === "user" &&
554
+ commit.login.toLowerCase() !== "web-flow"
555
+ ) {
556
+ if (!contributors[commit.id]) {
557
+ response.contributor_count++;
558
+ contributors[commit.id] = {
559
+ login: commit.login,
560
+ avatar_url: commit.avatar_url,
561
+ html_url: commit.html_url,
562
+ name: commit.name ? commit.name : commit.login,
563
+ };
564
+ }
565
+ if (
566
+ response.last_commit_date !== "" &&
567
+ response.last_commit_date !== "No Commit Date Available"
568
+ ) {
569
+ const new_commit_date = new Date(commit.date);
570
+ const exist_commit_date = new Date(response.last_commit_date);
571
+ if (new_commit_date > exist_commit_date)
572
+ response.last_commit_date = commit.date;
573
+ } else {
574
+ response.last_commit_date = commit.date
575
+ ? commit.date
576
+ : "No Commit Date Available";
577
+ }
578
+ }
579
+ }
580
+ for (const key in contributors) {
581
+ if (Object.hasOwn(contributors, key)) {
582
+ response.contributors.push(contributors[key]);
583
+ }
584
+ }
585
+ }
586
+ } else {
587
+ response.error = `Unexpected Status: ${github_response.status}.`;
588
+ }
589
+ return response;
590
+ };
591
+
592
+ exports.strip_drafts = (nav_items) => {
593
+ const return_nav = nav_items;
594
+ recurse_nav(return_nav);
595
+ return return_nav;
596
+ };
597
+
598
+ const recurse_nav = (nav_items) => {
599
+ for (const key in nav_items) {
600
+ if (nav_items[key].draft) {
601
+ nav_items.splice(key, 1);
602
+ recurse_nav(nav_items);
603
+ } else if (nav_items[key].items) {
604
+ recurse_nav(nav_items[key].items);
605
+ }
606
+ }
607
+ };
608
+
609
+ exports.build_breadcrumbs = (nav_items) => {
610
+ const response = {
611
+ bc: {},
612
+ errors: [],
613
+ };
614
+ const buildBreadcrumb = (items, parentLinks) => {
615
+ // Process parent links
616
+ let parentlink = true;
617
+ if (parentLinks.length > 0) {
618
+ if (parentLinks[0].link === undefined || parentLinks[0].link === "" || parentLinks[0].draft )
619
+ parentlink = false;
620
+
621
+ for (let i = 1; i < 10; i++) {
622
+ if (
623
+ parentLinks[i] &&
624
+ parentLinks[i].link === undefined &&
625
+ items.length > 0 &&
626
+ items[0].link
627
+ ) {
628
+ parentLinks[i].link = items[0].link;
629
+ }
630
+ }
631
+ }
632
+
633
+ // Loop through items, build breadcrumb
634
+ for (let i = 0; i < items.length; i++) {
635
+ if (!items[i].text) {
636
+ response.errors.push(
637
+ `The following Nav Item is missing its text property: ${JSON.stringify(
638
+ items[i],
639
+ )}`,
640
+ );
641
+ }
642
+
643
+ if (!items[i].link && !items[i].items) {
644
+ response.errors.push(
645
+ `The following Nav Item has no link or items property: ${JSON.stringify(
646
+ items[i],
647
+ )}`,
648
+ );
649
+ }
650
+ const item = items[i];
651
+ if (!parentlink && item.link) {
652
+ parentLinks[0].link = item.link;
653
+ parentlink = true;
654
+ }
655
+ const { text, link, items: subItems } = item;
656
+ const breadcrumb = [...parentLinks, { text, link }];
657
+
658
+ if (link) {
659
+ response.bc[link] = breadcrumb;
660
+ }
661
+
662
+ if (subItems) {
663
+ buildBreadcrumb(subItems, breadcrumb);
664
+ }
665
+ }
666
+ };
667
+
668
+ buildBreadcrumb(nav_items, []);
669
+ return response;
670
+ };
671
+
672
+ exports.get_draft_links = (items, parent_is_draft = false) => {
673
+ let draft_links = [];
674
+
675
+ for (const item of items) {
676
+ // Check if this item is draft or if any parent was draft
677
+ const is_draft = parent_is_draft || item.draft;
678
+
679
+ // If the current item has a link and is draft (or parent is draft), add the link
680
+ if (is_draft && item.link) {
681
+ draft_links.push(item.link);
682
+ }
683
+
684
+ // If the current item has nested items, recursively check them
685
+ if (item.items) {
686
+ draft_links = draft_links.concat(this.get_draft_links(item.items, is_draft));
687
+ }
688
+ }
689
+ return draft_links;
690
+ };
691
+
692
+ exports.load_product_families = async () => {
693
+ const response = {
694
+ success: false,
695
+ prod_families: {},
696
+ prods_supported: [],
697
+ errors: "",
698
+ };
699
+ const prod_families_url = "https://docs.hornbill.com/_books/products.json";
700
+ for (let i = 1; i < 4; i++) {
701
+ try {
702
+ const prods = await axios.get(prod_families_url, {
703
+ httpsAgent: agent,
704
+ timeout: 5000,
705
+ });
706
+ if (prods.status === 200) {
707
+ response.prod_families = prods.data;
708
+ response.prods_supported = [];
709
+ for (let i = 0; i < response.prod_families.products.length; i++) {
710
+ response.prods_supported.push(
711
+ response.prod_families.products[i].id,
712
+ );
713
+ }
714
+ response.success = true;
715
+ break;
716
+ }
717
+ throw `Unexpected status - ${prods.status} ${prods.statusText}`;
718
+ } catch (e) {
719
+ if (response.errors === "")
720
+ response.errors = `Request to ${prod_families_url} failed:`;
721
+ response.errors += `\nAttempt ${i} - Error returning product families: ${e}`;
722
+ // Wait 2 seconds and try again
723
+ await new Promise((r) => setTimeout(r, 2000));
724
+ }
725
+ }
726
+ return response;
727
+ };
728
+ })();
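
For context, a minimal usage sketch of the two additions in this release: the new get_draft_links export and the draft check added to build_breadcrumbs' parent-link handling. It assumes a hypothetical require path of hdoc-tools/hdoc-module and nav items shaped like the { text, link, items, draft } objects the module already walks.

const hdoc = require("hdoc-tools/hdoc-module"); // hypothetical require path

const nav = [
  {
    text: "Guide",
    link: "/guide/",
    items: [
      { text: "Introduction", link: "/guide/intro" },
      { text: "Work in progress", link: "/guide/wip", draft: true },
    ],
  },
];

// Collect the links of draft items; children of a draft parent are included too
const draftLinks = hdoc.get_draft_links(nav); // ["/guide/wip"]

// Strip draft items in place, then build breadcrumbs from what remains
const publishedNav = hdoc.strip_drafts(nav);
const { bc, errors } = hdoc.build_breadcrumbs(publishedNav);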