hdoc-tools 0.46.0 → 0.47.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/hdoc-create.js CHANGED
@@ -1,5 +1,5 @@
1
1
  (() => {
2
- const fs = require("fs-extra");
2
+ const fs = require("node:fs");
3
3
  const path = require("node:path");
4
4
  const hdoc = require(path.join(__dirname, "hdoc-module.js"));
5
5
 
package/hdoc-db.js CHANGED
@@ -1,5 +1,4 @@
1
1
  (() => {
2
- const html2text = require("html-to-text");
3
2
  const path = require("node:path");
4
3
  const hdoc = require(path.join(__dirname, "hdoc-module.js"));
5
4
 
@@ -69,31 +68,10 @@
69
68
  response.fm_props = fm_headers.fm_properties;
70
69
 
71
70
  // Convert HTML into plain text
72
- response.text = html2text.convert(html_txt, {
73
- ignoreHref: true,
74
- ignoreImage: true,
75
- uppercaseHeadings: false,
76
- wordwrap: null,
77
- selectors: [
78
- { selector: 'h2', format: 'blockString' },
79
- { selector: 'h3', format: 'blockString' },
80
- { selector: 'h4', format: 'blockString' }
81
- ]
82
- });
71
+ response.text = hdoc.html_to_text(html_txt);
83
72
 
84
73
  // Convert HTML into preview text
85
- let preview = html2text.convert(html_txt, {
86
- baseElement: "p",
87
- ignoreHref: true,
88
- ignoreImage: true,
89
- uppercaseHeadings: false,
90
- wordwrap: null,
91
- selectors: [
92
- { selector: 'h2', format: 'blockString' },
93
- { selector: 'h3', format: 'blockString' },
94
- { selector: 'h4', format: 'blockString' }
95
- ]
96
- });
74
+ let preview = hdoc.html_to_text(html_txt, { baseElement: "p" });
97
75
  preview = hdoc
98
76
  .truncate_string(preview, 200, true)
99
77
  .replace(/(?:\r\n|\r|\n)/g, " ");
package/hdoc-init.js CHANGED
@@ -1,11 +1,9 @@
1
1
  (() => {
2
2
  // Required modules
3
- const prompt = require("prompt");
4
- const fs = require("fs-extra");
3
+ const readline = require("node:readline");
4
+ const fs = require("node:fs");
5
5
  const path = require("node:path");
6
6
 
7
- // Configure prompt module preferences
8
- prompt.message = false;
9
7
  const promptProps = [
10
8
  {
11
9
  name: "id",
@@ -40,6 +38,29 @@
40
38
  },
41
39
  ];
42
40
 
41
+ // Asks a single question, re-prompting if the field is required and empty or if
42
+ // the value fails the validator regex. Shows the default value in the prompt label.
43
+ const askQuestion = (rl, field) => new Promise((resolve) => {
44
+ const label = field.default ? `${field.description} (${field.default}): ` : `${field.description}: `;
45
+ const ask = () => {
46
+ rl.question(label, (answer) => {
47
+ const value = answer.trim() || field.default || "";
48
+ if (field.required && !value) {
49
+ console.error("This field is required.");
50
+ ask();
51
+ return;
52
+ }
53
+ if (value && field.validator && !field.validator.test(value)) {
54
+ console.error(field.warning);
55
+ ask();
56
+ return;
57
+ }
58
+ resolve(value);
59
+ });
60
+ };
61
+ ask();
62
+ });
63
+
43
64
  const createBook = (server_path, source_path, docProps) => {
44
65
  console.log("\r\nCreating book with the following properties:\r\n");
45
66
  console.log(" Doc ID:", docProps.id);
@@ -55,7 +76,7 @@
55
76
  if (fs.existsSync(templatePath)) {
56
77
  // If template path exists, do sync copy into book path
57
78
  try {
58
- fs.copySync(templatePath, source_path);
79
+ fs.cpSync(templatePath, source_path, { recursive: true });
59
80
  } catch (e) {
60
81
  console.error("Error copying template:\r\n", e);
61
82
  process.exit(1);
@@ -155,7 +176,7 @@
155
176
  }
156
177
  };
157
178
 
158
- exports.run = (server_path, source_path, md) => {
179
+ exports.run = async (server_path, source_path, md) => {
159
180
  // GERRY: The init function should create a new starting point HDocBook folder structure
160
181
  // ready to run the preview server and start editing.
161
182
  //
@@ -168,14 +189,13 @@
168
189
  const curr_dirs = source_path.split(path.sep);
169
190
  const doc_id = curr_dirs[curr_dirs.length - 1];
170
191
 
171
- prompt.start();
192
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
172
193
  promptProps[0].default = doc_id;
173
- prompt.get(promptProps, (err, result) => {
174
- if (err) {
175
- console.error(err);
176
- return err;
177
- }
178
- createBook(server_path, source_path, result);
179
- });
194
+ const result = {};
195
+ for (const field of promptProps) {
196
+ result[field.name] = await askQuestion(rl, field);
197
+ }
198
+ rl.close();
199
+ createBook(server_path, source_path, result);
180
200
  };
181
201
  })();
package/hdoc-module.js CHANGED
@@ -1,43 +1,53 @@
1
1
  (() => {
2
- const axios = require("axios");
3
- const axiosRetry = require("axios-retry").default;
4
2
  const cheerio = require("cheerio");
3
+ const crypto = require("node:crypto");
5
4
  const fs = require("node:fs");
6
- const https = require("node:https");
7
- const htmlentities = require("html-entities");
8
- const html2text = require("html-to-text");
5
+ const os = require("node:os");
9
6
  const { JSDOM } = require("jsdom");
10
7
  const path = require("node:path");
11
- const wordsCount = require("words-count").default;
12
8
 
13
9
  const includesCache = {};
14
- const agent = new https.Agent({
15
- rejectUnauthorized: false,
16
- });
17
10
 
18
11
  let retried = false;
19
12
 
20
- axiosRetry(axios, {
21
- retries: 5,
22
- shouldResetTimeout: true,
23
- retryCondition(error) {
24
- if (error.response && error.response.status) {
25
- if (error.response.status >= 400 && error.response.status !== 401 && error.response.status !== 403) {
26
- return true;
27
- } else {
28
- return false;
29
- }
30
- } else {
31
- return false;
13
+ // Wraps the built-in fetch() with automatic retry for transient errors.
14
+ // Retries up to maxRetries times when the server returns an HTTP error status
15
+ // >= 400, except for 401 (Unauthorized) and 403 (Forbidden) which are auth
16
+ // failures that won't be resolved by retrying. Network errors (where fetch
17
+ // itself throws) are also retried. Sets the module-level `retried` flag so
18
+ // callers can detect and log a success-after-retry message.
19
+ //
20
+ // Pass `timeoutMs` inside options to apply a per-attempt timeout. A fresh
21
+ // AbortSignal is created for every attempt so that a timed-out first attempt
22
+ // does not leave an already-aborted signal in place for the retries.
23
+ const fetchWithRetry = async (url, options = {}, maxRetries = 5) => {
24
+ const { timeoutMs, ...fetchOptions } = options;
25
+ let retryCount = 0;
26
+ while (true) {
27
+ // Create a fresh signal for each attempt; reusing an already-aborted
28
+ // signal would cause every subsequent retry to abort immediately.
29
+ const attemptOptions = timeoutMs
30
+ ? { ...fetchOptions, signal: AbortSignal.timeout(timeoutMs) }
31
+ : fetchOptions;
32
+ let response;
33
+ try {
34
+ response = await fetch(url, attemptOptions);
35
+ } catch (err) {
36
+ // Network-level error (DNS failure, connection refused, timeout, etc.)
37
+ retryCount++;
38
+ if (retryCount > maxRetries) throw err;
39
+ retried = true;
40
+ continue;
41
+ }
42
+ // Auth failures and successes are not retried
43
+ if (response.ok || response.status === 401 || response.status === 403) {
44
+ return response;
32
45
  }
33
- },
34
- onRetry: (retryCount, error, requestConfig) => {
46
+ retryCount++;
47
+ if (retryCount > maxRetries) return response;
35
48
  retried = true;
36
- console.info(
37
- `\n[WARNING] API call failed - ${error.message}\nEndpoint: ${requestConfig.url}\nRetrying: ${retryCount}`,
38
- );
39
- },
40
- });
49
+ }
50
+ };
41
51
 
42
52
  exports.content_type_for_ext = (ext) => {
43
53
  switch (ext) {
@@ -169,13 +179,13 @@
169
179
  }
170
180
 
171
181
  try {
172
- const file_response = await axios.get(link);
182
+ const file_response = await fetchWithRetry(link);
173
183
  if (retried) {
174
184
  retried = false;
175
185
  console.log("API call retry success!");
176
186
  }
177
187
  if (file_response.status === 200) {
178
- file_content = file_response.data;
188
+ file_content = await file_response.text();
179
189
  } else {
180
190
  throw `Unexpected Status ${file_response.status}`;
181
191
  }
@@ -362,7 +372,11 @@
362
372
  prop_val = prop_val.substring(1, prop_val.length - 1);
363
373
  }
364
374
  if (property_details[0].trim().toLowerCase() === "title") {
365
- prop_val = htmlentities.decode(prop_val);
375
+ // Decode HTML entities in the title value: handles named entities
376
+ // (&amp;, &lt;, &gt;, &quot;, &#39;, &apos;), decimal numeric refs (e.g. &#123;),
377
+ // and hex numeric refs (e.g. &#x7B;).
378
+ prop_val = prop_val.replace(/&amp;|&lt;|&gt;|&quot;|&#39;|&apos;|&#(\d+);|&#x([0-9a-fA-F]+);/g,
379
+ (m, dec, hex) => dec ? String.fromCharCode(+dec) : hex ? String.fromCharCode(parseInt(hex, 16)) : ({ '&amp;': '&', '&lt;': '<', '&gt;': '>', '&quot;': '"', '&#39;': "'", '&apos;': "'" })[m]);
366
380
  }
367
381
  response.fm_properties[
368
382
  property_details[0].trim().toLowerCase()
@@ -392,12 +406,21 @@
392
406
  }…`;
393
407
  };
394
408
 
409
+ exports.html_to_text = (html, { baseElement } = {}) => {
410
+ const dom = new JSDOM(html);
411
+ const document = dom.window.document;
412
+ if (baseElement) {
413
+ return Array.from(document.querySelectorAll(baseElement))
414
+ .map((el) => el.textContent)
415
+ .join("\n");
416
+ }
417
+ return document.body ? document.body.textContent : "";
418
+ };
419
+
395
420
  exports.get_html_read_time = (html) => {
396
421
  // Get word count
397
- const text = html2text.convert(html, {
398
- wordwrap: null,
399
- });
400
- const word_count = wordsCount(text);
422
+ const text = exports.html_to_text(html);
423
+ const word_count = text.trim().split(/\s+/).filter(Boolean).length;
401
424
  if (word_count === 0) return 0;
402
425
 
403
426
  // Calculate the read time - divide the word count by 200
@@ -458,32 +481,34 @@
458
481
  }
459
482
 
460
483
  let github_response;
484
+ let github_data;
461
485
  try {
462
- github_response = await axios.get(github_url, request_options);
486
+ github_response = await fetchWithRetry(github_url, {
487
+ headers: request_options.headers,
488
+ timeoutMs: 5000,
489
+ });
463
490
  if (retried) {
464
491
  retried = false;
465
492
  console.log("API call retry success!");
466
493
  }
494
+ github_data = await github_response.json();
467
495
  } catch (err) {
468
- if (err.response) {
469
- if (err.response.status !== 403 && err.response.status !== 401) {
470
- response.error = err;
471
- return response;
472
- }
473
- github_response = err.response;
474
- } else {
475
- response.error = `Unexpected response from GitHub for [${github_url}:\n${JSON.stringify(
476
- err,
477
- )}]`;
478
- }
496
+ // Network-level failure (fetchWithRetry re-throws after exhausting retries)
497
+ response.error = `Unexpected response from GitHub for [${github_url}:\n${JSON.stringify(err)}]`;
498
+ return response;
499
+ }
500
+ // fetch does not throw on HTTP errors — return early for unexpected status codes
501
+ if (github_response.status !== 200 && github_response.status !== 401 && github_response.status !== 403) {
502
+ response.error = new Error(`HTTP ${github_response.status}`);
503
+ return response;
479
504
  }
480
505
  if (github_response.status === 200) {
481
506
  response.success = true;
482
- response.data = github_response.data;
483
- response.private = github_response.data.private;
507
+ response.data = github_data;
508
+ response.private = github_data.private;
484
509
  } else {
485
510
  // Is it a 404 or 403?
486
- response.error = `${github_response.status} : ${github_response.data.message}`;
511
+ response.error = `${github_response.status} : ${github_data.message}`;
487
512
  }
488
513
  return response;
489
514
  };
@@ -515,28 +540,30 @@
515
540
  request_options.headers.authorization = `Bearer ${github_api_token}`;
516
541
  }
517
542
  let github_response;
543
+ let github_data;
518
544
  try {
519
- github_response = await axios.get(github_url, request_options);
545
+ github_response = await fetchWithRetry(github_url, {
546
+ headers: request_options.headers,
547
+ timeoutMs: 5000,
548
+ });
520
549
  if (retried) {
521
550
  retried = false;
522
551
  console.log("API call retry success!");
523
552
  }
553
+ github_data = await github_response.json();
524
554
  } catch (err) {
525
- if (err.response) {
526
- if (err.response.status !== 403 && err.response.status !== 401) {
527
- response.error = err;
528
- return response;
529
- }
530
- github_response = err.response;
531
- } else {
532
- response.error = `Unexpected response from GitHub for [${github_url}:\n${JSON.stringify(
533
- err,
534
- )}]`;
535
- }
555
+ // Network-level failure (fetchWithRetry re-throws after exhausting retries)
556
+ response.error = `Unexpected response from GitHub for [${github_url}:\n${JSON.stringify(err)}]`;
557
+ return response;
558
+ }
559
+ // fetch does not throw on HTTP errors — return early for unexpected status codes
560
+ if (github_response.status !== 200 && github_response.status !== 401 && github_response.status !== 403) {
561
+ response.error = new Error(`HTTP ${github_response.status}`);
562
+ return response;
536
563
  }
537
564
  if (github_response.status === 200) {
538
565
  response.success = true;
539
- const commits = github_response.data;
566
+ const commits = github_data;
540
567
  for (const commit of commits) {
541
568
  if (
542
569
  commit.committer?.type &&
@@ -589,26 +616,28 @@
589
616
  // Private repo, fine-grained permissions don't yet support getting commits without content, get list from meta permissions
590
617
  const contrib_url = get_github_contributors_path(repo).api_path;
591
618
  try {
592
- github_response = await axios.get(contrib_url, request_options);
619
+ github_response = await fetchWithRetry(contrib_url, {
620
+ headers: request_options.headers,
621
+ timeoutMs: 5000,
622
+ });
593
623
  if (retried) {
594
624
  retried = false;
595
625
  console.log("API call retry success!");
596
626
  }
627
+ github_data = await github_response.json();
597
628
  } catch (err) {
598
- if (err.response?.status) {
599
- if (err.response.status !== 200) {
600
- response.error = err;
601
- return response;
602
- }
603
- } else {
604
- response.error = `Unexpected response from GitHub for [${contrib_url}:\n${JSON.stringify(
605
- err,
606
- )}]`;
607
- }
629
+ // Network-level failure (fetchWithRetry re-throws after exhausting retries)
630
+ response.error = `Unexpected response from GitHub for [${contrib_url}:\n${JSON.stringify(err)}]`;
631
+ return response;
632
+ }
633
+ // fetch does not throw on HTTP errors — return early if fallback request failed
634
+ if (github_response.status !== 200) {
635
+ response.error = new Error(`HTTP ${github_response.status}`);
636
+ return response;
608
637
  }
609
638
  if (github_response.status === 200) {
610
639
  response.success = true;
611
- const commits = github_response.data;
640
+ const commits = github_data;
612
641
  for (const commit of commits) {
613
642
  if (
614
643
  commit.type &&
@@ -761,12 +790,11 @@
761
790
  const prod_families_url = "https://docs.hornbill.com/_books/products.json";
762
791
  for (let i = 1; i < 4; i++) {
763
792
  try {
764
- const prods = await axios.get(prod_families_url, {
765
- httpsAgent: agent,
766
- timeout: 5000,
793
+ const prods = await fetch(prod_families_url, {
794
+ signal: AbortSignal.timeout(5000),
767
795
  });
768
796
  if (prods.status === 200) {
769
- response.prod_families = prods.data;
797
+ response.prod_families = await prods.json();
770
798
  response.prods_supported = [];
771
799
  for (let i = 0; i < response.prod_families.products.length; i++) {
772
800
  response.prods_supported.push(
@@ -788,6 +816,191 @@
788
816
  return response;
789
817
  };
790
818
 
819
+ // Recursively walks a directory tree and invokes fileCallback(element) for each file.
820
+ // Mirrors the dree.scan API so existing options objects and callbacks work unchanged.
821
+ // Supported options:
822
+ // extensions - array of extensions to include (e.g. ["md","html"]); omit for all files
823
+ // hash - compute MD5 hash of each file's content and set element.hash
824
+ // normalize - convert backslashes to forward slashes in all paths
825
+ // sorted - sort directory entries alphabetically before recursing
826
+ // sizeInBytes / size - include file size as element.sizeInBytes
827
+ // stat - include the fs.Stats object as element.stat
828
+ // depth - maximum recursion depth (default: unlimited)
829
+ // excludeEmptyDirectories - skip directories that contain no entries
830
+ // symbolicLinks - set false to skip symbolic links (default: include them)
831
+ // Each element passed to the callback has: name, path (absolute), relativePath, and any
832
+ // optional fields enabled above.
833
+ exports.scan_dir = (dirPath, opts = {}, fileCallback) => {
834
+ const extensions = opts.extensions ? new Set(opts.extensions.map((e) => e.toLowerCase())) : null;
835
+ const maxDepth = opts.depth !== undefined ? opts.depth : Infinity;
836
+
837
+ const walk = (currentPath, depth) => {
838
+ if (depth > maxDepth) return;
839
+ let entries;
840
+ try {
841
+ entries = fs.readdirSync(currentPath, { withFileTypes: true });
842
+ } catch (_) { return; }
843
+
844
+ if (opts.sorted) entries = entries.slice().sort((a, b) => a.name.localeCompare(b.name));
845
+
846
+ for (const entry of entries) {
847
+ if (opts.symbolicLinks === false && entry.isSymbolicLink()) continue;
848
+ const fullPath = path.join(currentPath, entry.name);
849
+
850
+ if (entry.isDirectory()) {
851
+ if (opts.excludeEmptyDirectories) {
852
+ try { if (fs.readdirSync(fullPath).length === 0) continue; } catch (_) { continue; }
853
+ }
854
+ walk(fullPath, depth + 1);
855
+ } else if (entry.isFile()) {
856
+ const ext = path.extname(entry.name).slice(1).toLowerCase();
857
+ if (extensions && !extensions.has(ext)) continue;
858
+
859
+ const absPath = opts.normalize ? fullPath.replaceAll("\\", "/") : fullPath;
860
+ const relPath = opts.normalize
861
+ ? path.relative(dirPath, fullPath).replaceAll("\\", "/")
862
+ : path.relative(dirPath, fullPath);
863
+
864
+ const element = { name: entry.name, path: absPath, relativePath: relPath, extension: ext };
865
+
866
+ if (opts.sizeInBytes || opts.size || opts.stat) {
867
+ const stat = fs.statSync(fullPath);
868
+ if (opts.sizeInBytes || opts.size) element.sizeInBytes = stat.size;
869
+ if (opts.stat) element.stat = stat;
870
+ }
871
+
872
+ if (opts.hash) {
873
+ element.hash = crypto.createHash("md5").update(fs.readFileSync(fullPath)).digest("hex");
874
+ }
875
+
876
+ fileCallback(element);
877
+ }
878
+ }
879
+ };
880
+
881
+ walk(dirPath, 1);
882
+ };
883
+
884
+ // Resolves the true on-disk casing of a file path by walking each path segment
885
+ // and doing a case-insensitive match against the actual directory listing.
886
+ // This is important on case-sensitive filesystems (Linux) to catch casing mismatches
887
+ // that would silently pass on macOS/Windows but break in CI or production.
888
+ // On Windows, the drive letter is normalised to uppercase and backslash delimiters are used.
889
+ exports.true_case_path_sync = (filePath) => {
890
+ const isWin = process.platform === "win32";
891
+ const delim = isWin ? "\\" : "/";
892
+ filePath = path.normalize(filePath);
893
+ const segments = filePath.split(delim).filter((s) => s !== "");
894
+ let base = path.isAbsolute(filePath) ? (isWin ? segments.shift().toUpperCase() : "") : process.cwd();
895
+ return segments.reduce((realPath, seg) => {
896
+ const entries = fs.readdirSync(realPath + delim);
897
+ // Escape any regex special chars in the segment name before building the pattern
898
+ const re = new RegExp(`^${seg.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}$`, "i");
899
+ const match = entries.find((e) => re.test(e));
900
+ if (!match) throw new Error(`true_case_path_sync: no match for "${seg}" in "${realPath}"`);
901
+ return realPath + delim + match;
902
+ }, base);
903
+ };
904
+
905
+ // Creates an empty temporary file with a random name and optional file extension suffix
906
+ // (e.g. { postfix: ".mmd" }). Returns an object with:
907
+ // name - the absolute path to the temp file
908
+ // removeCallback - call this to delete the file when done; errors are silently ignored
909
+ // so it is safe to call even if the file was already cleaned up.
910
+ exports.tmp_file_sync = (opts = {}) => {
911
+ const name = path.join(os.tmpdir(), `hdoc-${crypto.randomBytes(8).toString("hex")}${opts.postfix || ""}`);
912
+ fs.closeSync(fs.openSync(name, "w"));
913
+ return { name, removeCallback: () => { try { fs.unlinkSync(name); } catch (_) {} } };
914
+ };
915
+
916
+ // Parses a YAML string into a plain JS object. Designed for markdown frontmatter,
917
+ // which is always simple: scalar key/value pairs, block arrays (- item), and inline
918
+ // arrays ([a, b, c]). Does not support anchors, multi-line strings, or nested objects.
919
+ // parseVal handles type coercion: booleans, null, integers, floats, quoted strings,
920
+ // inline arrays, and plain strings (returned as-is).
921
+ exports.parse_yaml = (str) => {
922
+ const parseVal = (v) => {
923
+ if (v === "true") return true;
924
+ if (v === "false") return false;
925
+ if (v === "null" || v === "~") return null;
926
+ if (/^-?\d+$/.test(v)) return parseInt(v, 10);
927
+ if (/^-?\d*\.\d+$/.test(v)) return parseFloat(v);
928
+ if (/^['"].*['"]$/.test(v)) return v.slice(1, -1); // strip surrounding quotes
929
+ if (v.startsWith("[") && v.endsWith("]")) return v.slice(1, -1).split(",").map(i => parseVal(i.trim()));
930
+ return v;
931
+ };
932
+ const result = {};
933
+ let currentKey = null;
934
+ for (const line of str.split("\n")) {
935
+ if (!line.trim() || /^\s*#/.test(line)) continue;
936
+ // Block array item (indented dash): append to the current key's array
937
+ const arrMatch = line.match(/^\s+-\s+(.+)$/);
938
+ if (arrMatch && currentKey) {
939
+ if (!Array.isArray(result[currentKey])) result[currentKey] = [];
940
+ result[currentKey].push(parseVal(arrMatch[1].trim()));
941
+ continue;
942
+ }
943
+ // key: value pair — value may be empty (e.g. start of a block array)
944
+ const kvMatch = line.match(/^([^:]+):\s*(.*)$/);
945
+ if (kvMatch) {
946
+ currentKey = kvMatch[1].trim();
947
+ result[currentKey] = kvMatch[2].trim() ? parseVal(kvMatch[2].trim()) : null;
948
+ }
949
+ }
950
+ return result;
951
+ };
952
+
953
+ // Pretty-prints an XML string with configurable indentation and line endings.
954
+ // Tokenises the input using a regex that preserves CDATA sections, comments, and
955
+ // processing instructions as atomic units so their content is never re-indented.
956
+ // Options:
957
+ // indentation - string to use per indent level (default: two spaces)
958
+ // lineSeparator - line ending to use (default: "\n")
959
+ // collapseContent - when true (default), elements that contain only a single text
960
+ // node are kept on one line: <tag>text</tag> rather than split
961
+ // across three lines. Uses one-token lookahead to detect this.
962
+ exports.xml_format = (xml, opts = {}) => {
963
+ const ind = opts.indentation || " ";
964
+ const sep = opts.lineSeparator || "\n";
965
+ const collapse = opts.collapseContent !== false;
966
+ // Match CDATA, comments, PIs, tags, and text nodes as individual tokens
967
+ const tokens = xml.trim().match(/(<!\[CDATA\[[\s\S]*?\]\]>|<!--[\s\S]*?-->|<[^>]+>|[^<]+)/g) || [];
968
+ let depth = 0;
969
+ let out = "";
970
+ for (let i = 0; i < tokens.length; i++) {
971
+ const t = tokens[i].trim();
972
+ if (!t) continue;
973
+ const isClose = t.startsWith("</");
974
+ const isSelf = t.startsWith("<") && t.endsWith("/>") && !t.startsWith("<?");
975
+ const isSpecial = t.startsWith("<?") || t.startsWith("<!--") || t.startsWith("<![");
976
+ const isOpen = t.startsWith("<") && !isClose && !isSelf && !isSpecial;
977
+ if (isClose) {
978
+ depth = Math.max(0, depth - 1);
979
+ out += ind.repeat(depth) + t + sep;
980
+ } else if (isSelf || isSpecial) {
981
+ out += ind.repeat(depth) + t + sep;
982
+ } else if (isOpen) {
983
+ // Collapse: if the very next token is text and the one after is the closing tag,
984
+ // emit all three on one line and skip those two tokens.
985
+ if (collapse && i + 2 < tokens.length) {
986
+ const nextTxt = tokens[i + 1] ? tokens[i + 1].trim() : "";
987
+ const nextClose = tokens[i + 2] ? tokens[i + 2].trim() : "";
988
+ if (nextTxt && !nextTxt.startsWith("<") && nextClose.startsWith("</")) {
989
+ out += ind.repeat(depth) + t + nextTxt + nextClose + sep;
990
+ i += 2;
991
+ continue;
992
+ }
993
+ }
994
+ out += ind.repeat(depth) + t + sep;
995
+ depth++;
996
+ } else {
997
+ // Plain text node
998
+ out += ind.repeat(depth) + t + sep;
999
+ }
1000
+ }
1001
+ return out.trimEnd();
1002
+ };
1003
+
791
1004
  exports.find_string_in_string = (fileContent, searchString) => {
792
1005
  const lines = fileContent.split('\n');
793
1006
 
package/hdoc-serve.js CHANGED
@@ -5,11 +5,9 @@
5
5
  const hdoc = require(path.join(__dirname, "hdoc-module.js"));
6
6
  const stream = require("node:stream");
7
7
 
8
- const yaml = require("js-yaml");
9
- const mdfm = require("markdown-it-front-matter");
8
+ const mdfm = require("markdown-it-front-matter");
10
9
 
11
10
  const { execSync } = require("child_process");
12
- const tmp = require("tmp");
13
11
 
14
12
  const mermaid_theme_path = path.resolve(
15
13
  __dirname,
@@ -134,8 +132,8 @@
134
132
  highlight: function (str, lang) {
135
133
  if (lang === "mermaid") {
136
134
  try {
137
- const tmpInput = tmp.fileSync({ postfix: ".mmd" });
138
- const tmpOutput = tmp.fileSync({ postfix: ".svg" });
135
+ const tmpInput = hdoc.tmp_file_sync({ postfix: ".mmd" });
136
+ const tmpOutput = hdoc.tmp_file_sync({ postfix: ".svg" });
139
137
 
140
138
  if (!str.startsWith('---')) {
141
139
  str = '---\n' + fs.readFileSync(mermaid_theme_path, {encoding: 'utf-8'}) + `\n---\n${str}`;
@@ -199,7 +197,7 @@ ${err.message}
199
197
  const html_txt = md.render(md_txt.toString());
200
198
 
201
199
  if (frontmatter_content.length) {
202
- const obj = yaml.load(frontmatter_content);
200
+ const obj = hdoc.parse_yaml(frontmatter_content);
203
201
  const buff = Buffer.from(JSON.stringify(obj), "utf-8");
204
202
  const base64 = buff.toString("base64");
205
203
  res.setHeader("X-frontmatter", base64);