hdoc-tools 0.8.45 → 0.8.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Hornbill Docs
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/hdoc-build-pdf.js CHANGED
@@ -64,7 +64,20 @@
64
64
  }).get();
65
65
  imgs.push(...srcs);
66
66
  for (let i = 0; i < imgs.length; i++) {
67
- if (hdoc.valid_url(imgs[i])) {
67
+ if (!hdoc.valid_url(imgs[i])) {
68
+ // Internal link
69
+ const image_path = path.join(book_work_root, imgs[i].replace('_books/', ''));
70
+ try {
71
+ const image_buffer = fs.readFileSync(image_path);
72
+ const mime_type = mime.lookup(image_path);
73
+ let image_b64 = image_buffer.toString("base64");
74
+ image_b64 = `data:${mime_type};base64,${image_b64}`;
75
+ html_source = html_source.replace(imgs[i], image_b64);
76
+ } catch (err) {
77
+ console.log('Error reading image from HTML source [', image_path, '] -', err);
78
+ return null;
79
+ }
80
+ } else {
68
81
  // External Link
69
82
  try {
70
83
  const file_response = await axios.get(imgs[i]);
@@ -80,19 +93,6 @@
80
93
  } catch (err) {
81
94
  console.log(`Error downloading external source [${imgs[i]}] - ${err}`);
82
95
  }
83
- } else {
84
- // Internal link
85
- const image_path = path.join(book_work_root, imgs[i].replace('_books/', ''));
86
- try {
87
- const image_buffer = fs.readFileSync(image_path);
88
- const mime_type = mime.lookup(image_path);
89
- let image_b64 = image_buffer.toString("base64");
90
- image_b64 = `data:${mime_type};base64,${image_b64}`;
91
- html_source = html_source.replace(imgs[i], image_b64);
92
- } catch (err) {
93
- console.log('Error reading image from HTML source [', image_path, '] -', err);
94
- return null;
95
- }
96
96
  }
97
97
  }
98
98
 
package/hdoc-build.js CHANGED
@@ -12,7 +12,9 @@
12
12
  hdoc_build_db = require(path.join(__dirname, 'hdoc-build-db.js')),
13
13
  hdoc_build_pdf = require(path.join(__dirname, 'hdoc-build-pdf.js')),
14
14
  hdoc_index = require(path.join(__dirname, 'hdoc-db.js')),
15
- {zip} = require('zip-a-folder');
15
+ {
16
+ zip
17
+ } = require('zip-a-folder');
16
18
 
17
19
  const h_tags_to_search = ['h1', 'h2', 'h3'],
18
20
  doc_header_template_path = path.join(__dirname, 'templates', 'doc-header.html'),
@@ -47,7 +49,7 @@
47
49
  work_path_content = '',
48
50
  verbose = false;
49
51
 
50
- const pdf_path_excluded = function(relative_path) {
52
+ const pdf_path_excluded = function (relative_path) {
51
53
  if (!hdocbook_project.pdfGeneration || hdocbook_project.pdfGeneration.exclude_paths === undefined) {
52
54
  return false;
53
55
  }
@@ -231,7 +233,7 @@
231
233
  if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
232
234
  let pdf_txt = await hdoc_build_pdf.process_images(file_path, html_txt);
233
235
  pdf_txt = `${pdf_header}\n${pdf_txt}`;
234
-
236
+
235
237
  // Generate PDF file from HTML
236
238
  const pdf_file_path = file_path.path.replace(path.extname(file_path.path), '.pdf');
237
239
  pdf_size = await hdoc_build_pdf.generate_pdf(pdf_template_path, pdf_template, hdocbook_config, pdf_txt, pdf_file_path, verbose);
@@ -280,7 +282,11 @@
280
282
  linkify: true,
281
283
  typographer: true
282
284
  });
283
- md.linkify.set({ fuzzyEmail: false, fuzzyLink: false, fuzzyIP: false });
285
+ md.linkify.set({
286
+ fuzzyEmail: false,
287
+ fuzzyLink: false,
288
+ fuzzyIP: false
289
+ });
284
290
 
285
291
  // Process Frontmatter tags
286
292
  let frontmatter_content = "";
@@ -310,7 +316,7 @@
310
316
  fm_content.forEach(function (fm_prop) {
311
317
  const fm_id = fm_prop.slice(0, fm_prop.indexOf(':'));
312
318
  const fm_val = fm_prop.slice(fm_prop.indexOf(':') + 1);
313
-
319
+
314
320
  if (fm_id && fm_id.trim().length > 0 && fm_val && fm_val.trim().length > 0) {
315
321
  fm_headers.push({
316
322
  id: fm_id.trim(),
@@ -430,17 +436,17 @@
430
436
  doc_header = process_doc_header(fm_headers, file_path.relativePath, doc_header_template_non_git);
431
437
  if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) pdf_header = process_doc_header(fm_headers, file_path.relativePath, pdf_header_template_non_git);
432
438
  }
433
-
439
+
434
440
  let pdf_size = 0;
435
441
  if (pdf_enable && !pdf_path_excluded(file_path.relativePath)) {
436
442
  let pdf_txt = await hdoc_build_pdf.process_images(file_path, html_txt);
437
443
  pdf_txt = `${pdf_header}\n${pdf_txt}`;
438
-
444
+
439
445
  // Generate PDF file from HTML
440
446
  const pdf_file_path = file_path.path.replace(path.extname(file_path.path), '.pdf');
441
447
  pdf_size = await hdoc_build_pdf.generate_pdf(pdf_template_path, pdf_template, hdocbook_config, pdf_txt, pdf_file_path, verbose);
442
448
  }
443
-
449
+
444
450
  html_txt = `${fm_header}\n${doc_header}\n${html_txt}`;
445
451
 
446
452
  // Save HTML into HTML file
@@ -820,7 +826,7 @@
820
826
  console.log(`\nZIP Creation Success: ${zip_path}\n`);
821
827
  console.log(' Build Started:', build_start_dt);
822
828
  console.log(`Build Completed: ${new Date().toLocaleString()}\n`);
823
-
829
+
824
830
  } catch (e) {
825
831
  console.log('\nError creating ZIP: ' + e);
826
832
  }
package/hdoc-module.js CHANGED
@@ -1,10 +1,10 @@
1
1
  (function () {
2
2
  'use strict';
3
3
 
4
- const axios = require('axios'),
5
- cheerio = require('cheerio'),
6
- html2text = require('html-to-text'),
7
- wordsCount = require('words-count').default;
4
+ const axios = require('axios'),
5
+ cheerio = require('cheerio'),
6
+ html2text = require('html-to-text'),
7
+ wordsCount = require('words-count').default;
8
8
 
9
9
  let includesCache = {};
10
10
 
@@ -58,12 +58,12 @@
58
58
 
59
59
  const stringIsAValidUrl = (s) => {
60
60
  try {
61
- new URL(s);
62
- return true;
61
+ const url_obj = new URL(s);
62
+ return url_obj;
63
63
  } catch (err) {
64
- return false;
64
+ return false;
65
65
  }
66
- };
66
+ };
67
67
  return stringIsAValidUrl(url);
68
68
  };
69
69
 
@@ -156,19 +156,21 @@
156
156
  }).get();
157
157
  if (heading.length > 0) {
158
158
  return heading[0];
159
- }
159
+ }
160
160
  }
161
161
  return false;
162
162
  };
163
163
 
164
- exports.getHTMLFrontmatterHeader = function(html_body) {
164
+ exports.getHTMLFrontmatterHeader = function (html_body) {
165
165
  let response = {
166
166
  fm_header: '',
167
167
  fm_properties: {}
168
168
  };
169
- const $ = cheerio.load(html_body, { decodeEntities: false });
169
+ const $ = cheerio.load(html_body, {
170
+ decodeEntities: false
171
+ });
170
172
  if ($._root && $._root.children && $._root.children instanceof Array && $._root.children.length > 0) {
171
- $._root.children.forEach(function(child){
173
+ $._root.children.forEach(function (child) {
172
174
  if (child.type === 'comment' && child.data && child.data.startsWith('[[FRONTMATTER')) {
173
175
  // We have a Frontmatter header - return each property in an array
174
176
  const fm_properties = child.data.split(/\r?\n/);
@@ -193,13 +195,15 @@
193
195
  return response;
194
196
  };
195
197
 
196
- exports.truncate_string = function( str, n, useWordBoundary ){
197
- if (str.length <= n) { return str; }
198
- const subString = str.slice(0, n-1);
198
+ exports.truncate_string = function (str, n, useWordBoundary) {
199
+ if (str.length <= n) {
200
+ return str;
201
+ }
202
+ const subString = str.slice(0, n - 1);
199
203
  return (useWordBoundary ? subString.slice(0, subString.lastIndexOf(" ")) : subString) + '…';
200
204
  };
201
205
 
202
- exports.get_html_read_time = function(html) {
206
+ exports.get_html_read_time = function (html) {
203
207
  // Get word count
204
208
  const text = html2text.convert(html, {
205
209
  wordwrap: null
@@ -213,7 +217,7 @@
213
217
  return read_time;
214
218
  };
215
219
 
216
- exports.get_github_api_path = function(repo, relative_path) {
220
+ exports.get_github_api_path = function (repo, relative_path) {
217
221
  repo = repo.endsWith('/') ? repo.slice(0, -1) : repo;
218
222
  let github_paths = {};
219
223
  github_paths.api_path = repo.replace('https://github.com/', 'https://api.github.com/repos/');
@@ -222,13 +226,13 @@
222
226
  return github_paths;
223
227
  };
224
228
 
225
- exports.get_github_contributors = async function(github_url, github_api_token) {
229
+ exports.get_github_contributors = async function (github_url, github_api_token) {
226
230
  let response = {
227
231
  success: false,
228
232
  error: '',
229
233
  contributors: [],
230
234
  contributor_count: 0,
231
- last_commit_date:''
235
+ last_commit_date: ''
232
236
  };
233
237
 
234
238
  let contributors = {};
@@ -255,7 +259,7 @@
255
259
  if (github_response.status === 200) {
256
260
  response.success = true;
257
261
  let commits = github_response.data;
258
- commits.forEach(function(commit){
262
+ commits.forEach(function (commit) {
259
263
  if (commit.committer && commit.committer.type && commit.committer.type.toLowerCase() === 'user' && commit.committer.login.toLowerCase() !== 'web-flow') {
260
264
  if (!contributors[commit.committer.id]) {
261
265
  response.contributor_count++;
package/hdoc-validate.js CHANGED
@@ -25,7 +25,8 @@
25
25
  for (let i = 0; i < links.length; i++) {
26
26
 
27
27
  // Validate that link is a valid URL first
28
- if (!hdoc.valid_url(links[i])) {
28
+ const valid_url = hdoc.valid_url(links[i]);
29
+ if (!valid_url) {
29
30
  // Could be a relative path, check
30
31
  isRelativePath(source_path, htmlFile, links[i]);
31
32
  } else {
@@ -36,6 +37,9 @@
36
37
  continue;
37
38
  }
38
39
 
40
+ if (valid_url.protocol === 'mailto:') {
41
+ continue;
42
+ }
39
43
  // Skip if the link is excluded in the project config
40
44
  if (exclude_links[links[i]]) {
41
45
  continue;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "hdoc-tools",
3
- "version": "0.8.45",
3
+ "version": "0.8.47",
4
4
  "description": "Hornbill HDocBook Development Support Tool",
5
5
  "main": "hdoc.js",
6
6
  "bin": {
package/.github/workflows/ — new workflow file ADDED (filename not recorded in this diff; workflow is named "HDocBuildPush")
@@ -0,0 +1,13 @@
1
+ name: HDocBuildPush
2
+ on:
3
+ # Triggers the workflow on push requests
4
+ push:
5
+ paths-ignore:
6
+ - '.github/**'
7
+
8
+ # Allows us to run this workflow manually from the Actions tab
9
+ workflow_dispatch:
10
+
11
+ jobs:
12
+ run-shared-push-validation:
13
+ uses: Hornbill-Docs/hdoc-library/.github/workflows/hdocbuild_shared_push.yml@8c43ee2385d364537ecee64b9860869efc73766c
package/ui/index.html CHANGED
@@ -280,13 +280,13 @@
280
280
  "css/theme-default/styles/components/content.css",
281
281
  "css/theme-default/styles/components/custom-block.css",
282
282
 
283
- "js/highlightJS/styles/vs2015.css"
283
+ "js/highlightjs/styles/vs2015.css"
284
284
  ], function()
285
285
  {
286
286
  //-- required js + 3rd party libs
287
287
  setTimeout(function()
288
288
  {
289
- loadJS(["js/doc.hornbill.js","js/highlightJs/highlight.pack.js","js/highlightjs-badge.js"],function()
289
+ loadJS(["js/doc.hornbill.js","js/highlightjs/highlight.pack.js","js/highlightjs-badge.js"],function()
290
290
  {
291
291
  intialiseApp();
292
292
  });