hdoc-tools 0.8.40 → 0.8.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/hdoc-build-pdf.js CHANGED
@@ -1,13 +1,13 @@
 (function () {
     'use strict';
 
-    const cheerio = require('cheerio'),
+    const axios = require('axios'),
+        cheerio = require('cheerio'),
         dree = require('dree'),
         fs = require('fs-extra'),
         mime = require('mime-types'),
         path = require('path'),
         puppeteer = require('puppeteer'),
-        request = require('sync-request'),
         hdoc = require(path.join(__dirname, 'hdoc-module.js'));
 
     const dree_options = {
@@ -67,15 +67,15 @@
             if (hdoc.valid_url(imgs[i])) {
                 // External Link
                 try {
-                    const file_response = request('GET', imgs[i]);
-                    if (file_response.statusCode === 200) {
-                        const image_buffer = file_response.getBody();
+                    const file_response = await axios.get(imgs[i]);
+                    if (file_response.status === 200) {
+                        const image_buffer = file_response.data;
                         const mime_type = mime.lookup(imgs[i]);
                         let image_b64 = image_buffer.toString("base64");
                         image_b64 = `data:${mime_type};base64,${image_b64}`;
                         html_source = html_source.replace(imgs[i], image_b64);
                     } else {
-                        throw `Unexpected Status ${file_response.statusCode}`;
+                        throw `Unexpected Status ${file_response.status}`;
                     }
                 } catch (err) {
                     console.log(`Error downloading external source [${imgs[i]}] - ${err}`);
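The hunk above swaps the synchronous sync-request call for axios, which is why the image-inlining code now uses await and reads status/data instead of statusCode/getBody(). A minimal sketch of that pattern, not taken from the package (the inline_image wrapper and the explicit responseType option are assumptions added here so the response body arrives as binary data):

// Illustrative sketch only, not package code. The responseType option is an
// assumption so file_response.data is a binary buffer rather than a string.
const axios = require('axios'),
    mime = require('mime-types');

async function inline_image(img_url) {
    const file_response = await axios.get(img_url, { responseType: 'arraybuffer' });
    if (file_response.status !== 200) {
        throw `Unexpected Status ${file_response.status}`;
    }
    const mime_type = mime.lookup(img_url);
    const image_b64 = Buffer.from(file_response.data).toString('base64');
    return `data:${mime_type};base64,${image_b64}`;
}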
package/hdoc-build.js CHANGED
@@ -12,7 +12,7 @@
     hdoc_build_db = require(path.join(__dirname, 'hdoc-build-db.js')),
     hdoc_build_pdf = require(path.join(__dirname, 'hdoc-build-pdf.js')),
     hdoc_index = require(path.join(__dirname, 'hdoc-db.js')),
-    zipper = require('zip-local');
+    {zip} = require('zip-a-folder');
 
 const h_tags_to_search = ['h1', 'h2', 'h3'],
     doc_header_template_path = path.join(__dirname, 'templates', 'doc-header.html'),
@@ -262,7 +262,7 @@
         let md_txt = hdoc.expand_variables(fs.readFileSync(file_path.path, 'utf8'));
 
         // Pull in external includes
-        const includes_processed = hdoc.process_includes(file_path.path, md_txt);
+        const includes_processed = await hdoc.process_includes(file_path.path, md_txt);
         md_txt = includes_processed.body;
         includes_found += includes_processed.found;
         includes_success += includes_processed.success;
@@ -375,7 +375,7 @@
         if (hdocbook_config.publicSource && hdocbook_config.publicSource !== '' && hdocbook_config.publicSource.includes('github.com/Hornbill-Docs')) {
 
             const github_paths = hdoc.get_github_api_path(hdocbook_config.publicSource, file_path.relativePath);
-            const contributors = hdoc.get_github_contributors(github_paths.api_path, git_token);
+            const contributors = await hdoc.get_github_contributors(github_paths.api_path, git_token);
 
             if (!contributors.success) {
                 console.log(`Error retrieving contributors from Github: ${contributors.error}`);
@@ -716,6 +716,28 @@
         process.exit(1);
     }
 
+    // Ccreate MD5 hash of content before build
+    dree.scan(work_path_content, md5DreeOptions, hash_callback);
+    let concat_hash = '|';
+    for (let i = 0; i < built_file_hashes.length; i++) {
+        concat_hash += built_file_hashes[i].path + ':' + built_file_hashes[i].hash + '|';
+    }
+    if (concat_hash === '|') {
+        console.log('No hash of content has been returned.');
+        process.exit(1);
+    }
+
+    // Create hash and write file
+    const hash = crypto.createHash("md5").update(concat_hash).digest("hex");
+    const checksum_path = path.join(work_path_content, 'checksum.md5');
+    try {
+        fs.writeFileSync(checksum_path, hash);
+        console.log('\nHash file creation success:', checksum_path);
+    } catch (e) {
+        console.log('\nError creating', checksum_path, ':', e);
+        process.exit(1);
+    }
+
     // Load document header templates
     try {
         doc_header_template = fs.readFileSync(doc_header_template_path, 'utf8');
@@ -768,6 +790,16 @@
         process.exit(1);
     }
 
+    // Add book read timing to the hdocbook.json
+    hdocbook_config.readingTime = Math.ceil(book_read_time + ((book_read_time / 100) * 10));
+    try {
+        fs.writeFileSync(work_hdocbook_path, JSON.stringify(hdocbook_config, null, 2));
+        console.log('\nhdocbook.json update success:', work_hdocbook_path);
+    } catch (e) {
+        console.log('\nError creating', work_hdocbook_path, ':', e);
+        process.exit(1);
+    }
+
     // Build the index
     // Create the DB and tables
     let db = hdoc_build_db.create_db(work_path, doc_id);
@@ -782,41 +814,9 @@
         process.exit(1);
     }
 
-    // Now create MD5 hash of built content
-    dree.scan(work_path_content, md5DreeOptions, hash_callback);
-    let concat_hash = '|';
-    for (let i = 0; i < built_file_hashes.length; i++) {
-        concat_hash += built_file_hashes[i].path + ':' + built_file_hashes[i].hash + '|';
-    }
-    if (concat_hash === '|') {
-        console.log('No hash of content has been returned.');
-        process.exit(1);
-    }
-
-    const hash = crypto.createHash("md5").update(concat_hash).digest("hex");
-    const checksum_path = path.join(work_path_content, 'checksum.md5');
-    try {
-        fs.writeFileSync(checksum_path, hash);
-        console.log('\nHash file creation success:', checksum_path);
-    } catch (e) {
-        console.log('\nError creating', checksum_path, ':', e);
-        process.exit(1);
-    }
-
-
-    // Add book read timing to the hdocbook.json
-    hdocbook_config.readingTime = Math.ceil(book_read_time + ((book_read_time / 100) * 10));
-    try {
-        fs.writeFileSync(work_hdocbook_path, JSON.stringify(hdocbook_config, null, 2));
-        console.log('\nhdocbook.json update success:', work_hdocbook_path);
-    } catch (e) {
-        console.log('\nError creating', work_hdocbook_path, ':', e);
-        process.exit(1);
-    }
-
     try {
         const zip_path = path.join(work_path, doc_id + '.zip');
-        zipper.sync.zip(work_path_content).compress().save(zip_path);
+        await zip(work_path_content, zip_path);
         console.log(`\nZIP Creation Success: ${zip_path}\n`);
         console.log(' Build Started:', build_start_dt);
         console.log(`Build Completed: ${new Date().toLocaleString()}\n`);
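Besides moving the checksum and readingTime blocks ahead of the index build, the hunks above replace zip-local with zip-a-folder, whose zip() helper returns a promise. A minimal sketch of the new archiving call, not taken from the package (package_book and its parameters are placeholders standing in for the build script's own variables):

// Illustrative sketch only, not package code.
const path = require('path'),
    { zip } = require('zip-a-folder');

async function package_book(work_path, work_path_content, doc_id) {
    const zip_path = path.join(work_path, doc_id + '.zip');
    await zip(work_path_content, zip_path); // resolves once the archive has been written
    return zip_path;
}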
package/hdoc-module.js CHANGED
@@ -1,9 +1,9 @@
 (function () {
     'use strict';
 
-    const cheerio = require('cheerio'),
+    const axios = require('axios'),
+        cheerio = require('cheerio'),
         html2text = require('html-to-text'),
-        request = require('sync-request'),
         wordsCount = require('words-count').default;
 
     let includesCache = {};
@@ -81,7 +81,7 @@
     };
 
 
-    exports.process_includes = function (file_path, body) {
+    exports.process_includes = async function (file_path, body) {
         let response = {
             body: '',
             found: 0,
@@ -127,11 +127,11 @@
 
             let file_content;
             try {
-                const file_response = request('GET', link);
-                if (file_response.statusCode === 200) {
-                    file_content = file_response.getBody('UTF8');
+                const file_response = await axios.get(link);
+                if (file_response.status === 200) {
+                    file_content = file_response.data;
                 } else {
-                    throw `Unexpected Status ${file_response.statusCode}`;
+                    throw `Unexpected Status ${file_response.status}`;
                 }
             } catch (e) {
                 response.failed++;
@@ -222,7 +222,7 @@
         return github_paths;
     };
 
-    exports.get_github_contributors = function(github_url, github_api_token) {
+    exports.get_github_contributors = async function(github_url, github_api_token) {
         let response = {
             success: false,
             error: '',
@@ -244,12 +244,17 @@
         if (github_api_token !== '') {
             request_options.headers.authorization = `Bearer ${github_api_token}`;
         }
-        const github_response = request('GET', github_url, request_options);
+        let github_response;
+        try {
+            github_response = await axios.get(github_url, request_options);
+        } catch (err) {
+            response.error = err;
+            return response;
+        }
 
-        if (github_response.statusCode === 200) {
+        if (github_response.status === 200) {
             response.success = true;
-            let commits = github_response.getBody('UTF8');
-            commits = JSON.parse(commits);
+            let commits = github_response.data;
             commits.forEach(function(commit){
                 if (commit.committer && commit.committer.type && commit.committer.type.toLowerCase() === 'user' && commit.committer.login.toLowerCase() !== 'web-flow') {
                     if (!contributors[commit.committer.id]) {
@@ -276,7 +281,7 @@
                 }
             }
         } else {
-            response.error = `Unexpected Status ${github_response.statusCode}. ${JSON.parse(github_response.body.toString('utf8')).message}`;
+            response.error = `Unexpected Status: ${github_response.status}.`;
        }
         return response;
     };
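Because process_includes and get_github_contributors are now declared async, their callers have to await them, as the hdoc-build.js hunks earlier in this diff do. A minimal sketch of such a caller, not taken from the package (build_page and its parameter names are placeholders):

// Illustrative sketch only, not package code.
const hdoc = require('./hdoc-module.js');

async function build_page(md_path, md_txt, api_path, git_token) {
    const includes_processed = await hdoc.process_includes(md_path, md_txt);
    const contributors = await hdoc.get_github_contributors(api_path, git_token);
    if (!contributors.success) {
        console.log(`Error retrieving contributors from Github: ${contributors.error}`);
    }
    return includes_processed.body;
}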
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "hdoc-tools",
-  "version": "0.8.40",
+  "version": "0.8.41",
   "description": "Hornbill HDocBook Development Support Tool",
   "main": "hdoc.js",
   "bin": {
@@ -49,8 +49,7 @@
     "prompt": "^1.3.0",
     "puppeteer": "^19.6.0",
     "stream": "0.0.2",
-    "sync-request": "^6.1.0",
     "words-count": "^2.0.2",
-    "zip-local": "^0.3.5"
+    "zip-a-folder": "^1.1.5"
   }
 }