hdoc-tools 0.8.10 → 0.8.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Hornbill Docs
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/hdoc-build.js CHANGED
@@ -3,7 +3,9 @@
3
3
 
4
4
  const {
5
5
  createHash
6
- } = require('crypto'),
6
+ } = require('crypto'), {
7
+ lutimesSync
8
+ } = require('fs'),
7
9
  dree = require('dree'),
8
10
  fs = require('fs-extra'),
9
11
  mdfm = require('markdown-it-front-matter'),
@@ -33,13 +35,13 @@
33
35
  includes_found = 0,
34
36
  includes_success = 0,
35
37
  includes_failed = 0,
38
+ book_read_time = 0,
36
39
  hdocbook_project,
37
40
  docId = '',
38
41
  md_files = [],
39
42
  static_html_files = [],
40
43
  index_records = [],
41
44
  work_path_content = '',
42
- built_relative_paths = [],
43
45
  built_file_hashes = [];
44
46
 
45
47
  const transform_static_html = function (file_path) {
@@ -50,16 +52,20 @@
50
52
 
51
53
  let html_txt_updated = false;
52
54
 
55
+ let fm_headers = [];
56
+ let existing_fm_headers = false;
57
+
53
58
  // Check if we have a frontmatter comment
54
59
  const fm_header = hdoc.getHTMLFrontmatterHeader(html_txt);
55
60
  if (Object.keys(fm_header.fm_properties).length > 0) {
61
+ existing_fm_headers = true;
62
+
56
63
  // We have some frontmatter headers, check if title is one of them
57
64
  let fm_title_found = false;
58
65
  if (fm_header.fm_properties && fm_header.fm_properties.title !== undefined) {
59
66
  // We have a title - but does the title have a value
60
67
  if (fm_header.fm_properties.title === '') {
61
- // No value - remove title from the properties map
62
- // so we don't end up with 2 title properties, one empty and one with a value
68
+ // No value - remove title from the properties map so we don't end up with 2 title properties, one empty and one with a value
63
69
  delete fm_header.fm_properties.title;
64
70
  } else {
65
71
  // We have a value for the title property
@@ -67,6 +73,22 @@
67
73
  }
68
74
  }
69
75
 
76
+ // Is reading-time in the fm headers?
77
+ if (fm_header.fm_properties['reading-time'] === undefined) {
78
+ const read_time_mins = hdoc.get_html_read_time(html_txt);
79
+ book_read_time += read_time_mins;
80
+ fm_header.fm_properties['reading-time'] = read_time_mins;
81
+ }
82
+
83
+ for (const key in fm_header.fm_properties) {
84
+ if (fm_header.fm_properties.hasOwnProperty(key)) {
85
+ fm_headers.push({
86
+ id: key,
87
+ value: fm_header.fm_properties[key]
88
+ });
89
+ }
90
+ }
91
+
70
92
  if (!fm_title_found) {
71
93
  // No frontmatter title found in properties
72
94
  // Go get title from h tags in html
@@ -74,15 +96,10 @@
74
96
 
75
97
  if (html_heading && html_heading[0] && html_heading[0].children && html_heading[0].children[0] && html_heading[0].children[0].data) {
76
98
  // We've found a heading tag, add that as a title to the existing frontmatter properties
77
- let frontmatter_header = `[[FRONTMATTER\ntitle: ${html_heading[0].children[0].data}`;
78
- for (const key in fm_header.fm_properties) {
79
- if (fm_header.fm_properties.hasOwnProperty(key)) {
80
- frontmatter_header += `\n${key}: ${fm_header.fm_properties[key]}`;
81
- }
82
- }
83
- frontmatter_header += '\n]]';
84
- html_txt = html_txt.replace(fm_header.fm_header, frontmatter_header);
85
- html_txt_updated = true;
99
+ fm_headers.push({
100
+ id: 'title',
101
+ value: html_heading[0].children[0].data
102
+ });
86
103
  } else {
87
104
  // No header tag, no frontmatter title, output a warning
88
105
  console.log(`No frontmatter title property, or ${h_tags_to_search.join(', ')} tags detected in ${file_path.path}`);
@@ -91,16 +108,44 @@
91
108
  } else {
92
109
  // We have no frontmatter headers, get and build one from the html headings
93
110
  const html_heading = hdoc.getFirstHTMLHeading(html_txt, h_tags_to_search);
111
+
112
+ // Add the title
94
113
  if (html_heading && html_heading[0] && html_heading[0].children && html_heading[0].children[0] && html_heading[0].children[0].data) {
95
114
  // We've found a heading tag, add that as a title to the frontmatter content
96
- const frontmatter_header = `<!--[[FRONTMATTER\r\ntitle: ${html_heading[0].children[0].data}\r\n]]-->\r\n`;
97
- html_txt = frontmatter_header + html_txt;
98
- html_txt_updated = true;
115
+ fm_headers.push({
116
+ id: 'title',
117
+ value: html_heading[0].children[0].data
118
+ });
99
119
  } else {
100
120
  // No header tag, no frontmatter title, output a warning
101
121
  console.log(`No frontmatter title property, or ${h_tags_to_search.join(', ')} tags detected in ${file_path.path}`);
102
122
  }
123
+
124
+ // Add the reading time
125
+ const read_time_mins = hdoc.get_html_read_time(html_txt);
126
+ book_read_time += read_time_mins;
127
+ fm_headers.push({
128
+ id: 'reading-time',
129
+ value: read_time_mins
130
+ });
131
+ }
132
+
133
+ if (fm_headers.length > 0) {
134
+ let fm_headers_content = '[[FRONTMATTER\n';
135
+ for (let i = 0; i < fm_headers.length; i++) {
136
+ fm_headers_content += `${fm_headers[i].id}: ${fm_headers[i].value}\n`;
137
+ }
138
+ fm_headers_content += ']]';
139
+
140
+ if (existing_fm_headers) {
141
+ html_txt = html_txt.replace(fm_header.fm_header, fm_headers_content);
142
+ html_txt_updated = true;
143
+ } else {
144
+ html_txt = `<!--${fm_headers_content}-->\n${html_txt}`;
145
+ html_txt_updated = true;
146
+ }
103
147
  }
148
+
104
149
  index_records.push({
105
150
  relative_path: file_path.relativePath,
106
151
  index_html: hdoc_index.transform_html_for_index(html_txt)
@@ -108,7 +153,7 @@
108
153
  if (html_txt_updated) {
109
154
  // Save HTML into HTML file
110
155
  fs.writeFile(file_path.path, html_txt, function writeJSON(err) {
111
- if (err) return console.log('Error writing:', target_file, '\r\n', err);
156
+ if (err) return console.log('Error writing:', target_file, '\n', err);
112
157
  });
113
158
  }
114
159
  }
@@ -155,40 +200,75 @@
155
200
  // Render markdown into HTML
156
201
  let html_txt = md.render(md_txt.toString());
157
202
 
158
- // Does frontmatter tag contain a title property
159
- let fm_contains_title = false;
203
+ // Prepare frontmatter headers
204
+ let fm_headers = [];
160
205
  let fm_content = frontmatter_content.split(/\r?\n/);
206
+
207
+ let fm_contains_title = false,
208
+ fm_contains_reading_time = false;
209
+
161
210
  if (fm_content.length >= 0) {
162
211
  fm_content.forEach(function (fm_prop) {
163
212
  const fm_property = fm_prop.split(':');
164
- if (fm_property[0] && fm_property[0] === 'title' && fm_property[1] && fm_property[1].length > 0) fm_contains_title = true;
213
+ if (fm_property[0] && fm_property[0].trim().length > 0 && fm_property[1] && fm_property[1].trim().length > 0) {
214
+ fm_headers.push({
215
+ id: fm_property[0].trim(),
216
+ value: fm_property[1].trim()
217
+ });
218
+
219
+ if (fm_property[0].trim() === 'title') {
220
+ fm_contains_title = true;
221
+ }
222
+ if (fm_property[0].trim() === 'reading-time') {
223
+ book_read_time += parseInt(fm_property[1].trim(), 10);
224
+ fm_contains_reading_time = true;
225
+ }
226
+ }
165
227
  });
166
228
  }
167
229
 
230
+ // Does frontmatter tag contain a title property
168
231
  if (!fm_contains_title) {
169
232
  // Frontmatter tags don't contain a title property - go pull the first one from the html heading tags
170
233
  const html_heading = hdoc.getFirstHTMLHeading(html_txt, h_tags_to_search);
171
234
 
172
235
  if (html_heading && html_heading[0] && html_heading[0].children && html_heading[0].children[0] && html_heading[0].children[0].data) {
173
236
  // We've found a heading tag, add that as a title to the frontmatter content
174
- if (frontmatter_content.length > 0) frontmatter_content += '\r\n';
175
- frontmatter_content += `title: ${html_heading[0].children[0].data}`;
237
+ fm_headers.push({
238
+ id: 'title',
239
+ value: html_heading[0].children[0].data.trim()
240
+ });
176
241
  } else {
177
242
  // No header tag, no frontmatter title, output a warning
178
243
  console.log(`No frontmatter title property, or h1, h2 or h3 header tags detected in ${file_path}`);
179
244
  }
180
245
  }
181
246
 
247
+ // Does frontmatter tag contain a reading-time property
248
+ if (!fm_contains_reading_time) {
249
+ const read_time_mins = hdoc.get_html_read_time(html_txt);
250
+ book_read_time += read_time_mins;
251
+ fm_headers.push({
252
+ id: 'reading-time',
253
+ value: read_time_mins
254
+ });
255
+ }
256
+
182
257
  // Add frontmatter tags as comment to front of HTML
183
- if (frontmatter_content.length) {
184
- html_txt = "<!--[[FRONTMATTER\r\n" + frontmatter_content + "\r\n]]-->\r\n" + html_txt;
258
+ if (fm_headers.length > 0) {
259
+ let fm_header = '<!--[[FRONTMATTER\n';
260
+ for (let i = 0; i < fm_headers.length; i++) {
261
+ fm_header += `${fm_headers[i].id}: ${fm_headers[i].value}\n`;
262
+ }
263
+ fm_header += ']]-->';
264
+ html_txt = `${fm_header}\n${html_txt}`;
185
265
  }
186
266
 
187
267
  // Save HTML into HTML file
188
268
  const target_file = file_path.path.replace(path.extname(file_path.path), '.html');
189
269
  const relative_path = file_path.relativePath.replace(path.extname(file_path.path), '.html');
190
270
  fs.writeFile(target_file, html_txt, function writeJSON(err) {
191
- if (err) return console.log('Error writing:', target_file, '\r\n', err);
271
+ if (err) return console.log('Error writing:', target_file, '\n', err);
192
272
  });
193
273
 
194
274
  const index_details = hdoc_index.transform_html_for_index(html_txt);
@@ -218,12 +298,8 @@
218
298
  md_files.push(element);
219
299
  } else {
220
300
  // File is html, see if there's a matching md file and if there is then ignore the html
221
- let html_path = element.path.replace(path.extname(element.path), '.html');
222
- if (fs.existsSync(html_path)) {
223
- return;
224
- }
225
- html_path = element.path.replace(path.extname(element.path), '.htm');
226
- if (fs.existsSync(html_path)) {
301
+ const md_path = element.path.replace(path.extname(element.path), '.md');
302
+ if (fs.existsSync(md_path)) {
227
303
  return;
228
304
  }
229
305
  static_html_files.push(element);
@@ -265,8 +341,8 @@
265
341
  // within the book.
266
342
  // * Package everything up into a ZIP file, ready for the build controller to package and publish
267
343
 
268
- console.log('Hornbill HDocBook Build', '\r\n');
269
- console.log(' Document Path:', source_path, '\r\n');
344
+ console.log('Hornbill HDocBook Build', '\n');
345
+ console.log(' Document Path:', source_path, '\n');
270
346
 
271
347
  // Load the hdocbook-project.json file to get the docId
272
348
  // use the docId to get the book config
@@ -277,11 +353,13 @@
277
353
 
278
354
  const book_path = path.join(source_path, docId),
279
355
  hdocbook_path = path.join(book_path, 'hdocbook.json'),
280
- hdocbook_config = require(hdocbook_path);
356
+ hdocbook_config = require(hdocbook_path),
357
+ work_path = path.join(source_path, '_work'),
358
+ work_hdocbook_path = path.join(work_path, docId, 'hdocbook.json');
281
359
 
282
- console.log(`Building: ${docId} v${hdocbook_config.version}...\r\n`);
360
+ console.log(`Building: ${docId} v${hdocbook_config.version}...\n`);
283
361
 
284
- const work_path = path.join(source_path, '_work');
362
+
285
363
  work_path_content = path.join(work_path, docId);
286
364
  // Make _work folder to copy everything into
287
365
  if (fs.existsSync(work_path)) {
@@ -296,7 +374,7 @@
296
374
  try {
297
375
  fs.copySync(path.join(source_path, docId), work_path_content);
298
376
  } catch (e) {
299
- console.error('Error copying from source_path:\r\n', e);
377
+ console.error('Error copying from source_path:\n', e);
300
378
  process.exit(1);
301
379
  }
302
380
 
@@ -316,12 +394,12 @@
316
394
 
317
395
  console.log(` MD files found: ${conversion_attempted}`);
318
396
  console.log(`Successfully converted to HTML: ${conversion_success}`);
319
- console.log(` Failed to convert: ${conversion_failed}\r\n`);
397
+ console.log(` Failed to convert: ${conversion_failed}\n`);
320
398
  console.log(` Includes Found: ${includes_found}`);
321
399
  console.log(` Includes Success: ${includes_success}`);
322
- console.log(` Includes Failed: ${includes_failed}\r\n`);
400
+ console.log(` Includes Failed: ${includes_failed}\n`);
323
401
 
324
- console.log(` Static HTML Files Found: ${static_html_files.length}\r\n`);
402
+ console.log(` Static HTML Files Found: ${static_html_files.length}\n`);
325
403
 
326
404
  // Validate content
327
405
  const validation_success = validate.run(work_path, docId, verbose);
@@ -394,6 +472,16 @@
394
472
  }
395
473
 
396
474
 
475
+ // Add book read timing to the hdocbook.json
476
+ hdocbook_config.readingTime = Math.ceil(book_read_time + ((book_read_time / 100) * 10));
477
+ try {
478
+ fs.writeFileSync(work_hdocbook_path, JSON.stringify(hdocbook_config, null, 2));
479
+ console.log('\nhdocbook.json update success:', work_hdocbook_path);
480
+ } catch (e) {
481
+ console.log('\nError creating', work_hdocbook_path, ':', e);
482
+ process.exit(1);
483
+ }
484
+
397
485
  try {
398
486
  const zip_path = path.join(work_path, docId + '.zip');
399
487
  zipper.sync.zip(work_path_content).compress().save(zip_path);
package/hdoc-init.js CHANGED
@@ -89,7 +89,7 @@
89
89
  hdocbookFile.version = docProps.version;
90
90
  hdocbookFile.navigation.items[0].items = [{
91
91
  "text": "Welcome",
92
- "link": path.join(docProps.id, 'index')
92
+ "link": docProps.id + '/index'
93
93
  }];
94
94
  fs.writeFile(hdocBookFilePath, JSON.stringify(hdocbookFile, null, 2), function writeJSON(err) {
95
95
  if (err) return console.log('Error updating:', hdocBookFilePath, '\r\n', err);
package/hdoc-module.js CHANGED
@@ -2,7 +2,9 @@
2
2
  'use strict';
3
3
 
4
4
  const cheerio = require('cheerio'),
5
- request = require('sync-request');
5
+ request = require('sync-request'),
6
+ html2text = require('html-to-text'),
7
+ wordsCount = require('words-count').default;
6
8
 
7
9
  let includesCache = {};
8
10
 
@@ -196,5 +198,19 @@
196
198
  if (str.length <= n) { return str; }
197
199
  const subString = str.slice(0, n-1);
198
200
  return (useWordBoundary ? subString.slice(0, subString.lastIndexOf(" ")) : subString) + '…';
199
- };
201
+ };
202
+
203
+ exports.get_html_read_time = function(html) {
204
+ // Get word count
205
+ const text = html2text.convert(html, {
206
+ wordwrap: null
207
+ });
208
+ const word_count = wordsCount(text);
209
+ if (word_count === 0) return 0;
210
+
211
+ // Calculate the read time - divide the word count by 200
212
+ let read_time = Math.round(word_count / 200);
213
+ if (read_time === 0) read_time = 1;
214
+ return read_time;
215
+ };
200
216
  })();
package/hdoc-stats.js CHANGED
@@ -60,7 +60,7 @@
60
60
  symbolicLinks: false
61
61
  };
62
62
 
63
- exports.run = function(ui_path, source_path, verbose = false) {
63
+ exports.run = function(ui_path, source_path, md, verbose = false) {
64
64
 
65
65
  // GERRY: The stats here are needed to support content development. The idea is to count all of the ]
66
66
  // words in a HDocBook so we know the size of the book, this helps with 3rd party involvement where
@@ -76,7 +76,7 @@
76
76
  // * MD files, and word count those
77
77
 
78
78
 
79
- console.log('Hornbill HDocBook Stats : verbose=' + verbose, '\r\n');
79
+ console.log('Hornbill HDocBook Stats', '\r\n');
80
80
 
81
81
  const project_json_path = path.join(source_path, 'hdocbook-project.json');
82
82
 
package/hdoc.js CHANGED
@@ -1,4 +1,4 @@
1
- #!/usr/bin/env node
1
+ #!/usr/bin/env node
2
2
 
3
3
  (function () {
4
4
  'use strict';
@@ -58,8 +58,6 @@
58
58
 
59
59
  }
60
60
 
61
- console.log("VERBOSE: ", verbose);
62
-
63
61
  console.log('Hornbill HDocBook Tools v' + getHdocPackageVersion(packageFile), '\r\n');
64
62
  console.log(' Server Path:', __dirname);
65
63
  console.log(' Document Path:', source_path, '\r\n');
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "hdoc-tools",
3
- "version": "0.8.10",
3
+ "version": "0.8.12",
4
4
  "description": "Hornbill HDocBook Development Support Tool",
5
5
  "main": "hdoc.js",
6
6
  "bin": {
@@ -0,0 +1,3 @@
1
+ You should store any files and resources that are used in the creation of this hdocbook, but do not form part of the published content.
2
+
3
+ These files are not included in the build and final content output, but we should keep them in the source code repo for future revisions. For example, if you were to create an image used in the content, say, in Photoshop, you would create the image and put that into the appropriate content location. However, you may also want to put the Photoshop source file into this **resources** folder in order to keep it for the future; should anyone need to modify the image, we have the original source file.