hdoc-tools 0.7.19 → 0.7.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/hdoc-build.js +40 -26
- package/hdoc-db.js +158 -0
- package/hdoc-module.js +146 -0
- package/hdoc-serve.js +38 -133
- package/hdoc-validate.js +5 -11
- package/hdoc.js +9 -4
- package/package.json +5 -1
- package/ui/js/doc.hornbill.js +3 -2
- package/LICENSE +0 -21
package/hdoc-build.js
CHANGED

@@ -1,41 +1,53 @@
 (function () {
     'use strict';
 
-    const
+    const dree = require('dree'),
+        fs = require('fs-extra'),
         path = require('path'),
-
-
-
+        URL = require("url").URL,
+        validate = require(path.join(__dirname, 'hdoc-validate.js')),
+        hdoc = require(path.join(__dirname, 'hdoc-module.js')),
+        zipper = require('zip-local');
 
     let conversion_attempted = 0,
         conversion_success = 0,
         conversion_failed = 0,
+        includes_found = 0,
+        includes_success = 0,
+        includes_failed = 0,
         docId = '',
         md_files = [];
 
-    function
-        text = text.replaceAll('{{BUILD_NUMBER}}', '0');
-
-        let build_date = new Date().toISOString();
-        build_date = build_date.replace('T', ' ');
-        build_date = build_date.substring(0, 19);
-
-        text = text.replaceAll('{{BUILD_DATE}}', build_date);
-        return text;
-    }
-
-    function transform_markdown_and_save_html(file_path, md) {
+    const transform_markdown_and_save_html = function (file_path, md) {
         conversion_attempted++;
         if (fs.existsSync(file_path)) {
             // Load markdown file
-            let md_txt = expand_variables(fs.readFileSync(file_path, 'utf8'));
+            let md_txt = hdoc.expand_variables(fs.readFileSync(file_path, 'utf8'));
+
+            // Pull in external includes
+            const includes_processed = hdoc.process_includes(file_path, md_txt);
+            md_txt = includes_processed.body;
+            includes_found += includes_processed.found;
+            includes_success += includes_processed.success;
+            includes_failed += includes_processed.failed;
+            if (includes_processed.errors.length > 0) {
+                for (let i = 0; i < includes_processed.errors.length; i++) {
+                    console.error(includes_processed.errors[i]);
+                }
+            }
 
             // Render markdown into HTML
+            let frontmatter_content = '';
             var html_txt = md.render(md_txt.toString());
 
+
+            if (frontmatter_content.length) {
+                html_txt = "<!--[[FRONTMATTER\r\n" + frontmatter_content + "]]-->\r\n" + html_txt;
+            }
+
             // Save HTML into HTML file
             const target_file = file_path.replace(path.extname(file_path), '.html');
-            fs.
+            fs.writeFile(target_file, html_txt, function writeJSON(err) {
                 if (err) return console.log('Error writing:', target_file, '\r\n', err);
             });
             conversion_success++;
@@ -44,7 +56,7 @@
         conversion_failed++;
         console.error('MD file does not exist:', file_path);
         return false;
-    }
+    };
 
     // File callbacks for scans
     const fileCallback = function (element) {
@@ -79,14 +91,14 @@
 
         // Load the hdocbook-project.json file to get the docId
         // use the docId to get the book config
-        const
-
+        const hdocbook_project_config_path = path.join(source_path, 'hdocbook-project.json'),
+            hdocbook_project = require(hdocbook_project_config_path);
 
         docId = hdocbook_project.docId;
 
-        const
-
-
+        const book_path = path.join(source_path, docId),
+            hdocbook_path = path.join(book_path, 'hdocbook.json'),
+            hdocbook_config = require(hdocbook_path);
 
         console.log(`Building: ${docId} v${hdocbook_config.version}...\r\n`);
 
@@ -118,9 +130,11 @@
         });
         console.log(` MD files found: ${conversion_attempted}`);
         console.log(`Successfully converted to HTML: ${conversion_success}`);
-        console.log(` Failed to convert: ${conversion_failed}`);
+        console.log(` Failed to convert: ${conversion_failed}\r\n`);
+        console.log(` Includes Found: ${includes_found}`);
+        console.log(` Includes Success: ${includes_success}`);
+        console.log(` Includes Failed: ${includes_failed}\r\n`);
 
-        console.log(`\r\nValidating paths in generated HTML files`);
        const validation_success = validate.run(work_path, docId, verbose);
        if (!validation_success) {
            process.exit(1);

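The build now delegates `{{...}}` variable expansion and `[[INCLUDE <url>]]` resolution to the new shared helpers in `hdoc-module.js` (shown later in this diff) before rendering with markdown-it. A minimal sketch of that per-file pipeline, under assumptions not taken from the package itself: the book folder `my-book`, the file `intro.md`, and a locally created markdown-it instance standing in for the `md` object the real scripts pass around.

```js
// Sketch of the per-file conversion hdoc-build.js now performs (illustrative paths).
const fs = require('fs-extra'),
    path = require('path'),
    md = require('markdown-it')(),            // assumed renderer; the CLI passes its own `md`
    hdoc = require('./hdoc-module.js');

const file_path = path.join(process.cwd(), 'my-book', 'intro.md'); // hypothetical file

// Expand {{BUILD_NUMBER}} / {{BUILD_DATE}} placeholders first
let md_txt = hdoc.expand_variables(fs.readFileSync(file_path, 'utf8'));

// Resolve [[INCLUDE https://...]] directives; failures are logged, not fatal
const includes_processed = hdoc.process_includes(file_path, md_txt);
includes_processed.errors.forEach(e => console.error(e));

// Render the merged markdown and write the sibling .html file, as the build does
const html_txt = md.render(includes_processed.body.toString());
fs.writeFileSync(file_path.replace(path.extname(file_path), '.html'), html_txt);
```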
package/hdoc-db.js
ADDED

@@ -0,0 +1,158 @@
+const { nextTick } = require('process');
+
+(function() {
+    'use strict';
+
+    const dree = require('dree'),
+        fs = require('fs-extra'),
+        html2text = require('html-to-text'),
+        sqlite3 = require('sqlite3'),
+        path = require('path');
+
+    const table_name = 'hbdocs';
+
+    let db_name = 'hbdocs.db',
+        hdocbook_meta = null,
+        md_files = [];
+
+    // File callbacks for scan
+    const file_callback = function (element) {
+        if (path.extname(element.path) === '.md' && !element.path.includes('_work')) {
+            md_files.push(element);
+        } else if (element.name === 'hdocbook.json') {
+            // hdocbook meta data, read and store
+            hdocbook_meta = require(element.path);
+        }
+    };
+
+    const create_table = function(db) {
+        db.exec(`
+            create table ${table_name} (
+                relative_url text primary key not null,
+                book_id text not null,
+                title text not null,
+                content text not null,
+                tags text null,
+                audience text not null
+            );
+        `, (err) => {
+            if (err !== null) {
+                console.error(`Error creating table [${table_name}]: ${err}`);
+                return false;
+            } else {
+                console.log(` Table created successfully: ${table_name}`);
+                return true;
+            }
+        });
+        return true;
+    };
+
+    const build_db = function(db, md, book_def, md_files) {
+        for (let i = 0; i < md_files.length; i++) {
+            db.exec(`
+                insert into ${table_name}
+                (book_id, title, relative_url, content, tags, audience)
+                values
+                (
+                    '${book_def.docId}',
+                    '${book_def.title}',
+                    '${md_files[i].relativePath}',
+                    'Some file content',
+                    '${book_def.tags.join(';')}',
+                    '${book_def.audience.join(';')}'
+                );
+            `, (err) => {
+                if (err !== null) {
+                    console.error(`Error inserting record [${md_files[i].relativePath}]: ${err}`);
+                    return false;
+                } else {
+                    console.log(` Record inserted successfully: ${md_files[i].relativePath}`);
+                    return true;
+                }
+            });
+        }
+    };
+
+    const transform_markdown = function (file_path, md) {
+        if (fs.existsSync(file_path)) {
+            // Load markdown file
+            let md_txt = hdoc.expand_variables(fs.readFileSync(file_path, 'utf8'));
+
+            // Pull in external includes
+            const includes_processed = hdoc.process_includes(file_path, md_txt);
+            md_txt = includes_processed.body;
+            includes_found += includes_processed.found;
+            includes_success += includes_processed.success;
+            includes_failed += includes_processed.failed;
+            if (includes_processed.errors.length > 0) {
+                for (let i = 0; i < includes_processed.errors.length; i++) {
+                    console.error(includes_processed.errors[i]);
+                }
+            }
+
+            // Render markdown into HTML
+            var html_txt = md.render(md_txt.toString());
+
+            // Save HTML into HTML file
+            const target_file = file_path.replace(path.extname(file_path), '.html');
+            fs.writeFile(target_file, html_txt, function writeJSON(err) {
+                if (err) return console.log('Error writing:', target_file, '\r\n', err);
+            });
+            conversion_success++;
+            return true;
+        }
+        conversion_failed++;
+        console.error('MD file does not exist:', file_path);
+        return false;
+    };
+
+    const dree_options = {
+        descendants: true,
+        depth: 10,
+        extensions: ['md','json'],
+        hash: false,
+        normalize: true,
+        size: false,
+        sizeInBytes: false,
+        stat: false,
+        symbolicLinks: false
+    };
+
+    exports.run = function(source_path, md) {
+        /*
+            STEVE - The purpose of this command is to allow content developers to do local builds of
+            the SQlite database and index file, for validation before commit and build
+        */
+        dree.scan(source_path, dree_options, file_callback);
+
+        if (hdocbook_meta === null) {
+            console.error('hdocbook.json was not found, or has no content');
+            process.exit(1);
+        }
+        if (md_files.length === 0) {
+            console.error('No markdown files detected in', source_path);
+            process.exit(1);
+        }
+
+        db_name = path.join(source_path, db_name);
+        if (fs.existsSync(db_name)) {
+            try {
+                fs.removeSync(db_name);
+            } catch (e) {
+                console.error(`Failed to delete existing db file: ${db_name}`);
+            }
+        }
+
+        let db = new sqlite3.Database(db_name, (err) => {
+            if (err) {
+                console.error('Error creating database:', err);
+                process.exit(1);
+            }
+            console.log(`DB file created successfully: ${db_name}`);
+            if (create_table(db)) {
+                if (!hdocbook_meta.tags) hdocbook_meta.tags = [];
+                build_db(db, md, hdocbook_meta, md_files );
+            }
+        });
+    };
+})();

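This module backs the new `buildindex` command (wired up in `hdoc.js` further down): it scans the book for `hdocbook.json` and markdown files, then writes an `hbdocs.db` SQLite file containing a single `hbdocs` table. A rough sketch of inspecting that output with the same `sqlite3` package; the database, table and column names come from the code above, while the query itself and the assumption that `hbdocs.db` sits in the current working folder are illustrative only.

```js
// Sketch: open the hbdocs.db produced by the buildindex command and list indexed pages.
const path = require('path'),
    sqlite3 = require('sqlite3');

const db = new sqlite3.Database(path.join(process.cwd(), 'hbdocs.db'), sqlite3.OPEN_READONLY, (err) => {
    if (err) return console.error('Could not open hbdocs.db:', err);

    // Columns as defined in hdoc-db.js: relative_url, book_id, title, content, tags, audience
    db.all('select book_id, title, relative_url from hbdocs', (qerr, rows) => {
        if (qerr) return console.error('Query failed:', qerr);
        rows.forEach(r => console.log(`${r.book_id} | ${r.title} | ${r.relative_url}`));
        db.close();
    });
});
```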
package/hdoc-module.js
ADDED

@@ -0,0 +1,146 @@
+(function () {
+    'use strict';
+
+    const request = require('sync-request');
+
+    let includesCache = {};
+
+    exports.content_type_for_ext = function (ext) {
+        switch (ext) {
+            case '.z':
+                return 'application/x-compress';
+            case '.tgz':
+                return 'application/x-compressed';
+            case '.gz':
+                return 'application/x-gzip';
+            case '.zip':
+                return 'application/x-zip-compressed';
+            case '.xml':
+                return 'application/xml';
+            case '.bmp':
+                return 'image/bmp';
+            case '.gif':
+                return 'image/gif';
+            case '.jpg':
+                return 'image/jpeg';
+            case '.png':
+                return 'image/png';
+            case '.tiff':
+                return 'image/tiff';
+            case '.ico':
+                return 'image/x-icon';
+            case '.png':
+                return 'image/png';
+            case '.svg':
+                return 'image/svg+xml';
+            case '.css':
+                return 'text/css';
+            case '.htm':
+            case '.html':
+                return 'text/html';
+            case '.txt':
+                return 'text/plain';
+            case '.md':
+                return 'text/plain';
+            case '.json':
+                return 'application/json';
+            case '.js':
+                return 'application/javascript';
+            default:
+                return 'application/octet-stream';
+        }
+    };
+
+    exports.valid_url = function (s) {
+        let response = {
+            valid: false,
+            urlProps: {}
+        };
+        try {
+            response.urlProps = new URL(s);
+            response.valid = true;
+        } catch (err) {
+            response.valid = false;
+        }
+        return response;
+    };
+
+    exports.expand_variables = function (text, docId = '') {
+        if (docId !== '') {
+            text = text.replaceAll('{{DOC_ID}}', docId);
+        }
+        text = text.replaceAll('{{BUILD_NUMBER}}', '0');
+
+        let build_date = new Date().toISOString();
+        build_date = build_date.replace('T', ' ');
+        build_date = build_date.substring(0, 19);
+        text = text.replaceAll('{{BUILD_DATE}}', build_date);
+        return text;
+    };
+
+
+    exports.process_includes = function (file_path, body) {
+        let response = {
+            body: '',
+            found: 0,
+            success: 0,
+            failed: 0,
+            errors: []
+        };
+
+        // Search body for INCLUDEs
+        const regexp = /\[\[INCLUDE .*]]/g;
+        const body_array = [...body.matchAll(regexp)];
+
+        for (let i = 0; i < body_array.length; i++) {
+            response.found++;
+
+            // Extract include data from array
+            const include_value = body_array[i][0];
+
+            let link;
+            try {
+                link = include_value.split(' ')[1];
+                link = link.substring(0, link.length - 2);
+            } catch (e) {
+                response.failed++;
+                response.errors.push(`Error parsing INCLUDE [${include_value}] from [${file_path}]: ${err}`);
+                continue;
+            }
+
+            if (includesCache[link] !== undefined) {
+                console.log(`Serving From Cache: ${link}`);
+                body = body.replace(include_value, includesCache[link]);
+                continue;
+            }
+
+            // Validate link in INCLUDE
+            try {
+                new URL(link);
+            } catch (err) {
+                response.failed++;
+                response.errors.push(`Error validating INCLUDE link [${link}] from [${file_path}]: ${e}`);
+                continue;
+            }
+
+            let file_content;
+            try {
+                const file_response = request('GET', link);
+                if (file_response.statusCode === 200) {
+                    file_content = file_response.getBody('UTF8');
+                } else {
+                    throw `Unexpected Status ${file_response.statusCode}`;
+                }
+            } catch (e) {
+                response.failed++;
+                response.errors.push(`Error getting INCLUDE link content [${link}] from [${file_path}]: ${e}`);
+                continue;
+            }
+            response.success++;
+            includesCache[link] = file_content;
+            body = body.replace(include_value, file_content);
+        }
+        response.body = body;
+        return response;
+    };
+})();

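`hdoc-module.js` centralizes the helpers that `hdoc-build.js` (above) and `hdoc-serve.js`/`hdoc-validate.js` (below) previously duplicated or now call: `expand_variables`, `process_includes`, `valid_url` and `content_type_for_ext`. A short usage sketch based on those call sites; the sample strings, file name and URLs are illustrative, not taken from the package.

```js
// Sketch of using the shared helpers, mirroring how the other scripts call them.
const path = require('path'),
    hdoc = require('./hdoc-module.js'); // assumed to sit alongside this script

// {{DOC_ID}}, {{BUILD_NUMBER}} and {{BUILD_DATE}} are replaced in one pass
const text = hdoc.expand_variables('Book {{DOC_ID}} built {{BUILD_DATE}}', 'my-doc-id');

// valid_url() wraps new URL() and reports success instead of throwing
console.log(hdoc.valid_url('https://docs.example.com/page').valid); // true (example URL)
console.log(hdoc.valid_url('../relative/page.html').valid);         // false

// content_type_for_ext() maps a file extension to a MIME type for the dev server
console.log(hdoc.content_type_for_ext(path.extname('style.css')));  // 'text/css'

// process_includes() fetches [[INCLUDE <url>]] targets synchronously, caches and inlines them
const result = hdoc.process_includes('example.md', '[[INCLUDE https://docs.example.com/shared/notice.md]]');
console.log(result.found, result.success, result.failed, result.errors);
```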
package/hdoc-serve.js
CHANGED

@@ -1,11 +1,15 @@
 (function () {
     'use strict';
 
-    const
-
-
-
-
+    const express = require('express'),
+        fs = require('fs'),
+        path = require('path'),
+        hdoc = require(path.join(__dirname, 'hdoc-module.js')),
+        stream = require('stream');
+
+    let port = 3000;
+    let docId;
+    let hdocbook_config;
 
     exports.run = function (ui_path, source_path, md) {
 
@@ -20,20 +24,19 @@
                     port = process.argv[x];
                 }
             }
-        }
+        }
 
         console.log('Hornbill HDocBook Preview/Dev Server', '\r\n');
-        //console.log(' Server Path:', __dirname);
         console.log(' UI Root Path:', ui_path);
         console.log(' Document Path:', source_path, '\r\n');
         console.log(' Server Port:', port);
 
-        if(fs.existsSync(path.join(source_path, 'hdocbook-project.json')) == false) {
+        if (fs.existsSync(path.join(source_path, 'hdocbook-project.json')) == false) {
             console.log("No hdocbook-project.js file found in working folder. Unable to continue.");
             return -1;
         }
 
-        // Get an express server instance
+        // Get an express server instance
         var app = express();
 
         // In the root of the project there is a hdocbook.json file which includes
@@ -44,14 +47,13 @@
         var hdocbook_project = require(hdocbook_project_config_path);
 
         // Get the ID of the hdocbook we are serving
-
+        docId = hdocbook_project.docId;
 
         // Get the path of the book.json file
         const hdocbook_path = path.join(source_path, docId, 'hdocbook.json');
 
         // Pull in the book config file
-
-        var hdocbook_mtime = fs.statSync(hdocbook_path).mtime;
+        hdocbook_config = require(hdocbook_path);
 
         app.get('/_books/library.json', function (req, res) {
             let library = {
@@ -64,65 +66,6 @@
             res.send(JSON.stringify(library, null, 3));
         });
 
-        function content_type_for_ext(ext) {
-            switch (ext) {
-                case '.z':
-                    return 'application/x-compress';
-                case '.tgz':
-                    return 'application/x-compressed';
-                case '.gz':
-                    return 'application/x-gzip';
-                case '.zip':
-                    return 'application/x-zip-compressed';
-                case '.xml':
-                    return 'application/xml';
-                case '.bmp':
-                    return 'image/bmp';
-                case '.gif':
-                    return 'image/gif';
-                case '.jpg':
-                    return 'image/jpeg';
-                case '.png':
-                    return 'image/png';
-                case '.tiff':
-                    return 'image/tiff';
-                case '.ico':
-                    return 'image/x-icon';
-                case '.png':
-                    return 'image/png';
-                case '.svg':
-                    return 'image/svg+xml';
-                case '.css':
-                    return 'text/css';
-                case '.htm':
-                case '.html':
-                    return 'text/html';
-                case '.txt':
-                    return 'text/plain';
-                case '.md':
-                    return 'text/plain';
-                case '.json':
-                    return 'application/json';
-                case '.js':
-                    return 'application/javascript';
-                default:
-                    return 'application/octet-stream';
-            }
-        }
-
-        function expand_variables(text) {
-            // For debug mode our base path is our root??
-            text = text.replaceAll('{{DOC_ID}}', docId);
-            text = text.replaceAll('{{BUILD_NUMBER}}', '0');
-
-            let build_date = new Date().toISOString();
-            build_date = build_date.replace('T', ' ');
-            build_date = build_date.substring(0, 19);
-
-            text = text.replaceAll('{{BUILD_DATE}}', build_date);
-            return text;
-        }
-
         function transform_markdown_and_send_html(req, res, file_path) {
 
             if (fs.existsSync(file_path)) {
@@ -130,8 +73,21 @@
                 // it to the caller
 
                 // Load markdown file
-                let md_txt = expand_variables(fs.readFileSync(file_path).toString());
-
+                let md_txt = hdoc.expand_variables(fs.readFileSync(file_path).toString(), docId);
+
+                // Pull in external includes
+                const includes_processed = hdoc.process_includes(file_path, md_txt);
+                md_txt = includes_processed.body;
+                if (includes_processed.errors.length > 0) {
+                    console.error(`Error(s) when processing includes in ${file_path}`);
+                    for (let i = 0; i < includes_processed.errors.length; i++) {
+                        console.error(includes_processed.errors[i]);
+                    }
+                } else {
+                    if (includes_processed.found > 0) {
+                        console.log(`Includes injected into document: ${includes_processed.success}`);
+                    }
+                }
                 // Render markdown into HTML
                 let frontmatter_content = '';
                 var html_txt = md.render(md_txt.toString());
@@ -155,9 +111,9 @@
         }
 
         function send_content_file(req, res, file_path) {
-            let content_txt = expand_variables(fs.readFileSync(file_path).toString());
+            let content_txt = hdoc.expand_variables(fs.readFileSync(file_path).toString(), docId);
 
-            let contentType = content_type_for_ext(path.extname(file_path));
+            let contentType = hdoc.content_type_for_ext(path.extname(file_path));
 
             if (path.extname(file_path) == '.md') {
                 res.setHeader('Content-Disposition', 'inline');
@@ -170,7 +126,7 @@
 
         function send_file(req, res, file_path) {
             // Need to set the content type here??
-            let contentType = content_type_for_ext(path.extname(file_path));
+            let contentType = hdoc.content_type_for_ext(path.extname(file_path));
             res.setHeader('Content-Type', contentType);
 
             const r = fs.createReadStream(file_path);
@@ -214,64 +170,11 @@
 
         app.get('/_books/*', function (req, res) {
 
-            let url = req.url;
-
-            let segs = url.split('/');
-            if (segs.length == 4 && segs[1] == '_books' && segs[3] == 'book.json') {
-                // Special case of a virtual file here, we need to check the book ID and
-                // if its our book, send the json
-                if (hdocbook_config.docId == segs[2]) {
-                    res.setHeader('Content-Type', 'application/json');
-                    res.send(JSON.stringify(hdocbook_config, null, 3));
-                } else {
-                    // Return a 404 error here
-                    res.setHeader('Content-Type', 'text/html');
-                    res.status(404).send('Specified bookId ' + segs[2] + ' not found');
-                }
-                return;
-            } else if (segs.length == 3 && segs[1] == '_books' && segs[2] == 'index.json') {
-                // For development mode, we always have an index with one book in it, the one being developed
-                if (hdocbook_config) {
-                    let index = {
-                        books: [{
-                            docId: hdocbook_config.docId,
-                            title: hdocbook_config.title,
-                            description: hdocbook_config.description,
-                            version: hdocbook_config.version
-                        }]
-                    };
-                    res.setHeader('Content-Type', 'application/json');
-                    res.send(JSON.stringify(index, null, 3));
-                } else {
-                    // Return a 404 error here
-                    res.setHeader('Content-Type', 'text/html');
-                    res.status(404).send('Specified bookId ' + segs[2] + ' not found');
-                }
-                return;
-            }
+            let url = req.url.replace('/_books/', '/');
 
-            url = url.replace('/_books/', '/');
-
             console.log('URL Requested:', url);
 
             let file_path = path.join(source_path, url);
-            let ui_file_path = path.join(ui_path, url);
-
-            // If the requested file is found in the UI folder
-            if (url == '/') {
-                if (fs.existsSync(path.join(ui_file_path, 'index.html'))) {
-                    // We want the index.html, send it here
-                    send_file(req, res, path.join(ui_file_path, 'index.html'));
-                    return;
-                }
-                // Return a 404 error here
-                send_content_resource_404(req, res);
-                return;
-            } else if (fs.existsSync(ui_file_path)) {
-                // File is found in the UI folder, that takes priority, send the file
-                send_file(req, res, ui_file_path);
-                return;
-            }
 
             if (path.extname(file_path) == '.html') {
                 // 1a. check for html files, and send/transform as required
@@ -287,12 +190,14 @@
                         return;
                     }
                 }
+
             } else if (path.extname(file_path) == '.md') {
                 // If the markdown file exists, just send to caller as is
                 if (fs.existsSync(file_path)) {
                     send_content_file(req, res, file_path);
                     return true;
                 }
+
             } else if (path.extname(file_path).length == 0) {
                 // 2. If we request a file, without any file extension
                 if (fs.existsSync(file_path + '.md')) {
@@ -323,7 +228,7 @@
 
             // Return a 404 error here
             send_content_resource_404(req, res);
-        });
+        });
 
         // Catch all
         app.get('/*', function (req, res) {
@@ -358,9 +263,9 @@
 
             let _vars = ['{{DOC_ID}}', '{{BUILD_NUMBER}}', '{{BUILD_DATE}}'];
             console.log("Server Vars:");
-            for(let x = 0; x < _vars.length; x++) {
+            for (let x = 0; x < _vars.length; x++) {
                 let name = _vars[x];
-                console.log(" ", name, " = ",
+                console.log(" ", name, " = ", hdoc.expand_variables(name, docId));
             }
         });
 

package/hdoc-validate.js
CHANGED

@@ -7,6 +7,7 @@ const parseLinkDestination = require('markdown-it/lib/helpers/parse_link_destina
     dree = require('dree'),
     fs = require('fs'),
     path = require('path'),
+    hdoc = require(path.join(__dirname, 'hdoc-module.js')),
     URL = require("url").URL;
 
 let errors = {},
@@ -15,20 +16,12 @@ const parseLinkDestination = require('markdown-it/lib/helpers/parse_link_destina
     filecount = 0,
     htmlFiles = [];
 
-const stringIsAValidUrl = (s) => {
-    try {
-        new URL(s);
-        return true;
-    } catch (err) {
-        return false;
-    }
-};
 
 const checkLinks = function (source_path, htmlFile, links) {
     for (let i = 0; i < links.length; i++) {
 
         // Validate that link is a valid URL first
-        if (!
+        if (!hdoc.valid_url(links[i]).valid) {
 
             // Could be a relative path, check
             const fileExists = doesFileExist(source_path, htmlFile, links[i]);
@@ -91,11 +84,12 @@ const parseLinkDestination = require('markdown-it/lib/helpers/parse_link_destina
     return links;
 };
 
-
 exports.run = function (source_path, doc_id, verbose) {
     // Get a list of HTML files in source_path
     dree.scan(source_path, dreeOptions, fileCallback);
 
+    console.log(`Performing Validation and Building SEO Link List...`);
+
     let listContent = '';
     for (let i = 0; i < htmlFiles.length; i++) {
 
@@ -118,7 +112,7 @@ const parseLinkDestination = require('markdown-it/lib/helpers/parse_link_destina
     }
     try {
         // Write list
-        const listFile = path.join(source_path, doc_id, '
+        const listFile = path.join(source_path, doc_id, 'links.txt');
         fs.writeFileSync(listFile, listContent);
         console.log(`\r\nLink list text file created successfully: ${listFile}`);
     } catch (err) {

package/hdoc.js
CHANGED

@@ -54,8 +54,9 @@ const { createCipheriv } = require('crypto');
 let source_path = process.cwd();
 let ui_path = path.join(__dirname, 'ui');
 
-let
-
+let command = '', // Our command to run
+    verbose = false,
+    build_index = false;
 
 // Get options from command args
 for (let x = 0; x < process.argv.length; x++) {
@@ -83,6 +84,7 @@ const { createCipheriv } = require('crypto');
     if (process.argv[x] === '-v') {
         verbose = true;
     }
+
 }
 
 console.log('Hornbill HDocBook Tools v' + getHdocPackageVersion(packageFile), '\r\n');
@@ -102,8 +104,11 @@ const { createCipheriv } = require('crypto');
     const init = require(path.join(__dirname, 'hdoc-init.js'));
     init.run(__dirname, source_path, md);
 } else if (command == 'help') {
-    const
-
+    const help = require(path.join(__dirname, 'hdoc-help.js'));
+    help.run();
+} else if (command == 'buildindex') {
+    const build_db = require(path.join(__dirname, 'hdoc-db.js'));
+    build_db.run(source_path, md);
 } else {
     console.log('Unknown command:', command, '\r\n');
     console.log('Run hdoc help for information regarding this tool.\r\n');

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "hdoc-tools",
-  "version": "0.7.19",
+  "version": "0.7.21",
   "description": "Hornbill HDocBook Development Support Tool",
   "main": "hdoc.js",
   "bin": {
@@ -8,9 +8,11 @@
   },
   "files": [
     "hdoc.js",
+    "hdoc-db.js",
     "hdoc-build.js",
     "hdoc-help.js",
     "hdoc-init.js",
+    "hdoc-module.js",
     "hdoc-serve.js",
     "hdoc-stats.js",
     "hdoc-validate.js",
@@ -40,7 +42,9 @@
     "markdown-it-front-matter": "^0.2.3",
     "multer": "^1.4.5-lts.1",
     "prompt": "^1.3.0",
+    "sqlite3": "^5.1.4",
     "stream": "0.0.2",
+    "sync-request": "^6.1.0",
     "words-count": "^2.0.2",
     "zip-local": "^0.3.5"
   }

package/ui/js/doc.hornbill.js
CHANGED

@@ -94,7 +94,6 @@ function listenForHrefClicks()
     //-- trap all link click events - we want to handle links so can cancel and load content ourselves
     $("A").off("click").on("click", function(ev)
     {
-
         let ele = this;
         if(ele.href)
         {
@@ -304,7 +303,7 @@ function loadContentUrl(linkRef,fromPageRefresh,fromPopState)
         view.$nextTick(function()
         {
             //-- find any navigation links that match url and highlight
-            listenForHrefClicks();
+            // listenForHrefClicks();
             highlightNavigationLinkFromUrl(linkRef);
 
             //-- scroll to element that match hash (if have one)
@@ -315,6 +314,8 @@ function loadContentUrl(linkRef,fromPageRefresh,fromPopState)
         });
         view.$forceUpdate();
     }
+
+    listenForHrefClicks();
 });
 }
 

package/LICENSE
DELETED

@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 Hornbill Docs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.