hdoc-tools 0.11.6 → 0.11.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/hdoc-module.js CHANGED
@@ -1,391 +1,391 @@
- (function () {
-     'use strict';
-
-     const axios = require('axios'),
-         cheerio = require('cheerio'),
-         html2text = require('html-to-text'),
-         https = require('https'),
-         wordsCount = require('words-count').default;
-
-     let includesCache = {},
-         agent = new https.Agent({
-             rejectUnauthorized: false
-         });
-
-     exports.content_type_for_ext = function (ext) {
-         switch (ext) {
-             case '.z':
-                 return 'application/x-compress';
-             case '.tgz':
-                 return 'application/x-compressed';
-             case '.gz':
-                 return 'application/x-gzip';
-             case '.zip':
-                 return 'application/x-zip-compressed';
-             case '.xml':
-                 return 'application/xml';
-             case '.bmp':
-                 return 'image/bmp';
-             case '.gif':
-                 return 'image/gif';
-             case '.jpg':
-                 return 'image/jpeg';
-             case '.png':
-                 return 'image/png';
-             case '.tiff':
-                 return 'image/tiff';
-             case '.ico':
-                 return 'image/x-icon';
-             case '.png':
-                 return 'image/png';
-             case '.svg':
-                 return 'image/svg+xml';
-             case '.css':
-                 return 'text/css';
-             case '.htm':
-             case '.html':
-                 return 'text/html';
-             case '.txt':
-                 return 'text/plain';
-             case '.md':
-                 return 'text/plain';
-             case '.json':
-                 return 'application/json';
-             case '.js':
-                 return 'application/javascript';
-             default:
-                 return 'application/octet-stream';
-         }
-     };
-
-     exports.valid_url = function (url) {
-
-         const stringIsAValidUrl = (s) => {
-             try {
-                 const url_obj = new URL(s);
-                 return url_obj;
-             } catch (err) {
-                 return false;
-             }
-         };
-         return stringIsAValidUrl(url);
-     };
-
-     exports.expand_variables = function (text, docId = '') {
-         if (docId !== '') {
-             text = text.replaceAll('{{DOC_ID}}', docId);
-         }
-         text = text.replaceAll('{{BUILD_NUMBER}}', '0');
-
-         let build_date = new Date().toISOString();
-         build_date = build_date.replace('T', ' ');
-         build_date = build_date.substring(0, 19);
-         text = text.replaceAll('{{BUILD_DATE}}', build_date);
-         return text;
-     };
-
-
-     exports.process_includes = async function (file_path, body) {
-         let response = {
-             body: '',
-             found: 0,
-             success: 0,
-             failed: 0,
-             errors: []
-         };
-
-         // Search body for INCLUDEs
-         const regexp = /\[\[INCLUDE .*]]/g;
-         const body_array = [...body.matchAll(regexp)];
-
-         for (let i = 0; i < body_array.length; i++) {
-             response.found++;
-
-             // Extract include data from array
-             const include_value = body_array[i][0];
-
-             let link;
-             try {
-                 link = include_value.split(' ')[1];
-                 link = link.substring(0, link.length - 2);
-             } catch (e) {
-                 response.failed++;
-                 response.errors.push(`Error parsing INCLUDE [${include_value}] from [${file_path}]: ${e}`);
-                 continue;
-             }
-
-             if (includesCache[link] !== undefined) {
-                 console.log(`Serving From Cache: ${link}`);
-                 body = body.replace(include_value, includesCache[link]);
-                 continue;
-             }
-
-             // Validate link in INCLUDE
-             try {
-                 new URL(link);
-             } catch (err) {
-                 response.failed++;
-                 response.errors.push(`Error validating INCLUDE link [${link}] from [${file_path}]: ${err}`);
-                 continue;
-             }
-
-             let file_content;
-             try {
-                 const file_response = await axios.get(link);
-                 if (file_response.status === 200) {
-                     file_content = file_response.data;
-                 } else {
-                     throw `Unexpected Status ${file_response.status}`;
-                 }
-             } catch (e) {
-                 response.failed++;
-                 response.errors.push(`Error getting INCLUDE link content [${link}] from [${file_path}]: ${e}`);
-                 continue;
-             }
-             response.success++;
-             includesCache[link] = file_content;
-             body = body.replace(include_value, file_content);
-         }
-         response.body = body;
-         return response;
-     };
-
-     // Takes html, returns the first heading detected in the order provided in h_to_search
-     // Looks for h1 tags first, then hX, hY, hZ in order
-     exports.getFirstHTMLHeading = function (html_body, h_to_search = ['h1']) {
-         const $ = cheerio.load(html_body);
-         for (let i = 0; i < h_to_search.length; i++) {
-             let heading = $(h_to_search[i]).map(function (i) {
-                 return $(this);
-             }).get();
-             if (heading.length > 0) {
-                 return heading[0];
-             }
-         }
-         return false;
-     };
-
-     exports.getHTMLFrontmatterHeader = function (html_body) {
-         let response = {
-             fm_header: '',
-             fm_properties: {}
-         };
-         const $ = cheerio.load(html_body, {
-             decodeEntities: false
-         });
-         if ($._root && $._root.children && $._root.children instanceof Array && $._root.children.length > 0) {
-             $._root.children.forEach(function (child) {
-                 if (child.type === 'comment' && child.data && child.data.startsWith('[[FRONTMATTER')) {
-                     // We have a Frontmatter header - return each property in an array
-                     const fm_properties = child.data.split(/\r?\n/);
-                     for (let i = 0; i < fm_properties.length; i++) {
-                         if (fm_properties[i].includes(':')) {
-                             const property_details = fm_properties[i].split(/:(.*)/s);
-                             if (property_details.length > 1) {
-                                 let prop_val = property_details[1].trim();
-                                 if (/^".*"$/.test(prop_val)) {
-                                     prop_val = prop_val.substring(1, prop_val.length - 1);
-                                 }
-                                 response.fm_properties[property_details[0].trim().toLowerCase()] = prop_val;
-                             }
-                         }
-                     }
-
-                     // And return the header as a whole so it can be easily replaced
-                     response.fm_header = child.data;
-                 }
-             });
-         }
-         return response;
-     };
-
-     exports.truncate_string = function (str, n, useWordBoundary) {
-         if (str.length <= n) {
-             return str;
-         }
-         const subString = str.slice(0, n - 1);
-         return (useWordBoundary ? subString.slice(0, subString.lastIndexOf(" ")) : subString) + '…';
-     };
-
-     exports.get_html_read_time = function (html) {
-         // Get word count
-         const text = html2text.convert(html, {
-             wordwrap: null
-         });
-         const word_count = wordsCount(text);
-         if (word_count === 0) return 0;
-
-         // Calculate the read time - divide the word count by 200
-         let read_time = Math.round(word_count / 200);
-         if (read_time === 0) read_time = 1;
-         return read_time;
-     };
-
-     exports.get_github_api_path = function (repo, relative_path) {
-         if (repo) {
-             repo = repo.endsWith('/') ? repo.slice(0, -1) : repo;
-             let github_paths = {};
-             github_paths.api_path = repo.replace('https://github.com/', 'https://api.github.com/repos/');
-             github_paths.api_path += '/commits?path=/' + relative_path.replace('\\\\', '/').replace('\\', '/');
-             github_paths.edit_path = repo + '/blob/main/' + relative_path.replace('\\\\', '/').replace('\\', '/');
-             return github_paths;
-         }
-         return '';
-     };
-
-     exports.get_github_contributors = async function (github_url, github_api_token) {
-         let response = {
-             success: false,
-             error: '',
-             contributors: [],
-             contributor_count: 0,
-             last_commit_date: ''
-         };
-
-         let contributors = {};
-
-         let request_options = {
-             headers: {
-                 'User-Agent': 'HornbillDocsBuild',
-                 'Cache-Control': 'no-cache',
-                 'Host': 'api.github.com',
-                 'Accept': 'application/json'
-             }
-         };
-         if (github_api_token !== '') {
-             request_options.headers.authorization = `Bearer ${github_api_token}`;
-         }
-         let github_response;
-         try {
-             github_response = await axios.get(github_url, request_options);
-         } catch (err) {
-             response.error = err;
-             return response;
-         }
-
-         if (github_response.status === 200) {
-             response.success = true;
-             let commits = github_response.data;
-             commits.forEach(function (commit) {
-                 if (commit.committer && commit.committer.type && commit.committer.type.toLowerCase() === 'user' && commit.committer.login.toLowerCase() !== 'web-flow') {
-                     if (!contributors[commit.committer.id]) {
-                         response.contributor_count++;
-                         contributors[commit.committer.id] = {
-                             login: commit.committer.login,
-                             avatar_url: commit.committer.avatar_url,
-                             html_url: commit.committer.html_url,
-                             name: commit.commit.committer.name
-                         };
-                     }
-                     if (response.last_commit_date !== '') {
-                         const new_commit_date = new Date(commit.commit.committer.date);
-                         const exist_commit_date = new Date(response.last_commit_date);
-                         if (new_commit_date > exist_commit_date) response.last_commit_date = commit.commit.committer.date;
-                     } else {
-                         response.last_commit_date = commit.commit.committer.date;
-                     }
-                 }
-             });
-             for (const key in contributors) {
-                 if (contributors.hasOwnProperty(key)) {
-                     response.contributors.push(contributors[key]);
-                 }
-             }
-         } else {
-             response.error = `Unexpected Status: ${github_response.status}.`;
-         }
-         return response;
-     };
-
-     exports.strip_drafts = function (nav_items) {
-         let return_nav = nav_items;
-         recurse_nav(return_nav);
-         return return_nav;
-     };
-
-     const recurse_nav = function (nav_items) {
-         for (const key in nav_items) {
-             if (nav_items[key].draft) {
-                 nav_items.splice(key, 1);
-                 recurse_nav(nav_items);
-             } else if (nav_items[key].items) {
-                 recurse_nav(nav_items[key].items);
-             }
-         }
-     };
-
-     exports.build_breadcrumbs = function (nav_items) {
-         const bc = {};
-         const buildBreadcrumb = (items, parentLinks) => {
-
-             // Process parent links
-             let parentlink = true;
-             if (parentLinks.length > 0) {
-                 if (parentLinks[0].link === undefined || parentLinks[0].link === '') parentlink = false;
-                 if (parentLinks[1] && parentLinks[1].link === undefined && items.length > 0 && items[0].link) {
-                     parentLinks[1].link = items[0].link;
-                 }
-                 if (parentLinks[2] && parentLinks[2].link === undefined && items.length > 0 && items[0].link) {
-                     parentLinks[2].link = items[0].link;
-                 }
-             }
-
-             // Loop through items, build breadcrumb
-             for (let i = 0; i < items.length; i++) {
-                 const item = items[i];
-                 if (!parentlink && item.link) {
-                     parentLinks[0].link = item.link;
-                     parentlink = true;
-                 }
-                 const { text, link, items: subItems } = item;
-                 const breadcrumb = [...parentLinks, { text, link }];
-
-                 if (link) {
-                     bc[link] = breadcrumb;
-                 }
-
-                 if (subItems) {
-                     buildBreadcrumb(subItems, breadcrumb);
-                 }
-             }
-         };
-
-         buildBreadcrumb(nav_items, []);
-         return bc;
-     };
-
-     exports.load_product_families = async function () {
-         let response = {
-             success: false,
-             prod_families: {},
-             prods_supported: [],
-             errors: ''
-         };
-         const prod_families_url = 'https://docs.hornbill.com/_books/products.json';
-         for (let i = 1; i < 4; i++) {
-             try {
-                 const prods = await axios.get(prod_families_url, {
-                     httpsAgent: agent,
-                     timeout: 5000
-                 });
-                 if (prods.status === 200) {
-                     response.prod_families = prods.data;
-                     response.prods_supported = [];
-                     for (let i = 0; i < response.prod_families.products.length; i++) {
-                         response.prods_supported.push(response.prod_families.products[i].id);
-                     }
-                     response.success = true;
-                     break;
-                 } else {
-                     throw `Unexpected status - ${prods.status} ${prods.statusText}`;
-                 }
-             } catch (e) {
-                 if (response.errors === '') response.errors = `Request to ${prod_families_url} failed:`;
-                 response.errors += `\nAttempt ${i} - Error returning product families: ${e}`;
-                 // Wait 2 seconds and try again
-                 await new Promise(r => setTimeout(r, 2000));
-             }
-         }
-         return response;
-     };
+ (function () {
+     'use strict';
+
+     const axios = require('axios'),
+         cheerio = require('cheerio'),
+         html2text = require('html-to-text'),
+         https = require('https'),
+         wordsCount = require('words-count').default;
+
+     let includesCache = {},
+         agent = new https.Agent({
+             rejectUnauthorized: false
+         });
+
+     exports.content_type_for_ext = function (ext) {
+         switch (ext) {
+             case '.z':
+                 return 'application/x-compress';
+             case '.tgz':
+                 return 'application/x-compressed';
+             case '.gz':
+                 return 'application/x-gzip';
+             case '.zip':
+                 return 'application/x-zip-compressed';
+             case '.xml':
+                 return 'application/xml';
+             case '.bmp':
+                 return 'image/bmp';
+             case '.gif':
+                 return 'image/gif';
+             case '.jpg':
+                 return 'image/jpeg';
+             case '.png':
+                 return 'image/png';
+             case '.tiff':
+                 return 'image/tiff';
+             case '.ico':
+                 return 'image/x-icon';
+             case '.png':
+                 return 'image/png';
+             case '.svg':
+                 return 'image/svg+xml';
+             case '.css':
+                 return 'text/css';
+             case '.htm':
+             case '.html':
+                 return 'text/html';
+             case '.txt':
+                 return 'text/plain';
+             case '.md':
+                 return 'text/plain';
+             case '.json':
+                 return 'application/json';
+             case '.js':
+                 return 'application/javascript';
+             default:
+                 return 'application/octet-stream';
+         }
+     };
+
+     exports.valid_url = function (url) {
+
+         const stringIsAValidUrl = (s) => {
+             try {
+                 const url_obj = new URL(s);
+                 return url_obj;
+             } catch (err) {
+                 return false;
+             }
+         };
+         return stringIsAValidUrl(url);
+     };
+
+     exports.expand_variables = function (text, docId = '') {
+         if (docId !== '') {
+             text = text.replaceAll('{{DOC_ID}}', docId);
+         }
+         text = text.replaceAll('{{BUILD_NUMBER}}', '0');
+
+         let build_date = new Date().toISOString();
+         build_date = build_date.replace('T', ' ');
+         build_date = build_date.substring(0, 19);
+         text = text.replaceAll('{{BUILD_DATE}}', build_date);
+         return text;
+     };
+
+
+     exports.process_includes = async function (file_path, body) {
+         let response = {
+             body: '',
+             found: 0,
+             success: 0,
+             failed: 0,
+             errors: []
+         };
+
+         // Search body for INCLUDEs
+         const regexp = /\[\[INCLUDE .*]]/g;
+         const body_array = [...body.matchAll(regexp)];
+
+         for (let i = 0; i < body_array.length; i++) {
+             response.found++;
+
+             // Extract include data from array
+             const include_value = body_array[i][0];
+
+             let link;
+             try {
+                 link = include_value.split(' ')[1];
+                 link = link.substring(0, link.length - 2);
+             } catch (e) {
+                 response.failed++;
+                 response.errors.push(`Error parsing INCLUDE [${include_value}] from [${file_path}]: ${e}`);
+                 continue;
+             }
+
+             if (includesCache[link] !== undefined) {
+                 console.log(`Serving From Cache: ${link}`);
+                 body = body.replace(include_value, includesCache[link]);
+                 continue;
+             }
+
+             // Validate link in INCLUDE
+             try {
+                 new URL(link);
+             } catch (err) {
+                 response.failed++;
+                 response.errors.push(`Error validating INCLUDE link [${link}] from [${file_path}]: ${err}`);
+                 continue;
+             }
+
+             let file_content;
+             try {
+                 const file_response = await axios.get(link);
+                 if (file_response.status === 200) {
+                     file_content = file_response.data;
+                 } else {
+                     throw `Unexpected Status ${file_response.status}`;
+                 }
+             } catch (e) {
+                 response.failed++;
+                 response.errors.push(`Error getting INCLUDE link content [${link}] from [${file_path}]: ${e}`);
+                 continue;
+             }
+             response.success++;
+             includesCache[link] = file_content;
+             body = body.replace(include_value, file_content);
+         }
+         response.body = body;
+         return response;
+     };
+
+     // Takes html, returns the first heading detected in the order provided in h_to_search
+     // Looks for h1 tags first, then hX, hY, hZ in order
+     exports.getFirstHTMLHeading = function (html_body, h_to_search = ['h1']) {
+         const $ = cheerio.load(html_body);
+         for (let i = 0; i < h_to_search.length; i++) {
+             let heading = $(h_to_search[i]).map(function (i) {
+                 return $(this);
+             }).get();
+             if (heading.length > 0) {
+                 return heading[0];
+             }
+         }
+         return false;
+     };
+
+     exports.getHTMLFrontmatterHeader = function (html_body) {
+         let response = {
+             fm_header: '',
+             fm_properties: {}
+         };
+         const $ = cheerio.load(html_body, {
+             decodeEntities: false
+         });
+         if ($._root && $._root.children && $._root.children instanceof Array && $._root.children.length > 0) {
+             $._root.children.forEach(function (child) {
+                 if (child.type === 'comment' && child.data && child.data.startsWith('[[FRONTMATTER')) {
+                     // We have a Frontmatter header - return each property in an array
+                     const fm_properties = child.data.split(/\r?\n/);
+                     for (let i = 0; i < fm_properties.length; i++) {
+                         if (fm_properties[i].includes(':')) {
+                             const property_details = fm_properties[i].split(/:(.*)/s);
+                             if (property_details.length > 1) {
+                                 let prop_val = property_details[1].trim();
+                                 if (/^".*"$/.test(prop_val)) {
+                                     prop_val = prop_val.substring(1, prop_val.length - 1);
+                                 }
+                                 response.fm_properties[property_details[0].trim().toLowerCase()] = prop_val;
+                             }
+                         }
+                     }
+
+                     // And return the header as a whole so it can be easily replaced
+                     response.fm_header = child.data;
+                 }
+             });
+         }
+         return response;
+     };
+
+     exports.truncate_string = function (str, n, useWordBoundary) {
+         if (str.length <= n) {
+             return str;
+         }
+         const subString = str.slice(0, n - 1);
+         return (useWordBoundary ? subString.slice(0, subString.lastIndexOf(" ")) : subString) + '…';
+     };
+
+     exports.get_html_read_time = function (html) {
+         // Get word count
+         const text = html2text.convert(html, {
+             wordwrap: null
+         });
+         const word_count = wordsCount(text);
+         if (word_count === 0) return 0;
+
+         // Calculate the read time - divide the word count by 200
+         let read_time = Math.round(word_count / 200);
+         if (read_time === 0) read_time = 1;
+         return read_time;
+     };
+
+     exports.get_github_api_path = function (repo, relative_path) {
+         if (repo) {
+             repo = repo.endsWith('/') ? repo.slice(0, -1) : repo;
+             let github_paths = {};
+             github_paths.api_path = repo.replace('https://github.com/', 'https://api.github.com/repos/');
+             github_paths.api_path += '/commits?path=/' + relative_path.replace('\\\\', '/').replace('\\', '/');
+             github_paths.edit_path = repo + '/blob/main/' + relative_path.replace('\\\\', '/').replace('\\', '/');
+             return github_paths;
+         }
+         return '';
+     };
+
+     exports.get_github_contributors = async function (github_url, github_api_token) {
+         let response = {
+             success: false,
+             error: '',
+             contributors: [],
+             contributor_count: 0,
+             last_commit_date: ''
+         };
+
+         let contributors = {};
+
+         let request_options = {
+             headers: {
+                 'User-Agent': 'HornbillDocsBuild',
+                 'Cache-Control': 'no-cache',
+                 'Host': 'api.github.com',
+                 'Accept': 'application/json'
+             }
+         };
+         if (github_api_token !== '') {
+             request_options.headers.authorization = `Bearer ${github_api_token}`;
+         }
+         let github_response;
+         try {
+             github_response = await axios.get(github_url, request_options);
+         } catch (err) {
+             response.error = err;
+             return response;
+         }
+
+         if (github_response.status === 200) {
+             response.success = true;
+             let commits = github_response.data;
+             commits.forEach(function (commit) {
+                 if (commit.committer && commit.committer.type && commit.committer.type.toLowerCase() === 'user' && commit.committer.login.toLowerCase() !== 'web-flow') {
+                     if (!contributors[commit.committer.id]) {
+                         response.contributor_count++;
+                         contributors[commit.committer.id] = {
+                             login: commit.committer.login,
+                             avatar_url: commit.committer.avatar_url,
+                             html_url: commit.committer.html_url,
+                             name: commit.commit.committer.name
+                         };
+                     }
+                     if (response.last_commit_date !== '') {
+                         const new_commit_date = new Date(commit.commit.committer.date);
+                         const exist_commit_date = new Date(response.last_commit_date);
+                         if (new_commit_date > exist_commit_date) response.last_commit_date = commit.commit.committer.date;
+                     } else {
+                         response.last_commit_date = commit.commit.committer.date;
+                     }
+                 }
+             });
+             for (const key in contributors) {
+                 if (contributors.hasOwnProperty(key)) {
+                     response.contributors.push(contributors[key]);
+                 }
+             }
+         } else {
+             response.error = `Unexpected Status: ${github_response.status}.`;
+         }
+         return response;
+     };
+
+     exports.strip_drafts = function (nav_items) {
+         let return_nav = nav_items;
+         recurse_nav(return_nav);
+         return return_nav;
+     };
+
+     const recurse_nav = function (nav_items) {
+         for (const key in nav_items) {
+             if (nav_items[key].draft) {
+                 nav_items.splice(key, 1);
+                 recurse_nav(nav_items);
+             } else if (nav_items[key].items) {
+                 recurse_nav(nav_items[key].items);
+             }
+         }
+     };
+
+     exports.build_breadcrumbs = function (nav_items) {
+         const bc = {};
+         const buildBreadcrumb = (items, parentLinks) => {
+
+             // Process parent links
+             let parentlink = true;
+             if (parentLinks.length > 0) {
+                 if (parentLinks[0].link === undefined || parentLinks[0].link === '') parentlink = false;
+                 if (parentLinks[1] && parentLinks[1].link === undefined && items.length > 0 && items[0].link) {
+                     parentLinks[1].link = items[0].link;
+                 }
+                 if (parentLinks[2] && parentLinks[2].link === undefined && items.length > 0 && items[0].link) {
+                     parentLinks[2].link = items[0].link;
+                 }
+             }
+
+             // Loop through items, build breadcrumb
+             for (let i = 0; i < items.length; i++) {
+                 const item = items[i];
+                 if (!parentlink && item.link) {
+                     parentLinks[0].link = item.link;
+                     parentlink = true;
+                 }
+                 const { text, link, items: subItems } = item;
+                 const breadcrumb = [...parentLinks, { text, link }];
+
+                 if (link) {
+                     bc[link] = breadcrumb;
+                 }
+
+                 if (subItems) {
+                     buildBreadcrumb(subItems, breadcrumb);
+                 }
+             }
+         };
+
+         buildBreadcrumb(nav_items, []);
+         return bc;
+     };
+
+     exports.load_product_families = async function () {
+         let response = {
+             success: false,
+             prod_families: {},
+             prods_supported: [],
+             errors: ''
+         };
+         const prod_families_url = 'https://docs.hornbill.com/_books/products.json';
+         for (let i = 1; i < 4; i++) {
+             try {
+                 const prods = await axios.get(prod_families_url, {
+                     httpsAgent: agent,
+                     timeout: 5000
+                 });
+                 if (prods.status === 200) {
+                     response.prod_families = prods.data;
+                     response.prods_supported = [];
+                     for (let i = 0; i < response.prod_families.products.length; i++) {
+                         response.prods_supported.push(response.prod_families.products[i].id);
+                     }
+                     response.success = true;
+                     break;
+                 } else {
+                     throw `Unexpected status - ${prods.status} ${prods.statusText}`;
+                 }
+             } catch (e) {
+                 if (response.errors === '') response.errors = `Request to ${prod_families_url} failed:`;
+                 response.errors += `\nAttempt ${i} - Error returning product families: ${e}`;
+                 // Wait 2 seconds and try again
+                 await new Promise(r => setTimeout(r, 2000));
+             }
+         }
+         return response;
+     };
  })();
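
For orientation, a minimal consumer sketch of the module shown in this diff follows. It is not part of the package; it assumes a local checkout where hdoc-module.js sits next to the calling script and its dependencies are installed, and the argument values are arbitrary examples.

// Hypothetical consumer of the module above; the relative require path is an assumption.
const hdoc = require('./hdoc-module.js');

// Map a file extension to a Content-Type value ('application/octet-stream' for unknown extensions).
console.log(hdoc.content_type_for_ext('.svg')); // image/svg+xml

// valid_url returns a URL object for a parseable URL, or false otherwise.
console.log(Boolean(hdoc.valid_url('https://docs.hornbill.com/'))); // true

// Expand the {{DOC_ID}}, {{BUILD_NUMBER}} and {{BUILD_DATE}} placeholders in a string.
console.log(hdoc.expand_variables('Doc {{DOC_ID}} built {{BUILD_DATE}}', 'example-doc-id'));

// Estimate reading time in minutes (word count / 200, minimum of 1 for non-empty input).
console.log(hdoc.get_html_read_time('<p>' + 'word '.repeat(450) + '</p>')); // 2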