spec-up-t 1.3.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/copilot-instructions.md +13 -0
- package/assets/compiled/body.js +18 -12
- package/assets/compiled/head.css +8 -6
- package/assets/css/collapse-definitions.css +0 -1
- package/assets/css/counter.css +10 -22
- package/assets/css/create-pdf.css +4 -2
- package/assets/css/create-term-filter.css +4 -4
- package/assets/css/definition-buttons-container.css +60 -0
- package/assets/css/{pdf-download.css → download-pdf-docx.css} +9 -5
- package/assets/css/insert-trefs.css +7 -0
- package/assets/css/sidebar-toc.css +2 -1
- package/assets/css/terms-and-definitions.css +73 -22
- package/assets/js/add-href-to-snapshot-link.js +16 -9
- package/assets/js/addAnchorsToTerms.js +2 -2
- package/assets/js/charts.js +10 -0
- package/assets/js/collapse-definitions.js +13 -2
- package/assets/js/collapse-meta-info.js +11 -9
- package/assets/js/definition-button-container-utils.js +82 -0
- package/assets/js/download-pdf-docx.js +68 -0
- package/assets/js/edit-term-buttons.js +77 -20
- package/assets/js/github-issues.js +35 -0
- package/assets/js/github-repo-info.js +144 -0
- package/assets/js/highlight-heading-plus-sibling-nodes.test.js +18 -0
- package/assets/js/insert-trefs.js +62 -13
- package/assets/js/mermaid-diagrams.js +11 -0
- package/assets/js/terminology-section-utility-container/README.md +107 -0
- package/assets/js/terminology-section-utility-container/create-alphabet-index.js +17 -0
- package/assets/js/{create-term-filter.js → terminology-section-utility-container/create-term-filter.js} +11 -44
- package/assets/js/terminology-section-utility-container/hide-show-utility-container.js +21 -0
- package/assets/js/terminology-section-utility-container/search.js +203 -0
- package/assets/js/terminology-section-utility-container.js +203 -0
- package/assets/js/tooltips.js +283 -0
- package/config/asset-map.json +26 -18
- package/index.js +57 -390
- package/package.json +5 -2
- package/src/add-remove-xref-source.js +20 -21
- package/src/collect-external-references.js +8 -337
- package/src/collect-external-references.test.js +440 -33
- package/src/configure.js +8 -109
- package/src/create-docx.js +7 -6
- package/src/create-pdf.js +15 -14
- package/src/freeze-spec-data.js +46 -0
- package/src/git-info.test.js +76 -0
- package/src/health-check/destination-gitignore-checker.js +5 -3
- package/src/health-check/external-specs-checker.js +5 -4
- package/src/health-check/specs-configuration-checker.js +2 -1
- package/src/health-check/term-references-checker.js +5 -3
- package/src/health-check/terms-intro-checker.js +2 -1
- package/src/health-check/tref-term-checker.js +8 -7
- package/src/health-check.js +8 -7
- package/src/init.js +3 -2
- package/src/install-from-boilerplate/add-gitignore-entries.js +3 -2
- package/src/install-from-boilerplate/add-scripts-keys.js +5 -4
- package/src/install-from-boilerplate/boilerplate/.github/workflows/menu.yml +74 -97
- package/src/install-from-boilerplate/boilerplate/README.md +1 -1
- package/src/install-from-boilerplate/boilerplate/spec/example-markup-in-markdown.md +1 -1
- package/src/install-from-boilerplate/boilerplate/spec/spec-head.md +2 -2
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/composability.md +3 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/compost.md +3 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/fertilizer.md +3 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/mulch.md +3 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/pruning.md +3 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/seedling.md +3 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/soil.md +11 -0
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/watering.md +3 -0
- package/src/install-from-boilerplate/boilerplate/specs.json +24 -10
- package/src/install-from-boilerplate/config-scripts-keys.js +3 -3
- package/src/install-from-boilerplate/config-system-files.js +0 -1
- package/src/install-from-boilerplate/copy-boilerplate.js +2 -1
- package/src/install-from-boilerplate/copy-system-files.js +4 -3
- package/src/install-from-boilerplate/custom-update.js +12 -1
- package/src/install-from-boilerplate/help.txt +1 -1
- package/src/install-from-boilerplate/menu.sh +6 -6
- package/src/json-key-validator.js +17 -11
- package/src/markdown-it/README.md +207 -0
- package/src/markdown-it/definition-lists.js +397 -0
- package/src/markdown-it/index.js +83 -0
- package/src/markdown-it/link-enhancement.js +98 -0
- package/src/markdown-it/plugins.js +118 -0
- package/src/markdown-it/table-enhancement.js +97 -0
- package/src/markdown-it/template-tag-syntax.js +152 -0
- package/src/parsers/index.js +16 -0
- package/src/parsers/spec-parser.js +152 -0
- package/src/parsers/spec-parser.test.js +109 -0
- package/src/parsers/template-tag-parser.js +277 -0
- package/src/parsers/template-tag-parser.test.js +107 -0
- package/src/pipeline/configuration/configure-starterpack.js +200 -0
- package/src/{create-external-specs-list.js → pipeline/configuration/create-external-specs-list.js} +13 -12
- package/src/{create-term-index.js → pipeline/configuration/create-term-index.js} +19 -18
- package/src/{create-versions-index.js → pipeline/configuration/create-versions-index.js} +4 -3
- package/src/{insert-term-index.js → pipeline/configuration/insert-term-index.js} +2 -2
- package/src/pipeline/configuration/prepare-spec-configuration.js +70 -0
- package/src/pipeline/parsing/apply-markdown-it-extensions.js +35 -0
- package/src/pipeline/parsing/create-markdown-parser.js +94 -0
- package/src/pipeline/parsing/create-markdown-parser.test.js +49 -0
- package/src/{html-dom-processor.js → pipeline/postprocessing/definition-list-postprocessor.js} +69 -10
- package/src/{escape-handler.js → pipeline/preprocessing/escape-processor.js} +3 -1
- package/src/{fix-markdown-files.js → pipeline/preprocessing/normalize-terminology-markdown.js} +41 -31
- package/src/pipeline/references/collect-external-references.js +307 -0
- package/src/pipeline/references/external-references-service.js +231 -0
- package/src/pipeline/references/fetch-terms-from-index.js +198 -0
- package/src/pipeline/references/match-term.js +34 -0
- package/src/{collectExternalReferences/matchTerm.test.js → pipeline/references/match-term.test.js} +8 -2
- package/src/pipeline/references/process-xtrefs-data.js +94 -0
- package/src/pipeline/references/xtref-utils.js +166 -0
- package/src/pipeline/rendering/render-spec-document.js +146 -0
- package/src/pipeline/rendering/render-utils.js +154 -0
- package/src/utils/LOGGER.md +81 -0
- package/src/utils/{doesUrlExist.js → does-url-exist.js} +4 -3
- package/src/utils/fetch.js +5 -4
- package/src/utils/file-opener.js +3 -2
- package/src/utils/git-info.js +77 -0
- package/src/utils/logger.js +74 -0
- package/src/utils/regex-patterns.js +471 -0
- package/src/utils/regex-patterns.test.js +281 -0
- package/templates/template.html +56 -21
- package/assets/js/create-alphabet-index.js +0 -60
- package/assets/js/hide-show-utility-container.js +0 -16
- package/assets/js/index.js +0 -87
- package/assets/js/pdf-download.js +0 -46
- package/assets/js/search.js +0 -365
- package/src/collectExternalReferences/fetchTermsFromIndex.js +0 -284
- package/src/collectExternalReferences/matchTerm.js +0 -32
- package/src/collectExternalReferences/processXTrefsData.js +0 -108
- package/src/freeze.js +0 -90
- package/src/install-from-boilerplate/boilerplate/.github/workflows/fetch-and-push-xrefs.yml.old +0 -42
- package/src/install-from-boilerplate/boilerplate/.github/workflows/render-specs.yml +0 -47
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/term-1.md +0 -13
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/term-2.md +0 -3
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/term-3.md +0 -3
- package/src/install-from-boilerplate/boilerplate/spec/terms-definitions/term-4.md +0 -3
- package/src/markdown-it-extensions.js +0 -395
- package/src/references.js +0 -114
- /package/assets/css/{bootstrap.min.css → embedded-libraries/bootstrap.min.css} +0 -0
- /package/assets/css/{prism.css → embedded-libraries/prism.css} +0 -0
- /package/assets/css/{prism.dark.css → embedded-libraries/prism.dark.css} +0 -0
- /package/assets/css/{prism.default.css → embedded-libraries/prism.default.css} +0 -0
- /package/assets/js/{bootstrap.bundle.min.js → embedded-libraries/bootstrap.bundle.min.js} +0 -0
- /package/assets/js/{chart.js → embedded-libraries/chart.js} +0 -0
- /package/assets/js/{diff.min.js → embedded-libraries/diff.min.js} +0 -0
- /package/assets/js/{font-awesome.js → embedded-libraries/font-awesome.js} +0 -0
- /package/assets/js/{mermaid.js → embedded-libraries/mermaid.js} +0 -0
- /package/assets/js/{notyf.js → embedded-libraries/notyf.js} +0 -0
- /package/assets/js/{popper.js → embedded-libraries/popper.js} +0 -0
- /package/assets/js/{prism.dark.js → embedded-libraries/prism.dark.js} +0 -0
- /package/assets/js/{prism.default.js → embedded-libraries/prism.default.js} +0 -0
- /package/assets/js/{prism.js → embedded-libraries/prism.js} +0 -0
- /package/assets/js/{tippy.js → embedded-libraries/tippy.js} +0 -0
- /package/src/{escape-mechanism.js → pipeline/preprocessing/escape-placeholder-utils.js} +0 -0
- /package/src/utils/{isLineWithDefinition.js → is-line-with-definition.js} +0 -0
package/src/pipeline/references/fetch-terms-from-index.js
ADDED
@@ -0,0 +1,198 @@
+/**
+ * @file fetch-terms-from-index.js
+ * @description Fetches term definitions from an external specification repository using either the published GitHub Pages site or the raw repository fallback.
+ */
+
+const fs = require('fs');
+const path = require('path');
+const { JSDOM } = require('jsdom');
+const axios = require('axios');
+const { getPath } = require('../../../config/paths');
+const Logger = require('../../utils/logger');
+
+const CACHE_DIR = getPath('githubcache');
+
+async function getFileCommitHash(token, owner, repo, filePath, headers) {
+  try {
+    const normalizedPath = filePath.replace(/^\//, '');
+    const commitsUrl = `https://api.github.com/repos/${owner}/${repo}/commits?path=${normalizedPath}&per_page=1`;
+    Logger.process(`Fetching latest commit for file: ${commitsUrl}`);
+
+    const response = await axios.get(commitsUrl, { headers });
+    if (response.status !== 200 || !Array.isArray(response.data) || response.data.length === 0) {
+      Logger.error(`Could not find commit information for ${filePath}`);
+      return null;
+    }
+
+    return response.data[0].sha;
+  } catch (error) {
+    Logger.error(`Error fetching commit hash: ${error.message}`);
+    return null;
+  }
+}
+
+async function fetchAllTermsFromIndex(token, owner, repo, options = {}) {
+  try {
+    const headers = token ? { Authorization: `token ${token}` } : {};
+    let indexHtmlUrl;
+    let commitHash = null;
+
+    if (options.ghPageUrl) {
+      indexHtmlUrl = options.ghPageUrl.endsWith('/')
+        ? `${options.ghPageUrl}index.html`
+        : `${options.ghPageUrl}/index.html`;
+      Logger.process(`Fetching index.html from GitHub Pages: ${indexHtmlUrl}`);
+
+      try {
+        const mainBranchUrl = `https://api.github.com/repos/${owner}/${repo}/branches/main`;
+        const branchResponse = await axios.get(mainBranchUrl, { headers });
+        if (branchResponse.status === 200) {
+          commitHash = branchResponse.data.commit.sha;
+          Logger.success(`Got commit hash from main branch: ${commitHash}`);
+        }
+      } catch (error) {
+        Logger.error(`Could not get commit hash from main branch: ${error.message}`);
+      }
+    } else {
+      Logger.warn('No GitHub Pages URL provided, falling back to repository method');
+      const specsJsonUrl = `https://api.github.com/repos/${owner}/${repo}/contents/specs.json`;
+      Logger.process(`Fetching specs.json from: ${specsJsonUrl}`);
+
+      const specsJsonResponse = await axios.get(specsJsonUrl, { headers });
+      if (specsJsonResponse.status !== 200) {
+        Logger.error(`Could not find specs.json in repository ${owner}/${repo}`);
+        return null;
+      }
+
+      const specsJsonContent = Buffer.from(specsJsonResponse.data.content, 'base64').toString('utf8');
+      const specsJson = JSON.parse(specsJsonContent);
+      const outputPath = specsJson.specs?.[0]?.output_path;
+      if (!outputPath) {
+        Logger.error(`No output_path found in specs.json for repository ${owner}/${repo}`);
+        return null;
+      }
+
+      const normalizedOutputPath = outputPath.replace(/^\.\//, '').replace(/\/$/, '');
+      const indexHtmlPath = `${normalizedOutputPath}/index.html`;
+      indexHtmlUrl = `https://raw.githubusercontent.com/${owner}/${repo}/main/${indexHtmlPath}`;
+      Logger.process(`Fetching index.html from raw repository: ${indexHtmlUrl}`);
+
+      commitHash = await getFileCommitHash(token, owner, repo, indexHtmlPath, headers);
+      if (!commitHash) {
+        Logger.warn('Could not get commit hash for index.html, continuing without it');
+      }
+    }
+
+    const indexHtmlResponse = await axios.get(indexHtmlUrl, { headers });
+    if (indexHtmlResponse.status !== 200) {
+      Logger.error(`Could not find index.html at ${indexHtmlUrl}`);
+      return null;
+    }
+
+    const dom = new JSDOM(indexHtmlResponse.data);
+    const document = dom.window.document;
+    const termDlList = document.querySelector('dl.terms-and-definitions-list');
+    if (!termDlList) {
+      Logger.error(`No terms-and-definitions-list found in ${indexHtmlUrl}`);
+      return null;
+    }
+
+    const terms = [];
+    const dtElements = termDlList.querySelectorAll('dt');
+    dtElements.forEach(dt => {
+      const termSpan = dt.querySelector('span[id^="term:"]');
+      if (!termSpan) {
+        return;
+      }
+
+      let termText = '';
+      termSpan.childNodes.forEach(node => {
+        if (node.nodeType === dom.window.Node.TEXT_NODE) {
+          termText += node.textContent.trim();
+        }
+      });
+
+      if (!termText) {
+        termText = termSpan.textContent.trim();
+      }
+
+      if (!termText) {
+        return;
+      }
+
+      const definitions = [];
+      let pointer = dt.nextElementSibling;
+      while (pointer && pointer.tagName.toLowerCase() === 'dd') {
+        definitions.push(pointer.outerHTML);
+        pointer = pointer.nextElementSibling;
+      }
+
+      terms.push({ term: termText, definition: definitions.join('\n') });
+    });
+
+    const timestamp = Date.now();
+    fs.mkdirSync(CACHE_DIR, { recursive: true });
+    const outputFileName = `${timestamp}-${owner}-${repo}-terms.json`;
+    const outputFilePath = path.join(CACHE_DIR, outputFileName);
+
+    const result = {
+      timestamp,
+      repository: `${owner}/${repo}`,
+      terms,
+      sha: commitHash,
+      avatarUrl: null,
+      outputFileName
+    };
+
+    fs.writeFileSync(outputFilePath, JSON.stringify(result, null, 2));
+    Logger.success(`Saved ${terms.length} terms to ${outputFilePath}`);
+
+    return result;
+  } catch (error) {
+    if (error.response) {
+      if (error.response.status === 404) {
+        Logger.error(`Resource not found: ${error.config.url}`);
+      } else if (error.response.status === 403 && error.response.headers['x-ratelimit-remaining'] === '0') {
+        const resetTime = new Date(Number(error.response.headers['x-ratelimit-reset']) * 1000);
+        Logger.error(`GitHub API rate limit exceeded. Try again after ${resetTime.toLocaleString()}`);
+      } else {
+        Logger.error(`Error fetching data: ${error.response.status} ${error.response.statusText}`);
+      }
+    } else {
+      Logger.error(`Error fetching term: ${error.message}`);
+    }
+    return null;
+  }
+}
+
+async function fetchTermsFromIndex(token, term, owner, repo, termsDir, options = {}) {
+  const allTermsData = await fetchAllTermsFromIndex(token, owner, repo, options);
+  if (!allTermsData || !Array.isArray(allTermsData.terms)) {
+    return null;
+  }
+
+  const foundTerm = allTermsData.terms.find(t => t.term.toLowerCase() === term.toLowerCase());
+  if (!foundTerm) {
+    Logger.error(`Term "${term}" not found in repository ${owner}/${repo}`);
+    return null;
+  }
+
+  Logger.success(`Found term '${term}' in repository ${owner}/${repo}`);
+  return {
+    term: foundTerm.term,
+    content: foundTerm.definition,
+    sha: allTermsData.sha,
+    repository: {
+      owner: {
+        login: owner,
+        avatar_url: allTermsData.avatarUrl
+      },
+      name: repo
+    }
+  };
+}
+
+module.exports = {
+  fetchTermsFromIndex,
+  fetchAllTermsFromIndex
+};
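A rough usage sketch of the new module follows; the token, owner/repo values, and GitHub Pages URL are hypothetical placeholders, and the require path assumes the caller sits next to the module.

const { fetchTermsFromIndex, fetchAllTermsFromIndex } = require('./fetch-terms-from-index');

(async () => {
  // Fetch every term the external spec publishes (GitHub Pages first, raw repository as fallback).
  const all = await fetchAllTermsFromIndex(process.env.GITHUB_API_TOKEN, 'example-org', 'example-spec', {
    ghPageUrl: 'https://example-org.github.io/example-spec/'   // hypothetical URL
  });
  if (all) console.log(`${all.terms.length} terms cached as ${all.outputFileName}`);

  // Or look up a single term; the termsDir argument is accepted but not used by the current implementation.
  const single = await fetchTermsFromIndex(process.env.GITHUB_API_TOKEN, 'soil', 'example-org', 'example-spec', null);
  if (single) console.log(single.term, single.sha);
})();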
package/src/pipeline/references/match-term.js
ADDED
@@ -0,0 +1,34 @@
+/**
+ * @file match-term.js
+ * @description Utilities for matching a specific term within a [[def: ...]] definition block.
+ */
+
+const { isLineWithDefinition } = require('../../utils/is-line-with-definition');
+const Logger = require('../../utils/logger');
+
+function matchTerm(text, term) {
+  if (!text || typeof text !== 'string') {
+    Logger.warn('Nothing to match for term:', term);
+    return false;
+  }
+
+  const firstLine = text.split('\n')[0].trim();
+  if (!isLineWithDefinition(firstLine)) {
+    Logger.warn('String does not start with `[[def:` or end with `]]`');
+    return false;
+  }
+
+  const startPos = firstLine.indexOf('[[def:') + 6;
+  const endPos = firstLine.indexOf(']]');
+  if (startPos === -1 || endPos === -1) {
+    return false;
+  }
+
+  const relevantPart = firstLine.substring(startPos, endPos);
+  const termsArray = relevantPart.split(',').map(entry => entry.trim());
+  return termsArray.includes(term);
+}
+
+module.exports = {
+  matchTerm
+};
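Note that matchTerm only inspects the first line of the supplied text; a minimal sketch (the require path assumes a sibling file):

const { matchTerm } = require('./match-term');

// True: 'soil' appears in the [[def: ...]] list on the first line.
console.log(matchTerm('[[def: soil, topsoil]]\nDefinition body', 'soil'));

// False: the definition tag is not on the first line, so it is ignored.
console.log(matchTerm('Intro line\n[[def: soil]]', 'soil'));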
package/src/{collectExternalReferences/matchTerm.test.js → pipeline/references/match-term.test.js}
RENAMED
@@ -1,30 +1,36 @@
-const { matchTerm } = require('./
+const { matchTerm } = require('./match-term');
 
+// Tests for matching terms within definition markup
 describe('matchTerm', () => {
+  // Test: Can the system find terms that exist in definition markup?
   test('returns true when the term is found in a correctly formatted definition', () => {
     const text = '[[def: term1, term2]]\nSome additional text';
     expect(matchTerm(text, 'term1')).toBe(true);
     expect(matchTerm(text, 'term2')).toBe(true);
   });
 
+  // Test: Does the system correctly identify when a term is not present?
   test('returns false when the term is not found in the definition', () => {
     const text = '[[def: term1, term2]]\nSome additional text';
     expect(matchTerm(text, 'term3')).toBe(false);
   });
 
+  // Test: Does the system handle invalid input gracefully?
   test('returns false when the text is null or not a string', () => {
     expect(matchTerm(null, 'term1')).toBe(false);
     expect(matchTerm(123, 'term1')).toBe(false);
   });
 
+  // Test: Does the system require proper definition markup format?
   test('returns false when the first line is not a valid definition', () => {
     const text = 'Invalid definition line\n[[def: term1, term2]]';
     expect(matchTerm(text, 'term1')).toBe(false);
   });
 
+  // Test: Can the system handle whitespace variations in markup?
   test('handles extra spaces correctly', () => {
     const text = '[[def: term1 , term2 ]]';
     expect(matchTerm(text, 'term1')).toBe(true);
     expect(matchTerm(text, 'term2')).toBe(true);
   });
-});
+});
package/src/pipeline/references/process-xtrefs-data.js
ADDED
@@ -0,0 +1,94 @@
+const fs = require('fs');
+const path = require('path');
+const { fetchAllTermsFromIndex } = require('./fetch-terms-from-index');
+const { getPath } = require('../../../config/paths');
+const Logger = require('../../utils/logger');
+
+const CACHE_DIR = getPath('githubcache');
+
+async function processXTrefsData(allXTrefs, GITHUB_API_TOKEN, outputPathJSON, outputPathJS, outputPathJSTimeStamped) {
+  try {
+    if (!fs.existsSync(CACHE_DIR)) {
+      fs.mkdirSync(CACHE_DIR, { recursive: true });
+    }
+
+    allXTrefs.xtrefs = allXTrefs.xtrefs.filter(xtref => {
+      if (!xtref.owner || !xtref.repo || !xtref.repoUrl) {
+        Logger.warn(`Removing incomplete reference: ${xtref.externalSpec}, ${xtref.term}`);
+        return false;
+      }
+      return true;
+    });
+
+    const xrefsByRepo = allXTrefs.xtrefs.reduce((groups, xtref) => {
+      const repoKey = `${xtref.owner}/${xtref.repo}`;
+      if (!groups[repoKey]) {
+        groups[repoKey] = {
+          owner: xtref.owner,
+          repo: xtref.repo,
+          xtrefs: []
+        };
+      }
+      groups[repoKey].xtrefs.push(xtref);
+      return groups;
+    }, {});
+
+    Logger.highlight(`Grouped ${allXTrefs.xtrefs.length} terms into ${Object.keys(xrefsByRepo).length} repositories`);
+
+    for (const repoKey of Object.keys(xrefsByRepo)) {
+      const repoGroup = xrefsByRepo[repoKey];
+      Logger.process(`Processing repository: ${repoKey} (${repoGroup.xtrefs.length} terms)`);
+
+      const ghPageUrl = repoGroup.xtrefs[0]?.ghPageUrl;
+      const allTermsData = await fetchAllTermsFromIndex(
+        GITHUB_API_TOKEN,
+        repoGroup.owner,
+        repoGroup.repo,
+        { ghPageUrl }
+      );
+
+      if (!allTermsData) {
+        Logger.error(`Could not fetch terms from repository ${repoKey}`);
+        repoGroup.xtrefs.forEach(xtref => {
+          xtref.commitHash = 'not found';
+          xtref.content = 'This term was not found in the external repository.';
+          xtref.avatarUrl = null;
+        });
+        continue;
+      }
+
+      for (const xtref of repoGroup.xtrefs) {
+        const foundTerm = allTermsData.terms.find(
+          t => t.term.toLowerCase() === xtref.term.toLowerCase()
+        );
+
+        if (foundTerm) {
+          xtref.commitHash = allTermsData.sha;
+          xtref.content = foundTerm.definition;
+          xtref.avatarUrl = allTermsData.avatarUrl;
+          Logger.success(`Match found for term: ${xtref.term} in ${xtref.externalSpec}`);
+        } else {
+          xtref.commitHash = 'not found';
+          xtref.content = 'This term was not found in the external repository.';
+          xtref.avatarUrl = null;
+          Logger.error(`Origin: ${xtref.sourceFile || xtref.sourceFiles.join(', ')} 👉 No match found for term: ${xtref.term} in ${xtref.externalSpec} (${repoKey})`);
+        }
+      }
+
+      Logger.success(`Finished processing repository: ${repoKey}`);
+      Logger.separator();
+    }
+
+    const allXTrefsStr = JSON.stringify(allXTrefs, null, 2);
+    fs.writeFileSync(outputPathJSON, allXTrefsStr, 'utf8');
+    const jsPayload = `const allXTrefs = ${allXTrefsStr};`;
+    fs.writeFileSync(outputPathJS, jsPayload, 'utf8');
+    fs.writeFileSync(outputPathJSTimeStamped, jsPayload, 'utf8');
+  } catch (error) {
+    Logger.error('An error occurred:', error);
+  }
+}
+
+module.exports = {
+  processXTrefsData
+};
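A hedged sketch of how the processor might be driven; the field names mirror what the function reads (owner, repo, repoUrl, ghPageUrl, term, externalSpec), while the org/repo values and output paths are placeholders.

const { processXTrefsData } = require('./process-xtrefs-data');

const allXTrefs = {
  xtrefs: [{
    externalSpec: 'example-spec',
    term: 'soil',
    owner: 'example-org',
    repo: 'example-spec',
    repoUrl: 'https://github.com/example-org/example-spec',
    ghPageUrl: 'https://example-org.github.io/example-spec/'
  }]
};

// Enriches each xtref with content, commitHash, and avatarUrl, then writes one JSON file
// and two JS payloads (current and timestamped). The .cache paths below are placeholders.
processXTrefsData(allXTrefs, process.env.GITHUB_API_TOKEN,
  '.cache/xtrefs-data.json', '.cache/xtrefs-data.js', '.cache/xtrefs-data-history.js');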
package/src/pipeline/references/xtref-utils.js
ADDED
@@ -0,0 +1,166 @@
+/**
+ * @file Utility helpers for identifying, parsing, and aggregating external term references (xref/tref).
+ *
+ * These functions were previously embedded in the monolithic `collect-external-references` module.
+ * Splitting them into a dedicated utility keeps the collection pipeline focused on orchestration
+ * and makes the primitives easier to reuse in other reference-aware stages.
+ */
+
+const { externalReferences, utils } = require('../../utils/regex-patterns');
+
+/**
+ * Checks if a specific xtref is present in the markdown content.
+ *
+ * @param {{ externalSpec: string, term: string }} xtref - Reference descriptor.
+ * @param {string} markdownContent - Markdown text to inspect.
+ * @returns {boolean} True when the reference is found.
+ */
+function isXTrefInMarkdown(xtref, markdownContent) {
+  const regexTerm = utils.createXTrefRegex(xtref.externalSpec, xtref.term);
+  return regexTerm.test(markdownContent);
+}
+
+/**
+ * Finds a reference across multiple markdown files.
+ *
+ * @param {{ externalSpec: string, term: string }} xtref - Reference descriptor.
+ * @param {Map<string, string>} fileContents - Markdown contents keyed by filename.
+ * @returns {boolean} True when the reference is found in any file.
+ */
+function isXTrefInAnyFile(xtref, fileContents) {
+  for (const content of fileContents.values()) {
+    if (isXTrefInMarkdown(xtref, content)) {
+      return true;
+    }
+  }
+  return false;
+}
+
+/**
+ * Adds a pre-parsed xtref object to the aggregated collection.
+ * This function handles deduplication and source file tracking.
+ *
+ * @param {object} xtrefObject - Pre-parsed xtref object from template-tag-parser
+ * @param {{ xtrefs: Array<object> }} allXTrefs - Aggregated reference collection
+ * @param {string|null} filename - Originating filename for bookkeeping
+ * @returns {{ xtrefs: Array<object> }} Updated reference collection
+ */
+function addXtrefToCollection(xtrefObject, allXTrefs, filename = null) {
+  const referenceType = xtrefObject.referenceType;
+  const cleanXTrefObj = { ...xtrefObject };
+  delete cleanXTrefObj.referenceType;
+
+  const existingIndex = allXTrefs?.xtrefs?.findIndex(existingXTref =>
+    existingXTref.term === cleanXTrefObj.term &&
+    existingXTref.externalSpec === cleanXTrefObj.externalSpec
+  );
+
+  if (existingIndex === -1) {
+    if (filename) {
+      cleanXTrefObj.sourceFiles = [{ file: filename, type: referenceType }];
+    }
+    allXTrefs.xtrefs.push(cleanXTrefObj);
+    return allXTrefs;
+  }
+
+  if (!filename) {
+    return allXTrefs;
+  }
+
+  const existingXTref = allXTrefs.xtrefs[existingIndex];
+
+  // Update the existing entry with new data to handle changes in aliases
+  // Preserve the existing sourceFiles array and extend it with new entries
+  const existingSourceFiles = existingXTref.sourceFiles || [];
+
+  // Smart merge: Priority is given to tref over xref for properties like aliases
+  // If the new reference is an xref and existing has tref data, preserve tref properties
+  const hasExistingTref = existingSourceFiles.some(sf => sf.type === 'tref');
+  const isNewXref = referenceType === 'xref';
+
+  if (hasExistingTref && isNewXref) {
+    // Don't overwrite tref data with xref data - just merge xref aliases
+    // Keep existing tref aliases and properties, but add xref aliases
+    if (cleanXTrefObj.xrefAliases && cleanXTrefObj.xrefAliases.length > 0) {
+      existingXTref.xrefAliases = cleanXTrefObj.xrefAliases;
+      existingXTref.firstXrefAlias = cleanXTrefObj.firstXrefAlias;
+    }
+  } else if (!hasExistingTref && isNewXref) {
+    // New xref with no existing tref - initialize empty tref arrays
+    Object.assign(existingXTref, cleanXTrefObj);
+    if (!existingXTref.trefAliases) {
+      existingXTref.trefAliases = [];
+    }
+  } else {
+    // Update with new tref data (either new tref, or updating existing tref)
+    Object.assign(existingXTref, cleanXTrefObj);
+
+    // Ensure xref arrays exist if not present in new object
+    if (!cleanXTrefObj.xrefAliases && !existingXTref.xrefAliases) {
+      existingXTref.xrefAliases = [];
+    }
+
+    // Handle properties that should be removed when not present in the new object
+    if (!cleanXTrefObj.hasOwnProperty('firstTrefAlias') && existingXTref.hasOwnProperty('firstTrefAlias')) {
+      delete existingXTref.firstTrefAlias;
+    }
+    if (!cleanXTrefObj.hasOwnProperty('firstXrefAlias') && existingXTref.hasOwnProperty('firstXrefAlias')) {
+      delete existingXTref.firstXrefAlias;
+    }
+  }
+
+  // Restore and update the sourceFiles array
+  existingXTref.sourceFiles = existingSourceFiles;
+
+  if (filename) {
+    const newEntry = { file: filename, type: referenceType };
+    const alreadyTracked = existingXTref.sourceFiles.some(entry =>
+      entry.file === filename && entry.type === referenceType
+    );
+
+    if (!alreadyTracked) {
+      existingXTref.sourceFiles.push(newEntry);
+    }
+  }
+
+  return allXTrefs;
+}
+
+/**
+ * Adds new references discovered in markdown to an aggregated collection.
+ * This function uses external parsing to maintain separation of concerns
+ * between parsing and collection logic.
+ *
+ * @param {string} markdownContent - Markdown text to scan.
+ * @param {{ xtrefs: Array<object> }} allXTrefs - Aggregated reference collection.
+ * @param {string|null} filename - Originating filename for bookkeeping.
+ * @param {function} processXTrefObject - Parsing function for xtref strings.
+ * @returns {{ xtrefs: Array<object> }} Updated reference collection.
+ */
+function addNewXTrefsFromMarkdown(markdownContent, allXTrefs, filename = null, processXTrefObject) {
+  if (!processXTrefObject) {
+    throw new Error('processXTrefObject function is required. Import from template-tag-parser.');
+  }
+
+  const regex = externalReferences.allXTrefs;
+
+  if (!regex.test(markdownContent)) {
+    return allXTrefs;
+  }
+
+  const xtrefs = markdownContent.match(regex) || [];
+
+  xtrefs.forEach(rawXtref => {
+    const xtrefObject = processXTrefObject(rawXtref);
+    addXtrefToCollection(xtrefObject, allXTrefs, filename);
+  });
+
+  return allXTrefs;
+}
+
+module.exports = {
+  isXTrefInMarkdown,
+  isXTrefInAnyFile,
+  addXtrefToCollection,
+  addNewXTrefsFromMarkdown
+};
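For orientation, addXtrefToCollection can be exercised on its own with pre-parsed objects (the spec name, term, and filenames below are illustrative); deduplication and source-file tracking work as follows.

const { addXtrefToCollection } = require('./xtref-utils');

const collection = { xtrefs: [] };

// First sighting: a tref to 'soil' found in terms-and-definitions-intro.md.
addXtrefToCollection(
  { externalSpec: 'example-spec', term: 'soil', referenceType: 'tref' },
  collection,
  'terms-and-definitions-intro.md'
);

// Same term referenced again as an xref elsewhere: the entry is deduplicated,
// tref properties are preserved, and the new source file is tracked.
addXtrefToCollection(
  { externalSpec: 'example-spec', term: 'soil', referenceType: 'xref' },
  collection,
  'spec.md'
);

console.log(collection.xtrefs.length);         // 1
console.log(collection.xtrefs[0].sourceFiles); // [{ file: 'terms-and-definitions-intro.md', type: 'tref' }, { file: 'spec.md', type: 'xref' }]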
package/src/pipeline/rendering/render-spec-document.js
ADDED
@@ -0,0 +1,146 @@
+/**
+ * Handles the core rendering logic for a single spec.
+ * Processes markdown files, renders HTML, applies fixes, and writes output.
+ * It takes shared variables and config as parameters to maintain modularity.
+ */
+
+const fs = require('fs-extra');
+const path = require('path');
+
+const { fetchExternalSpecs, validateReferences, mergeXrefTermsIntoAllXTrefs } = require('../references/external-references-service.js');
+const { processEscapedTags, restoreEscapedTags } = require('../preprocessing/escape-placeholder-utils.js');
+const { sortDefinitionTermsInHtml, fixDefinitionListStructure } = require('../postprocessing/definition-list-postprocessor.js');
+const { getGithubRepoInfo } = require('../../utils/git-info.js');
+const { templateTags } = require('../../utils/regex-patterns.js');
+
+const { createScriptElementWithXTrefDataForEmbeddingInHtml, applyReplacers } = require('./render-utils.js');
+
+async function render(spec, assets, sharedVars, config, template, assetsGlobal, Logger, md, externalSpecsList) {
+  let { externalReferences } = sharedVars;
+
+  try {
+    global.noticeTitles = {};
+    global.specGroups = {};
+    Logger.info('Rendering: ' + spec.title);
+
+    function interpolate(template, variables) {
+      return template.replace(templateTags.variableInterpolation, (match, p1) => variables[p1.trim()]);
+    }
+
+    // Add current date in 'DD Month YYYY' format for template injection
+    const date = new Date();
+    const day = String(date.getDate()).padStart(2, '0');
+    const month = date.toLocaleString('en-US', { month: 'long' });
+    const year = date.getFullYear();
+    const currentDate = `${day} ${month} ${year}`;
+
+    // Add universal timestamp in ISO 8601 format for template injection
+    const universalTimestamp = date.toISOString();
+
+    // Read all markdown files into an array
+    const docs = await Promise.all(
+      (spec.markdown_paths || ['spec.md']).map(_path =>
+        fs.readFile(spec.spec_directory + _path, 'utf8')
+      )
+    );
+
+    const features = (({ source, logo }) => ({ source, logo }))(spec);
+    if (spec.external_specs && !externalReferences) {
+      // Fetch xref terms and merge them into allXTrefs instead of creating DOM HTML
+      const xrefTerms = await fetchExternalSpecs(spec);
+
+      // Define paths for the xtrefs data files
+      const outputPathJSON = path.join('.cache', 'xtrefs-data.json');
+      const outputPathJS = path.join('.cache', 'xtrefs-data.js');
+
+      // Merge xref terms into the unified allXTrefs structure
+      await mergeXrefTermsIntoAllXTrefs(xrefTerms, outputPathJSON, outputPathJS);
+
+      // Set flag to indicate external references have been processed
+      externalReferences = true; // Changed from HTML array to boolean flag
+    }
+
+    // Find the index of the terms-and-definitions-intro.md file
+    const termsIndex = (spec.markdown_paths || ['spec.md']).indexOf('terms-and-definitions-intro.md');
+    if (termsIndex !== -1) {
+      // Append the HTML string to the content of terms-and-definitions-intro.md. This string is used to create a div that is used to insert an alphabet index, and a div that is used as the starting point of the terminology index. The newlines are essential for the correct rendering of the markdown.
+      docs[termsIndex] += '\n\n<div id="terminology-section-start"></div>\n\n';
+    }
+
+    // Set up file tracking for definitions before rendering
+    for (let i = 0; i < docs.length; i++) {
+      global.currentFile = spec.markdown_paths[i] || 'unknown';
+      docs[i] = `<!-- file: ${global.currentFile} -->\n${docs[i]}`;
+    }
+
+    // Concatenate all file contents into one string, separated by newlines
+    let doc = docs.join("\n");
+
+    // Handles backslash escape mechanism for substitution tags
+    // Phase 1: Pre-processing - Handle escaped tags
+    doc = processEscapedTags(doc);
+
+    // Handles backslash escape mechanism for substitution tags
+    // Phase 2: Tag Processing - Apply normal substitution logic
+    doc = applyReplacers(doc);
+
+    md[spec.katex ? "enable" : "disable"](['math_block', 'math_inline']);
+
+    // `render` is the rendered HTML
+    let renderedHtml = md.render(doc);
+
+    // Apply the fix for broken definition list structures
+    renderedHtml = fixDefinitionListStructure(renderedHtml);
+
+    // Sort definition terms case-insensitively before final rendering
+    renderedHtml = sortDefinitionTermsInHtml(renderedHtml);
+
+    // Handles backslash escape mechanism for substitution tags
+    // Phase 3: Post-processing - Restore escaped sequences as literals
+    renderedHtml = restoreEscapedTags(renderedHtml);
+
+    // External references are now stored in allXTrefs instead of DOM HTML
+    // No longer need to inject external references HTML into the template
+
+    const templateInterpolated = interpolate(template, {
+      title: spec.title,
+      description: spec.description,
+      author: spec.author,
+      toc: global.toc,
+      render: renderedHtml,
+      assetsHead: assets.head,
+      assetsBody: assets.body,
+      assetsSvg: assets.svg,
+      features: Object.keys(features).join(' '),
+      externalReferences: '', // No longer inject DOM HTML - xrefs are in allXTrefs
+      xtrefsData: createScriptElementWithXTrefDataForEmbeddingInHtml(),
+      specLogo: spec.logo,
+      specFavicon: spec.favicon,
+      specLogoLink: spec.logo_link,
+      spec: JSON.stringify(spec),
+      externalSpecsList: externalSpecsList,
+      currentDate: currentDate,
+      universalTimestamp: universalTimestamp,
+      githubRepoInfo: getGithubRepoInfo(spec)
+    });
+
+    const outputPath = path.join(spec.destination, 'index.html');
+    Logger.info('Attempting to write to:', outputPath);
+
+    // Use promisified version instead of callback
+    await fs.promises.writeFile(outputPath, templateInterpolated, 'utf8');
+    Logger.success(`Successfully wrote ${outputPath}`);
+
+    validateReferences(global.references, global.definitions, renderedHtml);
+    global.references = [];
+    global.definitions = [];
+  } catch (e) {
+    Logger.error("Render error: " + e.message);
+    throw e;
+  }
+
+  // Update sharedVars
+  sharedVars.externalReferences = externalReferences;
+}
+
+module.exports = { render };