spec-up-t 1.2.8 → 1.3.0-beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/copilot-instructions.md +3 -1
- package/assets/compiled/body.js +5 -4
- package/assets/compiled/head.css +1 -0
- package/assets/compiled/refs.json +1 -1
- package/assets/css/highlight-heading-plus-sibling-nodes.css +6 -0
- package/assets/css/index.css +9 -0
- package/assets/js/addAnchorsToTerms.js +13 -5
- package/assets/js/collapse-definitions.js +0 -6
- package/assets/js/fix-last-dd.js +6 -3
- package/assets/js/highlight-heading-plus-sibling-nodes.js +258 -0
- package/assets/js/insert-trefs.js +32 -28
- package/config/asset-map.json +2 -0
- package/gulpfile.js +8 -2
- package/index.js +45 -241
- package/package.json +2 -1
- package/sonar-project.properties +6 -0
- package/src/collect-external-references.js +22 -11
- package/src/collect-external-references.test.js +153 -2
- package/src/collectExternalReferences/fetchTermsFromIndex.js +65 -110
- package/src/collectExternalReferences/processXTrefsData.js +9 -11
- package/src/create-docx.js +332 -0
- package/src/create-pdf.js +243 -122
- package/src/escape-handler.js +67 -0
- package/src/fix-markdown-files.js +31 -34
- package/src/html-dom-processor.js +290 -0
- package/src/init.js +3 -0
- package/src/install-from-boilerplate/boilerplate/.github/workflows/menu.yml +4 -13
- package/src/install-from-boilerplate/boilerplate/spec/example-markup-in-markdown.md +0 -1
- package/src/install-from-boilerplate/boilerplate/spec/terms-and-definitions-intro.md +1 -5
- package/src/install-from-boilerplate/config-scripts-keys.js +4 -4
- package/src/install-from-boilerplate/menu.sh +6 -6
- package/src/markdown-it-extensions.js +60 -31
- package/src/references.js +18 -6
- package/templates/template.html +2 -0
- package/test-default-definitions.js +55 -0
- package/test-edge-cases.md +20 -0
- package/test-fix-markdown.js +11 -0
- package/test-no-def.md +22 -0
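
The functional theme of this diff is optional alias support for external term references (`[[tref: ...]]` and `[[xref: ...]]` tags). For orientation before the per-file diffs below, here is a small illustrative list of the tag forms the changed code and tests exercise (examples lifted from the test cases further down; not an exhaustive syntax reference):

```js
// Tag forms handled after this change (taken from the diffs/tests that follow):
const examples = [
  '[[tref:specA, termA]]',                                             // existing two-argument form, still supported
  '[[tref:specA, termA, aliasA]]',                                     // new: optional third argument is an alias
  '[[xref:vlei1, vlei-ecosystem-governance-framework, vEGF]]',         // xref accepts an alias too
  '[[tref: vlei-glossary, vlei-ecosystem-governance-framework, vegf]]' // extra spaces are tolerated
];
```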
package/index.js
CHANGED

```diff
@@ -20,6 +20,8 @@ module.exports = async function (options = {}) {
 const { createTermIndex } = require('./src/create-term-index.js');
 createTermIndex();
 
+const { processWithEscapes } = require('./src/escape-handler.js');
+
 const { insertTermIndex } = require('./src/insert-term-index.js');
 insertTermIndex();
 
@@ -36,6 +38,7 @@ module.exports = async function (options = {}) {
 
 const { fixMarkdownFiles } = require('./src/fix-markdown-files.js');
 const { processEscapedTags, restoreEscapedTags } = require('./src/escape-mechanism.js');
+const { sortDefinitionTermsInHtml, fixDefinitionListStructure } = require('./src/html-dom-processor.js');
 
 let template = fs.readFileSync(path.join(modulePath, 'templates/template.html'), 'utf8');
 let assets = fs.readJsonSync(modulePath + '/config/asset-map.json');
@@ -69,7 +72,13 @@ module.exports = async function (options = {}) {
 linkify: true,
 typographer: true
 })
+/*
+Configures a Markdown-it plugin by passing it an array of extension objects, each responsible for handling specific custom syntax in Markdown documents.
+*/
 .use(require('./src/markdown-it-extensions.js'), [
+/*
+The first extension (= the first configuration object = the first element of the array) focuses on terminology-related constructs, using a filter to match types against a regular expression (terminologyRegex).
+*/
 {
 filter: type => type.match(terminologyRegex),
 parse(token, type, primary) {
@@ -90,7 +99,20 @@ module.exports = async function (options = {}) {
 href="${url}#term:${term}">${token.info.args[1]}</a>`;
 }
 else if (type === 'tref') {
-
+// Support tref with optional alias: [[tref: spec, term, alias]]
+const termName = token.info.args[1];
+const alias = token.info.args[2]; // Optional alias
+
+// Create IDs for both the original term and the alias to enable referencing by either
+const termId = `term:${termName.replace(spaceRegex, '-').toLowerCase()}`;
+const aliasId = alias ? `term:${alias.replace(spaceRegex, '-').toLowerCase()}` : '';
+
+// Return the term structure similar to def, so it can be processed by markdown-it's definition list parser
+if (aliasId && alias !== termName) {
+return `<span class="transcluded-xref-term" id="${termId}"><span id="${aliasId}">${termName}</span></span>`;
+} else {
+return `<span class="transcluded-xref-term" id="${termId}">${termName}</span>`;
+}
 }
 else {
 references.push(primary);
@@ -98,6 +120,9 @@ module.exports = async function (options = {}) {
 }
 }
 },
+/*
+The second extension is designed for handling specification references.
+*/
 {
 filter: type => type.match(specNameRegex),
 parse(token, type, name) {
@@ -172,7 +197,7 @@ module.exports = async function (options = {}) {
 .use(require('@traptitech/markdown-it-katex'))
 
 const katexRules = ['math_block', 'math_inline'];
-const replacerRegex = /\[\[\s*([^\s
+const replacerRegex = /\[\[\s*([^\s[\]:]+):?\s*([^\]\n]+)?\]\]/img;
 const replacerArgsRegex = /\s*,+\s*/;
 const replacers = [
 {
@@ -181,36 +206,6 @@ module.exports = async function (options = {}) {
 if (!path) return '';
 return fs.readFileSync(path, 'utf8');
 }
-},
-{
-test: 'spec',
-transform: function (originalMatch, type, name) {
-// Simply return an empty string or special marker that won't be treated as a definition term
-// The actual rendering will be handled by the markdown-it extension
-return `<span class="spec-marker" data-spec="${name}"></span>`;
-}
-},
-/**
- * Custom replacer for tref tags that converts them directly to HTML definition term elements.
- *
- * This is a critical part of our solution for fixing transcluded terms in definition lists.
- * When a [[tref:spec,term]] tag is found in the markdown, this replacer transforms it into
- * a proper <dt> element with the appropriate structure before the markdown parser processes it.
- *
- * By directly generating the HTML structure (instead of letting the markdown-it parser
- * handle it later), we prevent the issue where transcluded terms break the definition list.
- *
- * @param {string} originalMatch - The original [[tref:spec,term]] tag found in the markdown
- * @param {string} type - The tag type ('tref')
- * @param {string} spec - The specification identifier (e.g., 'wot-1')
- * @param {string} term - The term to transclude (e.g., 'DAR')
- * @returns {string} - HTML representation of the term as a dt element
- */
-{
-test: 'tref',
-transform: function (originalMatch, type, spec, term) {
-return `<dt class="transcluded-xref-term"><span class="transcluded-xref-term" id="term:${term.replace(/\s+/g, '-').toLowerCase()}">${term}</span></dt>`;
-}
 }
 ];
 
@@ -245,13 +240,16 @@ module.exports = async function (options = {}) {
 * @returns {string} - The processed document with tags replaced by their HTML equivalents
 */
 function applyReplacers(doc) {
-
-
-
-let
-
-
-
+// Use the escape handler for three-phase processing
+return processWithEscapes(doc, function(content) {
+return content.replace(replacerRegex, function (match, type, args) {
+let replacer = replacers.find(r => type.trim().match(r.test));
+if (replacer) {
+let argsArray = args ? args.trim().split(replacerArgsRegex) : [];
+return replacer.transform(match, type, ...argsArray);
+}
+return match;
+});
 });
 }
 
@@ -297,208 +295,6 @@ module.exports = async function (options = {}) {
 throw Error("katex distribution could not be located");
 }
 
-function sortDefinitionTermsInHtml(html) {
-const { JSDOM } = require('jsdom');
-const dom = new JSDOM(html);
-const document = dom.window.document;
-
-// Find the terms and definitions list
-const dlElement = document.querySelector('.terms-and-definitions-list');
-if (!dlElement) return html; // If not found, return the original HTML
-
-// Collect all dt/dd pairs
-const pairs = [];
-let currentDt = null;
-let currentDds = [];
-
-// Process each child of the dl element
-Array.from(dlElement.children).forEach(child => {
-if (child.tagName === 'DT') {
-// If we already have a dt, save the current pair
-if (currentDt) {
-pairs.push({
-dt: currentDt,
-dds: [...currentDds],
-text: currentDt.textContent.trim().toLowerCase() // Use lowercase for sorting
-});
-currentDds = []; // Reset dds for the next dt
-}
-currentDt = child;
-} else if (child.tagName === 'DD' && currentDt) {
-currentDds.push(child);
-}
-});
-
-// Add the last pair if exists
-if (currentDt) {
-pairs.push({
-dt: currentDt,
-dds: [...currentDds],
-text: currentDt.textContent.trim().toLowerCase()
-});
-}
-
-// Sort pairs case-insensitively
-pairs.sort((a, b) => a.text.localeCompare(b.text));
-
-// Clear the dl element
-while (dlElement.firstChild) {
-dlElement.removeChild(dlElement.firstChild);
-}
-
-// Re-append elements in sorted order
-pairs.forEach(pair => {
-dlElement.appendChild(pair.dt);
-pair.dds.forEach(dd => {
-dlElement.appendChild(dd);
-});
-});
-
-// Return the modified HTML
-return dom.serialize();
-}
-
-// Function to fix broken definition list structures
-/**
- * This function repairs broken definition list (dl) structures in the HTML output.
- * Specifically, it addresses the issue where transcluded terms (tref tags) break
- * out of the definition list, creating separate lists instead of a continuous one.
- *
- * The strategy:
- * 1. Find all definition lists (dl elements) in the document
- * 2. Use the dl with class 'terms-and-definitions-list' as the main/target list
- * 3. Process each subsequent node after the this main dl:
- *    - If another dl is found, merge all its children into the main dl
- *    - If a standalone dt is found, move it into the main dl
- *    - Remove any empty paragraphs that might be breaking the list continuity
- *
- * This ensures all terms appear in one continuous definition list,
- * regardless of how they were originally rendered in the markdown.
- *
- * @param {string} html - The HTML content to fix
- * @returns {string} - The fixed HTML content with merged definition lists
- */
-function fixDefinitionListStructure(html) {
-const { JSDOM } = require('jsdom');
-const dom = new JSDOM(html);
-const document = dom.window.document;
-
-// Find all dl elements first
-const allDls = Array.from(document.querySelectorAll('dl'));
-
-// Then filter to find the one with the terms-and-definitions-list class
-const dlElements = allDls.filter(dl => {
-return dl?.classList?.contains('terms-and-definitions-list');
-});
-
-// Find any transcluded term dt elements anywhere in the document
-const transcludedTerms = document.querySelectorAll('dt.transcluded-xref-term');
-
-let mainDl = null;
-
-// If we have an existing dl with the terms-and-definitions-list class, use it
-if (dlElements.length > 0) {
-mainDl = dlElements[0]; // Use the first one
-}
-// If we have transcluded terms but no main dl, we need to create one
-else if (transcludedTerms.length > 0) {
-// Create a new dl element with the right class
-mainDl = document.createElement('dl');
-mainDl.className = 'terms-and-definitions-list';
-
-// Look for the marker
-const marker = document.getElementById('terminology-section-start');
-
-if (marker) {
-// Insert the new dl right after the marker
-if (marker.nextSibling) {
-marker.parentNode.insertBefore(mainDl, marker.nextSibling);
-} else {
-marker.parentNode.appendChild(mainDl);
-}
-} else {
-// Fallback to the original approach if marker isn't found
-const firstTerm = transcludedTerms[0];
-const insertPoint = firstTerm.parentNode;
-insertPoint.parentNode.insertBefore(mainDl, insertPoint);
-}
-}
-
-// Safety check - if we still don't have a mainDl, exit early to avoid null reference errors
-if (!mainDl) {
-return html; // Return the original HTML without modifications
-}
-
-// Now process all transcluded terms and other dt elements
-transcludedTerms.forEach(dt => {
-// Check if this dt is not already inside our main dl
-if (dt.parentElement !== mainDl) {
-// Move it into the main dl
-const dtClone = dt.cloneNode(true);
-mainDl.appendChild(dtClone);
-dt.parentNode.removeChild(dt);
-}
-});
-
-// First special case - handle transcluded-xref-term dt that comes BEFORE the main dl
-const transcludedTermsBeforeMainDl = document.querySelectorAll('dt.transcluded-xref-term');
-
-// Special handling for transcluded terms that appear BEFORE the main dl
-transcludedTermsBeforeMainDl.forEach(dt => {
-// Check if this dt is not already inside our main list
-if (dt.parentElement !== mainDl) {
-// This is a dt outside our main list - move it into the main dl
-const dtClone = dt.cloneNode(true);
-mainDl.appendChild(dtClone);
-dt.parentNode.removeChild(dt);
-}
-});
-
-// Remove any empty dt elements that may exist
-const emptyDts = mainDl.querySelectorAll('dt:empty');
-emptyDts.forEach(emptyDt => {
-emptyDt.parentNode.removeChild(emptyDt);
-});
-
-// Process all subsequent content after the main dl
-let currentNode = mainDl.nextSibling;
-
-// Process all subsequent content
-while (currentNode) {
-// Save the next node before potentially modifying the DOM
-const nextNode = currentNode.nextSibling;
-
-// Handle different node types
-if (currentNode.nodeType === 1) { // 1 = Element node
-if (currentNode.tagName === 'DL') {
-// Found another definition list - move all its children to the main dl
-while (currentNode.firstChild) {
-mainDl.appendChild(currentNode.firstChild);
-}
-// Remove the now-empty dl element
-currentNode.parentNode.removeChild(currentNode);
-}
-else if (currentNode.tagName === 'DT') {
-// Found a standalone dt - move it into the main dl
-const dtClone = currentNode.cloneNode(true);
-mainDl.appendChild(dtClone);
-currentNode.parentNode.removeChild(currentNode);
-}
-else if (currentNode.tagName === 'P' &&
-(!currentNode.textContent || currentNode.textContent.trim() === '')) {
-// Remove empty paragraphs - these break the list structure
-currentNode.parentNode.removeChild(currentNode);
-}
-}
-
-// Move to the next node we saved earlier
-currentNode = nextNode;
-}
-
-// Return the fixed HTML
-return dom.serialize();
-}
-
 async function render(spec, assets) {
 try {
 noticeTitles = {};
@@ -509,6 +305,13 @@ module.exports = async function (options = {}) {
 return template.replace(/\${(.*?)}/g, (match, p1) => variables[p1.trim()]);
 }
 
+// Add current date in 'DD Month YYYY' format for template injection
+const date = new Date();
+const day = String(date.getDate()).padStart(2, '0');
+const month = date.toLocaleString('en-US', { month: 'long' });
+const year = date.getFullYear();
+const currentDate = `${day} ${month} ${year}`;
+
 const docs = await Promise.all(
 (spec.markdown_paths || ['spec.md']).map(_path =>
 fs.readFile(spec.spec_directory + _path, 'utf8')
@@ -574,6 +377,7 @@ module.exports = async function (options = {}) {
 specLogoLink: spec.logo_link,
 spec: JSON.stringify(spec),
 externalSpecsList: externalSpecsList,
+currentDate: currentDate
 });
 
 const outputPath = path.join(spec.destination, 'index.html');
```
package/package.json
CHANGED

```diff
@@ -1,6 +1,6 @@
 {
 "name": "spec-up-t",
-"version": "1.
+"version": "1.3.0-beta",
 "description": "Technical specification drafting tool that generates rich specification documents from markdown. Forked from https://github.com/decentralized-identity/spec-up by Daniel Buchner (https://github.com/csuwildcat)",
 "main": "./index",
 "repository": {
@@ -28,6 +28,7 @@
 "axios": "^1.7.7",
 "dedent": "^1.5.3",
 "diff": "^7.0.0",
+"docx": "^8.5.0",
 "dotenv": "^16.4.7",
 "find-pkg-dir": "^2.0.0",
 "fs-extra": "^11.3.0",
```
package/src/collect-external-references.js
CHANGED

```diff
@@ -54,26 +54,37 @@ const readlineSync = require('readline-sync');
 * @returns {boolean} True if the xtref is found in the content
 */
 function isXTrefInMarkdown(xtref, markdownContent) {
-
-
+// Escape special regex characters in externalSpec and term
+const escapedSpec = xtref.externalSpec.replace(/[.*+?^${}()|[\]\\-]/g, '\\$&');
+const escapedTerm = xtref.term.replace(/[.*+?^${}()|[\]\\-]/g, '\\$&');
+
+// Check for both the term and with any alias (accounting for spaces)
+const regexTerm = new RegExp(`\\[\\[(?:x|t)ref:\\s*${escapedSpec},\\s*${escapedTerm}(?:,\\s*[^\\]]+)?\\]\\]`, 'g');
+return regexTerm.test(markdownContent);
 }
 
 /**
 * Helper function to process an XTref string and return an object.
 *
 * @param {string} xtref - The xtref string to process
-* @returns {Object} An object with externalSpec and
+* @returns {Object} An object with externalSpec, term, and optional alias properties
 */
 function processXTref(xtref) {
-
+const parts = xtref
 .replace(/\[\[(?:xref|tref):/, '')
 .replace(/\]\]/, '')
 .trim()
-.split(
+.split(/,/);
+
 const xtrefObject = {
-externalSpec:
-term:
+externalSpec: parts[0].trim(),
+term: parts[1].trim()
 };
+
+// Add alias if provided (third parameter)
+if (parts.length > 2 && parts[2].trim()) {
+xtrefObject.alias = parts[2].trim();
+}
 
 return xtrefObject;
 }
@@ -129,6 +140,7 @@ function extendXTrefs(config, xtrefs) {
 xtref.owner = urlParts[1];
 xtref.repo = urlParts[2];
 xtref.avatarUrl = repo.avatar_url;
+xtref.ghPageUrl = repo.gh_page; // Add GitHub Pages URL
 }
 });
 
@@ -149,9 +161,8 @@
 *
 * @param {Object} config - The configuration object from specs.json
 * @param {string} GITHUB_API_TOKEN - The GitHub API token
-* @param {Object} options - Configuration options
 */
-function processExternalReferences(config, GITHUB_API_TOKEN
+function processExternalReferences(config, GITHUB_API_TOKEN) {
 const { processXTrefsData } = require('./collectExternalReferences/processXTrefsData.js');
 const { doesUrlExist } = require('./utils/doesUrlExist.js');
 const externalSpecsRepos = config.specs[0].external_specs;
@@ -257,7 +268,7 @@ function processExternalReferences(config, GITHUB_API_TOKEN, options) {
 // }
 // ]
 
-processXTrefsData(allXTrefs, GITHUB_API_TOKEN, outputPathJSON, outputPathJS, outputPathJSTimeStamped
+processXTrefsData(allXTrefs, GITHUB_API_TOKEN, outputPathJSON, outputPathJS, outputPathJSTimeStamped);
 }
 
 /**
@@ -320,7 +331,7 @@ function collectExternalReferences(options = {}) {
 return;
 }
 } else {
-processExternalReferences(config, GITHUB_API_TOKEN
+processExternalReferences(config, GITHUB_API_TOKEN);
 }
 }
 
```
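
A standalone sketch of how the updated matcher tolerates the new optional alias; it restates the regex shown in the hunk above rather than importing the package, and the sample inputs are taken from the test cases that follow:

```js
// Mirrors the updated isXTrefInMarkdown above: the optional (?:,\s*[^\]]+)? group
// lets a trailing alias (third argument) appear without breaking the match.
function isXTrefInMarkdown(xtref, markdownContent) {
  const escapedSpec = xtref.externalSpec.replace(/[.*+?^${}()|[\]\\-]/g, '\\$&');
  const escapedTerm = xtref.term.replace(/[.*+?^${}()|[\]\\-]/g, '\\$&');
  const regexTerm = new RegExp(`\\[\\[(?:x|t)ref:\\s*${escapedSpec},\\s*${escapedTerm}(?:,\\s*[^\\]]+)?\\]\\]`, 'g');
  return regexTerm.test(markdownContent);
}

const ref = { externalSpec: 'vlei-glossary', term: 'vlei-ecosystem-governance-framework' };
console.log(isXTrefInMarkdown(ref, '[[tref: vlei-glossary, vlei-ecosystem-governance-framework, vegf]]')); // true
console.log(isXTrefInMarkdown(ref, '[[xref:vlei-glossary, vlei-ecosystem-governance-framework]]'));        // true
console.log(isXTrefInMarkdown(ref, '[[tref: vlei-glossary, some-other-term]]'));                           // false
```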
package/src/collect-external-references.test.js
CHANGED

```diff
@@ -93,6 +93,46 @@ And here we reference it again using [[tref:kmg-1,authentic-chained-data-contain
 ### Conclusion
 That's all about these references.`,
 shouldMatch: true
+},
+
+// Test cases for aliases - the function should match when the original term exists regardless of alias
+{
+name: 'tref with alias should match based on original term',
+xtref: { externalSpec: 'vlei1', term: 'vlei-ecosystem-governance-framework' },
+markdown: '[[tref:vlei1, vlei-ecosystem-governance-framework, vEGF]]',
+shouldMatch: true
+},
+{
+name: 'xref with alias should match based on original term',
+xtref: { externalSpec: 'vlei1', term: 'vlei-ecosystem-governance-framework' },
+markdown: '[[xref:vlei1, vlei-ecosystem-governance-framework, vEGF]]',
+shouldMatch: true
+},
+{
+name: 'multiple aliases for same term should match',
+xtref: { externalSpec: 'spec1', term: 'long-term-name' },
+markdown: 'Text [[tref:spec1, long-term-name, alias1]] and [[tref:spec1, long-term-name, alias2]]',
+shouldMatch: true
+},
+{
+name: 'tref with spaces in alias should match',
+xtref: { externalSpec: 'spec1', term: 'term1' },
+markdown: '[[tref:spec1, term1, alias with spaces]]',
+shouldMatch: true
+},
+
+// Test case for the specific issue with hyphens and spaces
+{
+name: 'external spec and term with hyphens and alias should match',
+xtref: { externalSpec: 'vlei-glossary', term: 'vlei-ecosystem-governance-framework' },
+markdown: '[[tref: vlei-glossary, vlei-ecosystem-governance-framework, vegf]]',
+shouldMatch: true
+},
+{
+name: 'external spec and term with hyphens without alias should match',
+xtref: { externalSpec: 'vlei-glossary', term: 'vlei-ecosystem-governance-framework' },
+markdown: '[[tref: vlei-glossary, vlei-ecosystem-governance-framework]]',
+shouldMatch: true
 }
 ];
 
@@ -119,12 +159,15 @@ describe('addNewXTrefsFromMarkdown', () => {
 });
 });
 
-it('should not add duplicate xtrefs', () => {
-const markdownContent = "Content [[xref:specA, termA]] and again [[xref:specA, termA]]";
+it('should not add duplicate xtrefs with same spec and term but different aliases', () => {
+const markdownContent = "Content [[xref:specA, termA]] and again [[xref:specA, termA, aliasA]]";
 const allXTrefs = { xtrefs: [] };
 const updatedXTrefs = addNewXTrefsFromMarkdown(markdownContent, allXTrefs);
 
 expect(updatedXTrefs.xtrefs.length).toBe(1);
+expect(updatedXTrefs.xtrefs[0].term).toBe('termA');
+expect(updatedXTrefs.xtrefs[0].externalSpec).toBe('specA');
+// The first one found will be used (without alias in this case)
 });
 
 it('should add multiple distinct xtrefs', () => {
@@ -149,4 +192,112 @@ describe('addNewXTrefsFromMarkdown', () => {
 expect(updatedXTrefs.xtrefs.length).toBe(0);
 });
 
+it('should add a new tref with alias from markdown content', () => {
+const markdownContent = "Some text [[tref:specA, termA, aliasA]] more text";
+const allXTrefs = { xtrefs: [] };
+const updatedXTrefs = addNewXTrefsFromMarkdown(markdownContent, allXTrefs);
+
+expect(updatedXTrefs.xtrefs.length).toBe(1);
+expect(updatedXTrefs.xtrefs[0]).toEqual({
+externalSpec: 'specA',
+term: 'termA',
+alias: 'aliasA'
+});
+});
+
+it('should add a new xref with alias from markdown content', () => {
+const markdownContent = "Some text [[xref:specA, termA, aliasA]] more text";
+const allXTrefs = { xtrefs: [] };
+const updatedXTrefs = addNewXTrefsFromMarkdown(markdownContent, allXTrefs);
+
+expect(updatedXTrefs.xtrefs.length).toBe(1);
+expect(updatedXTrefs.xtrefs[0]).toEqual({
+externalSpec: 'specA',
+term: 'termA',
+alias: 'aliasA'
+});
+});
+
+it('should handle tref without alias (backwards compatibility)', () => {
+const markdownContent = "Some text [[tref:specA, termA]] more text";
+const allXTrefs = { xtrefs: [] };
+const updatedXTrefs = addNewXTrefsFromMarkdown(markdownContent, allXTrefs);
+
+expect(updatedXTrefs.xtrefs.length).toBe(1);
+expect(updatedXTrefs.xtrefs[0]).toEqual({
+externalSpec: 'specA',
+term: 'termA'
+});
+expect(updatedXTrefs.xtrefs[0].alias).toBeUndefined();
+});
+
+});
+
+
+describe('processXTref', () => {
+const processXTref = require('./collect-external-references').processXTref;
+
+it('should process basic xref without alias', () => {
+const xtref = '[[xref:specA,termA]]';
+const result = processXTref(xtref);
+
+expect(result).toEqual({
+externalSpec: 'specA',
+term: 'termA'
+});
+});
+
+it('should process basic tref without alias', () => {
+const xtref = '[[tref:specA,termA]]';
+const result = processXTref(xtref);
+
+expect(result).toEqual({
+externalSpec: 'specA',
+term: 'termA'
+});
+});
+
+it('should process tref with alias', () => {
+const xtref = '[[tref:specA,termA,aliasA]]';
+const result = processXTref(xtref);
+
+expect(result).toEqual({
+externalSpec: 'specA',
+term: 'termA',
+alias: 'aliasA'
+});
+});
+
+it('should process xref with alias', () => {
+const xtref = '[[xref:specA,termA,aliasA]]';
+const result = processXTref(xtref);
+
+expect(result).toEqual({
+externalSpec: 'specA',
+term: 'termA',
+alias: 'aliasA'
+});
+});
+
+it('should handle spaces in parameters', () => {
+const xtref = '[[tref: specA , termA , aliasA ]]';
+const result = processXTref(xtref);
+
+expect(result).toEqual({
+externalSpec: 'specA',
+term: 'termA',
+alias: 'aliasA'
+});
+});
+
+it('should ignore empty alias parameter', () => {
+const xtref = '[[tref:specA,termA,]]';
+const result = processXTref(xtref);
+
+expect(result).toEqual({
+externalSpec: 'specA',
+term: 'termA'
+});
+expect(result.alias).toBeUndefined();
+});
 });
```