@govtechsg/oobee 0.10.83 → 0.10.85
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -1
- package/dist/cli.js +7 -6
- package/dist/constants/common.js +13 -1
- package/dist/crawlers/crawlDomain.js +220 -120
- package/dist/crawlers/crawlIntelligentSitemap.js +22 -7
- package/dist/crawlers/custom/utils.js +81 -40
- package/dist/crawlers/runCustom.js +13 -5
- package/dist/mergeAxeResults/itemReferences.js +55 -0
- package/dist/mergeAxeResults/jsonArtifacts.js +335 -0
- package/dist/mergeAxeResults/scanPages.js +159 -0
- package/dist/mergeAxeResults/sentryTelemetry.js +152 -0
- package/dist/mergeAxeResults/types.js +1 -0
- package/dist/mergeAxeResults/writeCsv.js +125 -0
- package/dist/mergeAxeResults/writeScanDetailsCsv.js +35 -0
- package/dist/mergeAxeResults/writeSitemap.js +10 -0
- package/dist/mergeAxeResults.js +64 -950
- package/dist/proxyService.js +90 -5
- package/dist/utils.js +20 -7
- package/package.json +6 -6
- package/src/cli.ts +20 -15
- package/src/constants/common.ts +13 -1
- package/src/crawlers/crawlDomain.ts +248 -137
- package/src/crawlers/crawlIntelligentSitemap.ts +22 -8
- package/src/crawlers/custom/utils.ts +103 -48
- package/src/crawlers/runCustom.ts +18 -5
- package/src/mergeAxeResults/itemReferences.ts +62 -0
- package/src/mergeAxeResults/jsonArtifacts.ts +451 -0
- package/src/mergeAxeResults/scanPages.ts +207 -0
- package/src/mergeAxeResults/sentryTelemetry.ts +183 -0
- package/src/mergeAxeResults/types.ts +99 -0
- package/src/mergeAxeResults/writeCsv.ts +145 -0
- package/src/mergeAxeResults/writeScanDetailsCsv.ts +51 -0
- package/src/mergeAxeResults/writeSitemap.ts +13 -0
- package/src/mergeAxeResults.ts +125 -1344
- package/src/proxyService.ts +96 -4
- package/src/utils.ts +19 -7
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import { createWriteStream } from 'fs';
|
|
2
|
+
import { AsyncParser } from '@json2csv/node';
|
|
3
|
+
import { a11yRuleShortDescriptionMap } from '../constants/constants.js';
|
|
4
|
+
/**
 * Streams the aggregated accessibility scan results to `${storagePath}/report.csv`.
 *
 * The bulk of the rows are produced by @json2csv/node's AsyncParser using two
 * transforms (category flattening, then per-item row expansion). Any pages that
 * were skipped during the scan (`allIssues.pagesNotScanned`) are appended as
 * extra "error" rows after the parser finishes.
 *
 * @param {object} allIssues  - aggregated scan results (items, pagesNotScanned,
 *                              customFlowLabel, deviceChosen, endTime, ...)
 * @param {string} storagePath - directory the report.csv is written into
 */
const writeCsv = async (allIssues, storagePath) => {
    const csvOutput = createWriteStream(`${storagePath}/report.csv`, { encoding: 'utf8' });
    // PDF items carry a page number instead of an HTML snippet; -1 (or any
    // negative value) means the issue applies to the whole document.
    const formatPageViolation = (pageNum) => {
        if (pageNum < 0)
            return 'Document';
        return `Page ${pageNum}`;
    };
    // RFC 4180-style escaping for the rows appended by hand below. The
    // json2csv parser escapes its own rows, but the original code joined the
    // skipped-page values with ',' unescaped, which produced malformed CSV
    // whenever a value (error metadata, URL, ...) contained a comma, quote,
    // or newline.
    const escapeCsvValue = (value) => {
        const str = value == null ? '' : String(value);
        if (/[",\r\n]/.test(str)) {
            return `"${str.replace(/"/g, '""')}"`;
        }
        return str;
    };
    // transform allIssues into the form:
    // [['mustFix', rule1], ['mustFix', rule2], ['goodToFix', rule3], ...]
    const getRulesByCategory = (issues) => {
        return Object.entries(issues.items)
            .filter(([category]) => category !== 'passed')
            .reduce((prev, [category, value]) => {
                const rulesEntries = Object.entries(value.rules);
                rulesEntries.forEach(([, ruleInfo]) => {
                    prev.push([category, ruleInfo]);
                });
                return prev;
            }, [])
            .sort((a, b) => {
                // sort rules according to severity, then ruleId
                const compareCategory = -a[0].localeCompare(b[0]);
                return compareCategory === 0 ? a[1].rule.localeCompare(b[1].rule) : compareCategory;
            });
    };
    // Expands one [severity, rule] pair into one CSV row per affected item.
    const flattenRule = (catAndRule) => {
        const [severity, rule] = catAndRule;
        const results = [];
        const { rule: issueId, description: issueDescription, axeImpact, conformance, pagesAffected, helpUrl: learnMore, } = rule;
        // format clauses as a string
        const wcagConformance = conformance.join(',');
        pagesAffected.sort((a, b) => a.url.localeCompare(b.url));
        pagesAffected.forEach(affectedPage => {
            const { url, items } = affectedPage;
            items.forEach(item => {
                const { html, message, xpath } = item;
                const page = item.page;
                const howToFix = message.replace(/(\r\n|\n|\r)/g, '\\n'); // preserve newlines as \n
                const violation = html || formatPageViolation(page); // page is a number, not a string
                const context = violation.replace(/(\r\n|\n|\r)/g, ''); // remove newlines
                results.push({
                    customFlowLabel: allIssues.customFlowLabel || '',
                    deviceChosen: allIssues.deviceChosen || '',
                    scanCompletedAt: allIssues.endTime ? allIssues.endTime.toISOString() : '',
                    severity: severity || '',
                    issueId: issueId || '',
                    issueDescription: a11yRuleShortDescriptionMap[issueId] || issueDescription || '',
                    wcagConformance: wcagConformance || '',
                    url: url || '',
                    pageTitle: affectedPage.pageTitle || 'No page title',
                    context: context || '',
                    howToFix: howToFix || '',
                    axeImpact: axeImpact || '',
                    xpath: xpath || '',
                    learnMore: learnMore || '',
                });
            });
        });
        if (results.length === 0)
            return {};
        return results;
    };
    const opts = {
        transforms: [getRulesByCategory, flattenRule],
        fields: [
            'customFlowLabel',
            'deviceChosen',
            'scanCompletedAt',
            'severity',
            'issueId',
            'issueDescription',
            'wcagConformance',
            'url',
            'pageTitle',
            'context',
            'howToFix',
            'axeImpact',
            'xpath',
            'learnMore',
        ],
        includeEmptyRows: true,
    };
    // Create the parse stream (it's asynchronous)
    const parser = new AsyncParser(opts);
    const parseStream = parser.parse(allIssues);
    // Pipe JSON2CSV output into the file, but don't end automatically
    parseStream.pipe(csvOutput, { end: false });
    // Once JSON2CSV is done writing all normal rows, append any "pagesNotScanned"
    parseStream.on('end', () => {
        if (allIssues.pagesNotScanned && allIssues.pagesNotScanned.length > 0) {
            csvOutput.write('\n');
            allIssues.pagesNotScanned.forEach(page => {
                // Field order must mirror opts.fields above, since the row is
                // emitted positionally via Object.values.
                const skippedPage = {
                    customFlowLabel: allIssues.customFlowLabel || '',
                    deviceChosen: allIssues.deviceChosen || '',
                    scanCompletedAt: allIssues.endTime ? allIssues.endTime.toISOString() : '',
                    severity: 'error',
                    issueId: 'error-pages-skipped',
                    issueDescription: page.metadata
                        ? page.metadata
                        : 'An unknown error caused the page to be skipped',
                    wcagConformance: '',
                    url: page.url || page || '',
                    pageTitle: 'Error',
                    context: '',
                    howToFix: '',
                    axeImpact: '',
                    xpath: '',
                    learnMore: '',
                };
                // Escape each value so commas/quotes/newlines in error text or
                // URLs cannot break the CSV row structure.
                csvOutput.write(`${Object.values(skippedPage).map(escapeCsvValue).join(',')}\n`);
            });
        }
        // Now close the CSV file
        csvOutput.end();
    });
    parseStream.on('error', (err) => {
        console.error('Error parsing CSV:', err);
        csvOutput.end();
    });
};
export default writeCsv;
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
/**
 * Streams the contents of `inputFilePath` into `writeStream` chunk by chunk,
 * optionally appending a trailing comma (used to separate the base64 columns
 * of scanDetails.csv).
 *
 * The original implementation tracked an `isFirstChunk` flag whose two
 * branches were byte-identical (dead code), and ignored the return value of
 * `writeStream.write`, allowing a large base64 payload to buffer unboundedly
 * in memory. This version drops the dead flag and honours backpressure.
 *
 * @param {string} inputFilePath - file to read (utf8)
 * @param {import('stream').Writable} writeStream - destination stream (not ended here)
 * @param {boolean} appendComma - whether to write a ',' after the file content
 */
const streamEncodedDataToFile = async (inputFilePath, writeStream, appendComma) => {
    const readStream = fs.createReadStream(inputFilePath, { encoding: 'utf8' });
    for await (const chunk of readStream) {
        // write() returns false when the internal buffer is full; wait for
        // 'drain' before pushing more data instead of buffering it all.
        if (!writeStream.write(chunk)) {
            await new Promise(resolve => writeStream.once('drain', resolve));
        }
    }
    if (appendComma) {
        writeStream.write(',');
    }
};
|
|
19
|
+
/**
 * Writes `scanDetails.csv` into `storagePath` with a single data row of three
 * base64-encoded columns, streamed from the three pre-encoded input files.
 *
 * Fix: the original created the write stream BEFORE checking that the target
 * directory existed, so a missing directory errored the stream before the
 * `mkdirSync` below it could ever help. The directory is now ensured first.
 *
 * @param {string} scanDataFilePath - file containing base64 scanData
 * @param {string} scanItemsFilePath - file containing base64 scanItems
 * @param {string} scanItemsSummaryFilePath - file containing base64 scanItemsSummary
 * @param {string} storagePath - directory to write scanDetails.csv into
 */
const writeScanDetailsCsv = async (scanDataFilePath, scanItemsFilePath, scanItemsSummaryFilePath, storagePath) => {
    const filePath = path.join(storagePath, 'scanDetails.csv');
    // Ensure the target directory exists before opening the write stream.
    const directoryPath = path.dirname(filePath);
    if (!fs.existsSync(directoryPath)) {
        fs.mkdirSync(directoryPath, { recursive: true });
    }
    const csvWriteStream = fs.createWriteStream(filePath, { encoding: 'utf8' });
    csvWriteStream.write('scanData_base64,scanItems_base64,scanItemsSummary_base64\n');
    // The three columns are comma-separated; only the last omits the comma.
    await streamEncodedDataToFile(scanDataFilePath, csvWriteStream, true);
    await streamEncodedDataToFile(scanItemsFilePath, csvWriteStream, true);
    await streamEncodedDataToFile(scanItemsSummaryFilePath, csvWriteStream, false);
    await new Promise((resolve, reject) => {
        // Register the error handler before ending so a flush failure rejects
        // this promise instead of surfacing as an unhandled 'error' event.
        csvWriteStream.on('error', reject);
        csvWriteStream.end(resolve);
    });
};
export default writeScanDetailsCsv;
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { consoleLogger } from '../logs.js';
|
|
4
|
+
/**
 * Persists the list of scanned page URLs as a newline-separated plain-text
 * sitemap (`sitemap.txt`) inside `storagePath`, then logs the destination.
 *
 * @param {Array<{url: string}>} pagesScanned - pages visited during the scan
 * @param {string} storagePath - directory to write sitemap.txt into
 */
const writeSitemap = async (pagesScanned, storagePath) => {
    const sitemapPath = path.join(storagePath, 'sitemap.txt');
    // Collect one URL per scanned page, in scan order.
    const urls = [];
    for (const scannedPage of pagesScanned) {
        urls.push(scannedPage.url);
    }
    await fs.writeFile(sitemapPath, urls.join('\n'), { encoding: 'utf-8' });
    consoleLogger.info(`Sitemap written to ${sitemapPath}`);
};
export default writeSitemap;
|