@govtechsg/oobee 0.10.21 → 0.10.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/.github/workflows/docker-test.yml +1 -1
  2. package/DETAILS.md +40 -25
  3. package/Dockerfile +41 -47
  4. package/INSTALLATION.md +1 -1
  5. package/LICENSE-3RD-PARTY-REPORT.txt +448 -0
  6. package/LICENSE-3RD-PARTY.txt +19913 -0
  7. package/README.md +10 -2
  8. package/__mocks__/mock-report.html +1503 -1360
  9. package/package.json +8 -4
  10. package/scripts/decodeUnzipParse.js +29 -0
  11. package/scripts/install_oobee_dependencies.command +2 -2
  12. package/scripts/install_oobee_dependencies.ps1 +3 -3
  13. package/src/cli.ts +3 -2
  14. package/src/combine.ts +1 -0
  15. package/src/constants/cliFunctions.ts +17 -3
  16. package/src/constants/common.ts +29 -5
  17. package/src/constants/constants.ts +28 -26
  18. package/src/constants/questions.ts +4 -1
  19. package/src/crawlers/commonCrawlerFunc.ts +159 -187
  20. package/src/crawlers/crawlDomain.ts +29 -30
  21. package/src/crawlers/crawlIntelligentSitemap.ts +7 -1
  22. package/src/crawlers/crawlLocalFile.ts +1 -1
  23. package/src/crawlers/crawlSitemap.ts +1 -1
  24. package/src/crawlers/custom/flagUnlabelledClickableElements.ts +546 -472
  25. package/src/crawlers/customAxeFunctions.ts +2 -2
  26. package/src/index.ts +0 -2
  27. package/src/mergeAxeResults.ts +608 -220
  28. package/src/screenshotFunc/pdfScreenshotFunc.ts +3 -3
  29. package/src/static/ejs/partials/components/wcagCompliance.ejs +10 -29
  30. package/src/static/ejs/partials/footer.ejs +10 -13
  31. package/src/static/ejs/partials/scripts/categorySummary.ejs +2 -2
  32. package/src/static/ejs/partials/scripts/decodeUnzipParse.ejs +3 -0
  33. package/src/static/ejs/partials/scripts/reportSearch.ejs +1 -0
  34. package/src/static/ejs/partials/scripts/ruleOffcanvas.ejs +54 -52
  35. package/src/static/ejs/partials/styles/styles.ejs +4 -0
  36. package/src/static/ejs/partials/summaryMain.ejs +15 -42
  37. package/src/static/ejs/report.ejs +21 -12
  38. package/src/utils.ts +10 -2
  39. package/src/xPathToCss.ts +186 -0
  40. package/a11y-scan-results.zip +0 -0
  41. package/src/types/xpath-to-css.d.ts +0 -3
@@ -9,9 +9,11 @@ import { fileURLToPath } from 'url';
 import { chromium } from 'playwright';
 import { createWriteStream } from 'fs';
 import { AsyncParser, ParserOptions } from '@json2csv/node';
-import { v4 as uuidv4 } from 'uuid';
+import zlib from 'zlib';
+import { Base64Encode } from 'base64-stream';
+import { pipeline } from 'stream/promises';
 
 import constants, { ScannerTypes } from './constants/constants.js';
-import { urlWithoutAuth } from './constants/common.js';
+import { urlWithoutAuth, prepareData } from './constants/common.js';
 
 import {
   createScreenshotsFolder,
   getStoragePath,
@@ -34,6 +36,7 @@ export type ItemsInfo = {
 
 type PageInfo = {
   items: ItemsInfo[];
+  itemsCount?: number;
   pageTitle: string;
   url?: string;
   pageImagePath?: string;
@@ -51,6 +54,13 @@ export type RuleInfo = {
   helpUrl: string;
 };
 
+type Category = {
+  description: string;
+  totalItems: number;
+  totalRuleIssues: number;
+  rules: RuleInfo[];
+};
+
 type AllIssues = {
   storagePath: string;
   oobeeAi: {
@@ -61,6 +71,7 @@ type AllIssues = {
   endTime: Date;
   urlScanned: string;
   scanType: string;
+  deviceChosen: string;
   formatAboutStartTime: (dateString: any) => string;
   isCustomFlow: boolean;
   viewport: string;
@@ -70,14 +81,16 @@ type AllIssues = {
   totalPagesNotScanned: number;
   totalItems: number;
   topFiveMostIssues: Array<any>;
+  topTenPagesWithMostIssues: Array<any>;
+  topTenIssues: Array<any>;
   wcagViolations: string[];
   customFlowLabel: string;
   phAppVersion: string;
   items: {
-    mustFix: { description: string; totalItems: number; rules: RuleInfo[] };
-    goodToFix: { description: string; totalItems: number; rules: RuleInfo[] };
-    needsReview: { description: string; totalItems: number; rules: RuleInfo[] };
-    passed: { description: string; totalItems: number; rules: RuleInfo[] };
+    mustFix: Category;
+    goodToFix: Category;
+    needsReview: Category;
+    passed: Category;
   };
   cypressScanAboutMetadata: string;
   wcagLinks: { [key: string]: string };
@@ -135,7 +148,7 @@ const writeCsv = async (allIssues, storagePath) => {
       return compareCategory === 0 ? a[1].rule.localeCompare(b[1].rule) : compareCategory;
     });
   };
-  // seems to go into
+
   const flattenRule = catAndRule => {
     const [severity, rule] = catAndRule;
     const results = [];
@@ -154,39 +167,49 @@ const writeCsv = async (allIssues, storagePath) => {
     pagesAffected.sort((a, b) => a.url.localeCompare(b.url));
     // format clauses as a string
     const wcagConformance = clausesArr.join(',');
+
    pagesAffected.forEach(affectedPage => {
      const { url, items } = affectedPage;
      items.forEach(item => {
        const { html, page, message, xpath } = item;
-        const howToFix = message.replace(/(\r\n|\n|\r)/g, ' '); // remove newlines
+        const howToFix = message.replace(/(\r\n|\n|\r)/g, '\\n'); // preserve newlines as \n
        const violation = html || formatPageViolation(page); // page is a number, not a string
        const context = violation.replace(/(\r\n|\n|\r)/g, ''); // remove newlines
 
        results.push({
-          severity,
-          issueId,
-          issueDescription,
-          wcagConformance,
-          url,
-          context,
-          howToFix,
-          axeImpact,
-          xpath,
-          learnMore,
+          customFlowLabel: allIssues.customFlowLabel || '',
+          deviceChosen: allIssues.deviceChosen || '',
+          scanCompletedAt: allIssues.endTime ? allIssues.endTime.toISOString() : '',
+          severity: severity || '',
+          issueId: issueId || '',
+          issueDescription: issueDescription || '',
+          wcagConformance: wcagConformance || '',
+          url: url || '',
+          pageTitle: affectedPage.pageTitle || 'No page title',
+          context: context || '',
+          howToFix: howToFix || '',
+          axeImpact: axeImpact || '',
+          xpath: xpath || '',
+          learnMore: learnMore || '',
        });
      });
    });
    if (results.length === 0) return {};
    return results;
  };
+
  const opts: ParserOptions<any, any> = {
    transforms: [getRulesByCategory, flattenRule],
    fields: [
+      'customFlowLabel',
+      'deviceChosen',
+      'scanCompletedAt',
      'severity',
      'issueId',
      'issueDescription',
      'wcagConformance',
      'url',
+      'pageTitle',
      'context',
      'howToFix',
      'axeImpact',
@@ -195,17 +218,61 @@ const writeCsv = async (allIssues, storagePath) => {
    ],
    includeEmptyRows: true,
  };
+
+  // Create the parse stream (it’s asynchronous)
  const parser = new AsyncParser(opts);
-  parser.parse(allIssues).pipe(csvOutput);
+  const parseStream = parser.parse(allIssues);
+
+  // Pipe JSON2CSV output into the file, but don't end automatically
+  parseStream.pipe(csvOutput, { end: false });
+
+  // Once JSON2CSV is done writing all normal rows, append any "pagesNotScanned"
+  parseStream.on('end', () => {
+    if (allIssues.pagesNotScanned && allIssues.pagesNotScanned.length > 0) {
+      csvOutput.write('\n');
+      allIssues.pagesNotScanned.forEach(page => {
+        const skippedPage = {
+          customFlowLabel: allIssues.customFlowLabel || '',
+          deviceChosen: allIssues.deviceChosen || '',
+          scanCompletedAt: allIssues.endTime ? allIssues.endTime.toISOString() : '',
+          severity: 'error',
+          issueId: 'error-pages-skipped',
+          issueDescription: 'Page was skipped during the scan',
+          wcagConformance: '',
+          url: page.url || '',
+          pageTitle: '',
+          context: '',
+          howToFix: '',
+          axeImpact: '',
+          xpath: '',
+          learnMore: '',
+        };
+        csvOutput.write(`${Object.values(skippedPage).join(',')}\n`);
+      });
+    }
+
+    // Now close the CSV file
+    csvOutput.end();
+  });
+
+  parseStream.on('error', err => {
+    console.error('Error parsing CSV:', err);
+    csvOutput.end();
+  });
 };
 
-const compileHtmlWithEJS = async (allIssues, storagePath, htmlFilename = 'report') => {
+const compileHtmlWithEJS = async (
+  allIssues: AllIssues,
+  storagePath: string,
+  htmlFilename = 'report',
+) => {
  const htmlFilePath = `${path.join(storagePath, htmlFilename)}.html`;
  const ejsString = fs.readFileSync(path.join(dirname, './static/ejs/report.ejs'), 'utf-8');
  const template = ejs.compile(ejsString, {
    filename: path.join(dirname, './static/ejs/report.ejs'),
  });
-  const html = template(allIssues);
+
+  const html = template({ ...allIssues, storagePath: JSON.stringify(storagePath) });
  await fs.writeFile(htmlFilePath, html);
 
  let htmlContent = await fs.readFile(htmlFilePath, { encoding: 'utf8' });
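
Note on the writeCsv change above: piping the @json2csv/node parse stream with { end: false } keeps the destination file open so the skipped-page rows can be appended after the parser finishes. A minimal standalone sketch of the same pattern in TypeScript (the file name and fields are illustrative, not from the package):

    import { createWriteStream } from 'fs';
    import { AsyncParser } from '@json2csv/node';

    const out = createWriteStream('example.csv', { encoding: 'utf8' });
    const parseStream = new AsyncParser({ fields: ['url', 'severity'] }).parse([
      { url: 'https://example.com/', severity: 'mustFix' },
    ]);

    parseStream.pipe(out, { end: false }); // keep the file open for manual appends
    parseStream.on('end', () => {
      out.write('\nhttps://example.com/skipped,error\n'); // extra row after the CSV body
      out.end(); // close only once the appended rows are written
    });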
@@ -213,28 +280,8 @@ const compileHtmlWithEJS = async (allIssues, storagePath, htmlFilename = 'report
  const headIndex = htmlContent.indexOf('</head>');
  const injectScript = `
  <script>
-    try {
-      const base64DecodeChunkedWithDecoder = (data, chunkSize = ${BUFFER_LIMIT}) => {
-        const encodedChunks = data.split('|');
-        const decoder = new TextDecoder();
-        const jsonParts = [];
-
-        encodedChunks.forEach(chunk => {
-          for (let i = 0; i < chunk.length; i += chunkSize) {
-            const chunkPart = chunk.slice(i, i + chunkSize);
-            const decodedBytes = Uint8Array.from(atob(chunkPart), c => c.charCodeAt(0));
-            jsonParts.push(decoder.decode(decodedBytes, { stream: true }));
-          }
-        });
-
-        return JSON.parse(jsonParts.join(''));
-
-      };
-
    // IMPORTANT! DO NOT REMOVE ME: Decode the encoded data
-    } catch (error) {
-      console.error("Error decoding base64 data:", error);
-    }
+
  </script>
  `;
 
@@ -276,40 +323,32 @@ const splitHtmlAndCreateFiles = async (htmlFilePath, storagePath) => {
  }
 };
 
-const writeHTML = async (allIssues, storagePath, htmlFilename = 'report') => {
+const writeHTML = async (
+  allIssues: AllIssues,
+  storagePath: string,
+  htmlFilename = 'report',
+  scanDetailsFilePath: string,
+  scanItemsFilePath: string,
+) => {
  const htmlFilePath = await compileHtmlWithEJS(allIssues, storagePath, htmlFilename);
-  const inputFilePath = path.resolve(storagePath, 'scanDetails.csv');
-  const outputFilePath = `${storagePath}/${htmlFilename}.html`;
-
  const { topFilePath, bottomFilePath } = await splitHtmlAndCreateFiles(htmlFilePath, storagePath);
-
  const prefixData = fs.readFileSync(path.join(storagePath, 'report-partial-top.htm.txt'), 'utf-8');
  const suffixData = fs.readFileSync(
    path.join(storagePath, 'report-partial-bottom.htm.txt'),
    'utf-8',
  );
 
-  const outputStream = fs.createWriteStream(outputFilePath, { flags: 'a' });
-
-  outputStream.write(prefixData);
-
-  const inputStream = fs.createReadStream(inputFilePath, {
-    encoding: 'utf-8',
+  const scanDetailsReadStream = fs.createReadStream(scanDetailsFilePath, {
+    encoding: 'utf8',
+    highWaterMark: BUFFER_LIMIT,
+  });
+  const scanItemsReadStream = fs.createReadStream(scanItemsFilePath, {
+    encoding: 'utf8',
    highWaterMark: BUFFER_LIMIT,
  });
 
-  let isFirstLine = true;
-  let lineEndingDetected = false;
-  let isFirstField = true;
-  let isWritingFirstDataLine = true;
-  let buffer = '';
-
-  function flushBuffer() {
-    if (buffer.length > 0) {
-      outputStream.write(buffer);
-      buffer = '';
-    }
-  }
+  const outputFilePath = `${storagePath}/${htmlFilename}.html`;
+  const outputStream = fs.createWriteStream(outputFilePath, { flags: 'a' });
 
  const cleanupFiles = async () => {
    try {
@@ -319,75 +358,54 @@ const writeHTML = async (allIssues, storagePath, htmlFilename = 'report') => {
    }
  };
 
-  inputStream.on('data', chunk => {
-    let chunkIndex = 0;
-
-    while (chunkIndex < chunk.length) {
-      const char = chunk[chunkIndex];
-
-      if (isFirstLine) {
-        if (char === '\n' || char === '\r') {
-          lineEndingDetected = true;
-        } else if (lineEndingDetected) {
-          if (char !== '\n' && char !== '\r') {
-            isFirstLine = false;
-
-            if (isWritingFirstDataLine) {
-              buffer += "scanData = base64DecodeChunkedWithDecoder('";
-              isWritingFirstDataLine = false;
-            }
-            buffer += char;
-          }
-          lineEndingDetected = false;
-        }
-      } else {
-        if (char === ',') {
-          buffer += "')\n\n";
-          buffer += "scanItems = base64DecodeChunkedWithDecoder('";
-          isFirstField = false;
-        } else if (char === '\n' || char === '\r') {
-          if (!isFirstField) {
-            buffer += "')\n";
-          }
-        } else {
-          buffer += char;
-        }
-
-        if (buffer.length >= BUFFER_LIMIT) {
-          flushBuffer();
-        }
-      }
+  outputStream.write(prefixData);
 
-      chunkIndex++;
-    }
+  // outputStream.write("scanData = decompressJsonObject('");
+  outputStream.write(
+    "let scanDataPromise = (async () => { console.log('Loading scanData...'); scanData = await decodeUnzipParse('",
+  );
+  scanDetailsReadStream.pipe(outputStream, { end: false });
+
+  scanDetailsReadStream.on('end', () => {
+    // outputStream.write("')\n\n");
+    outputStream.write("'); })();\n\n");
+    // outputStream.write("(scanItems = decompressJsonObject('");
+    outputStream.write(
+      "let scanItemsPromise = (async () => { console.log('Loading scanItems...'); scanItems = await decodeUnzipParse('",
+    );
+    scanItemsReadStream.pipe(outputStream, { end: false });
  });
 
-  inputStream.on('end', async () => {
-    if (!isFirstField) {
-      buffer += "')\n";
-    }
-    flushBuffer();
+  scanDetailsReadStream.on('error', err => {
+    console.error('Read stream error:', err);
+    outputStream.end();
+  });
 
+  scanItemsReadStream.on('end', () => {
+    // outputStream.write("')\n\n");
+    outputStream.write("'); })();\n\n");
    outputStream.write(suffixData);
    outputStream.end();
-    console.log('Content appended successfully.');
-
-    await cleanupFiles();
  });
 
-  inputStream.on('error', async err => {
-    console.error('Error reading input file:', err);
+  scanItemsReadStream.on('error', err => {
+    console.error('Read stream error:', err);
    outputStream.end();
-
-    await cleanupFiles();
  });
 
+  consoleLogger.info('Content appended successfully.');
+  await cleanupFiles();
+
  outputStream.on('error', err => {
-    console.error('Error writing to output file:', err);
+    consoleLogger.error('Error writing to output file:', err);
  });
 };
 
-const writeSummaryHTML = async (allIssues, storagePath, htmlFilename = 'summary') => {
+const writeSummaryHTML = async (
+  allIssues: AllIssues,
+  storagePath: string,
+  htmlFilename = 'summary',
+) => {
  const ejsString = fs.readFileSync(path.join(dirname, './static/ejs/summary.ejs'), 'utf-8');
  const template = ejs.compile(ejsString, {
    filename: path.join(dirname, './static/ejs/summary.ejs'),
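
Note on writeHTML above: the report now embeds each payload as a gzip-then-base64 string inside an async IIFE, so scanData and scanItems resolve via decodeUnzipParse in the browser (that helper ships separately as scripts/decodeUnzipParse.js, per the file list, and is not shown in this hunk). A plausible browser-side inverse of the encoder, assuming DecompressionStream support, as a hypothetical sketch only:

    // Hypothetical decode path: base64 -> gunzip -> JSON.parse.
    async function decodeUnzipParse(base64: string): Promise<unknown> {
      const bytes = Uint8Array.from(atob(base64), c => c.charCodeAt(0)); // base64 -> bytes
      const inflated = new Blob([bytes])
        .stream()
        .pipeThrough(new DecompressionStream('gzip')); // undo the gzip stage
      return JSON.parse(await new Response(inflated).text()); // text -> object
    }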
@@ -396,47 +414,56 @@ const writeSummaryHTML = async (allIssues, storagePath, htmlFilename = 'summary'
  fs.writeFileSync(`${storagePath}/${htmlFilename}.html`, html);
 };
 
-function writeFormattedValue(value, writeStream) {
-  if (typeof value === 'function') {
-    writeStream.write('null');
-  } else if (value === undefined) {
-    writeStream.write('null');
-  } else if (typeof value === 'string' || typeof value === 'boolean' || typeof value === 'number') {
-    writeStream.write(JSON.stringify(value));
-  } else if (value === null) {
-    writeStream.write('null');
-  }
-}
+const cleanUpJsonFiles = async (filesToDelete: string[]) => {
+  consoleLogger.info('Cleaning up JSON files...');
+  filesToDelete.forEach(file => {
+    fs.unlinkSync(file);
+    consoleLogger.info(`Deleted ${file}`);
+  });
+};
 
-function serializeObject(obj, writeStream, depth = 0, indent = ' ') {
+function* serializeObject(obj: any, depth = 0, indent = ' ') {
  const currentIndent = indent.repeat(depth);
  const nextIndent = indent.repeat(depth + 1);
 
  if (obj instanceof Date) {
-    writeStream.write(JSON.stringify(obj.toISOString()));
-  } else if (Array.isArray(obj)) {
-    writeStream.write('[\n');
-    obj.forEach((item, index) => {
-      if (index > 0) writeStream.write(',\n');
-      writeStream.write(nextIndent);
-      serializeObject(item, writeStream, depth + 1, indent);
-    });
-    writeStream.write(`\n${currentIndent}]`);
-  } else if (typeof obj === 'object' && obj !== null) {
-    writeStream.write('{\n');
+    yield JSON.stringify(obj.toISOString());
+    return;
+  }
+
+  if (Array.isArray(obj)) {
+    yield '[\n';
+    for (let i = 0; i < obj.length; i++) {
+      if (i > 0) yield ',\n';
+      yield nextIndent;
+      yield* serializeObject(obj[i], depth + 1, indent);
+    }
+    yield `\n${currentIndent}]`;
+    return;
+  }
+
+  if (obj !== null && typeof obj === 'object') {
+    yield '{\n';
    const keys = Object.keys(obj);
-    keys.forEach((key, index) => {
-      if (index > 0) writeStream.write(',\n');
-      writeStream.write(`${nextIndent}${JSON.stringify(key)}: `);
-      serializeObject(obj[key], writeStream, depth + 1, indent);
-    });
-    writeStream.write(`\n${currentIndent}}`);
-  } else {
-    writeFormattedValue(obj, writeStream);
+    for (let i = 0; i < keys.length; i++) {
+      const key = keys[i];
+      if (i > 0) yield ',\n';
+      yield `${nextIndent}${JSON.stringify(key)}: `;
+      yield* serializeObject(obj[key], depth + 1, indent);
+    }
+    yield `\n${currentIndent}}`;
+    return;
  }
+
+  if (obj === null || typeof obj === 'function' || typeof obj === 'undefined') {
+    yield 'null';
+    return;
+  }
+
+  yield JSON.stringify(obj);
 }
 
-function writeLargeJsonToFile(obj, filePath) {
+function writeLargeJsonToFile(obj: object, filePath: string) {
  return new Promise((resolve, reject) => {
    const writeStream = fs.createWriteStream(filePath, { encoding: 'utf8' });
 
@@ -446,74 +473,231 @@ function writeLargeJsonToFile(obj, filePath) {
    });
 
    writeStream.on('finish', () => {
-      consoleLogger.info('Temporary file written successfully:', filePath);
+      consoleLogger.info(`JSON file written successfully: ${filePath}`);
      resolve(true);
    });
 
-    serializeObject(obj, writeStream);
-    writeStream.end();
+    const generator = serializeObject(obj);
+
+    function write() {
+      let next: any;
+      while (!(next = generator.next()).done) {
+        if (!writeStream.write(next.value)) {
+          writeStream.once('drain', write);
+          return;
+        }
+      }
+      writeStream.end();
+    }
+
+    write();
  });
 }
 
-const base64Encode = async (data, num, storagePath, generateJsonFiles) => {
-  try {
-    const tempFilePath =
-      num === 1
-        ? path.join(storagePath, 'scanItems.json')
-        : num === 2
-          ? path.join(storagePath, 'scanData.json')
-          : path.join(storagePath, `${uuidv4()}.json`);
+const writeLargeScanItemsJsonToFile = async (obj: object, filePath: string) => {
+  return new Promise((resolve, reject) => {
+    const writeStream = fs.createWriteStream(filePath, { flags: 'a', encoding: 'utf8' });
+    const writeQueue: string[] = [];
+    let isWriting = false;
+
+    const processNextWrite = async () => {
+      if (isWriting || writeQueue.length === 0) return;
+
+      isWriting = true;
+      const data = writeQueue.shift()!;
+
+      try {
+        if (!writeStream.write(data)) {
+          await new Promise<void>(resolve => {
+            writeStream.once('drain', () => {
+              resolve();
+            });
+          });
+        }
+      } catch (error) {
+        writeStream.destroy(error as Error);
+        return;
+      }
+
+      isWriting = false;
+      processNextWrite();
+    };
 
-    await writeLargeJsonToFile(data, tempFilePath);
+    const queueWrite = (data: string) => {
+      writeQueue.push(data);
+      processNextWrite();
+    };
+
+    writeStream.on('error', error => {
+      consoleLogger.error(`Error writing object to JSON file: ${error}`);
+      reject(error);
+    });
 
-    const outputFilename = `encoded_${uuidv4()}.txt`;
-    const outputFilePath = path.join(process.cwd(), outputFilename);
+    writeStream.on('finish', () => {
+      consoleLogger.info(`JSON file written successfully: ${filePath}`);
+      resolve(true);
+    });
 
    try {
-      const readStream = fs.createReadStream(tempFilePath, {
-        encoding: 'utf8',
-        highWaterMark: BUFFER_LIMIT,
-      });
-      const writeStream = fs.createWriteStream(outputFilePath, { encoding: 'utf8' });
+      queueWrite('{\n');
+      const keys = Object.keys(obj);
+
+      keys.forEach((key, i) => {
+        const value = obj[key];
+        queueWrite(`  "${key}": {\n`);
+
+        const { rules, ...otherProperties } = value;
+
+        // Write other properties
+        Object.entries(otherProperties).forEach(([propKey, propValue], j) => {
+          const propValueString =
+            propValue === null ||
+            typeof propValue === 'function' ||
+            typeof propValue === 'undefined'
+              ? 'null'
+              : JSON.stringify(propValue);
+          queueWrite(`    "${propKey}": ${propValueString}`);
+          if (j < Object.keys(otherProperties).length - 1 || (rules && rules.length >= 0)) {
+            queueWrite(',\n');
+          } else {
+            queueWrite('\n');
+          }
+        });
 
-      let previousChunk = null;
+        if (rules && Array.isArray(rules)) {
+          queueWrite('    "rules": [\n');
+
+          rules.forEach((rule, j) => {
+            queueWrite('      {\n');
+            const { pagesAffected, ...otherRuleProperties } = rule;
+
+            Object.entries(otherRuleProperties).forEach(([ruleKey, ruleValue], k) => {
+              const ruleValueString =
+                ruleValue === null ||
+                typeof ruleValue === 'function' ||
+                typeof ruleValue === 'undefined'
+                  ? 'null'
+                  : JSON.stringify(ruleValue);
+              queueWrite(`        "${ruleKey}": ${ruleValueString}`);
+              if (k < Object.keys(otherRuleProperties).length - 1 || pagesAffected) {
+                queueWrite(',\n');
+              } else {
+                queueWrite('\n');
+              }
+            });
+
+            if (pagesAffected && Array.isArray(pagesAffected)) {
+              queueWrite('        "pagesAffected": [\n');
+
+              pagesAffected.forEach((page, p) => {
+                const pageJson = JSON.stringify(page, null, 2)
+                  .split('\n')
+                  .map((line, idx) => (idx === 0 ? `          ${line}` : `          ${line}`))
+                  .join('\n');
+
+                queueWrite(pageJson);
+
+                if (p < pagesAffected.length - 1) {
+                  queueWrite(',\n');
+                } else {
+                  queueWrite('\n');
+                }
+              });
+
+              queueWrite('        ]');
+            }
 
-      for await (const chunk of readStream) {
-        const encodedChunk = Buffer.from(chunk).toString('base64');
+            queueWrite('\n      }');
+            if (j < rules.length - 1) {
+              queueWrite(',\n');
+            } else {
+              queueWrite('\n');
+            }
+          });
 
-        if (previousChunk !== null) {
-          // Note: Notice the pipe symbol `|`, it is intended to be here as a delimiter
-          // for the scenario where there are chunking happens
-          writeStream.write(`${previousChunk}|`);
+          queueWrite('    ]');
        }
 
-        previousChunk = encodedChunk;
-      }
+        queueWrite('\n  }');
+        if (i < keys.length - 1) {
+          queueWrite(',\n');
+        } else {
+          queueWrite('\n');
+        }
+      });
 
-      if (previousChunk !== null) {
-        writeStream.write(previousChunk);
-      }
+      queueWrite('}\n');
 
-      await new Promise((resolve, reject) => {
-        writeStream.end(resolve);
-        writeStream.on('error', reject);
-      });
+      // Ensure all queued writes are processed before ending
+      const checkQueueAndEnd = () => {
+        if (writeQueue.length === 0 && !isWriting) {
+          writeStream.end();
+        } else {
+          setTimeout(checkQueueAndEnd, 100);
+        }
+      };
 
-      return outputFilePath;
-    } finally {
-      if (!generateJsonFiles) {
-        await fs.promises
-          .unlink(tempFilePath)
-          .catch(err => console.error('Temp file delete error:', err));
-      }
+      checkQueueAndEnd();
+    } catch (err) {
+      writeStream.destroy(err as Error);
+      reject(err);
    }
+  });
+};
+
+async function compressJsonFileStreaming(inputPath: string, outputPath: string) {
+  // Create the read and write streams
+  const readStream = fs.createReadStream(inputPath);
+  const writeStream = fs.createWriteStream(outputPath);
+
+  // Create a gzip transform stream
+  const gzip = zlib.createGzip();
+
+  // Create a Base64 transform stream
+  const base64Encode = new Base64Encode();
+
+  // Pipe the streams:
+  // read -> gzip -> base64 -> write
+  await pipeline(readStream, gzip, base64Encode, writeStream);
+  console.log(`File successfully compressed and saved to ${outputPath}`);
+}
+
+const writeJsonFileAndCompressedJsonFile = async (
+  data: object,
+  storagePath: string,
+  filename: string,
+): Promise<{ jsonFilePath: string; base64FilePath: string }> => {
+  try {
+    consoleLogger.info(`Writing JSON to ${filename}.json`);
+    const jsonFilePath = path.join(storagePath, `${filename}.json`);
+    if (filename === 'scanItems') {
+      await writeLargeScanItemsJsonToFile(data, jsonFilePath);
+    } else {
+      await writeLargeJsonToFile(data, jsonFilePath);
+    }
+
+    consoleLogger.info(
+      `Reading ${filename}.json, gzipping and base64 encoding it into ${filename}.json.gz.b64`,
+    );
+    const base64FilePath = path.join(storagePath, `${filename}.json.gz.b64`);
+    await compressJsonFileStreaming(jsonFilePath, base64FilePath);
+
+    consoleLogger.info(`Finished compression and base64 encoding for ${filename}`);
+    return {
+      jsonFilePath,
+      base64FilePath,
+    };
  } catch (error) {
-    console.error('Error encoding data to Base64:', error);
+    consoleLogger.error(`Error compressing and encoding ${filename}`);
    throw error;
  }
 };
 
-const streamEncodedDataToFile = async (inputFilePath, writeStream, appendComma) => {
+const streamEncodedDataToFile = async (
+  inputFilePath: string,
+  writeStream: fs.WriteStream,
+  appendComma: boolean,
+) => {
  const readStream = fs.createReadStream(inputFilePath, { encoding: 'utf8' });
  let isFirstChunk = true;
@@ -531,35 +715,120 @@ const streamEncodedDataToFile = async (inputFilePath, writeStream, appendComma)
  }
 };
 
-const writeBase64 = async (allIssues, storagePath, generateJsonFiles) => {
+const writeJsonAndBase64Files = async (
+  allIssues: AllIssues,
+  storagePath: string,
+): Promise<{
+  scanDataJsonFilePath: string;
+  scanDataBase64FilePath: string;
+  scanItemsJsonFilePath: string;
+  scanItemsBase64FilePath: string;
+  scanItemsSummaryJsonFilePath: string;
+  scanItemsSummaryBase64FilePath: string;
+  scanDataJsonFileSize: number;
+  scanItemsJsonFileSize: number;
+}> => {
  const { items, ...rest } = allIssues;
-  const encodedScanItemsPath = await base64Encode(items, 1, storagePath, generateJsonFiles);
-  const encodedScanDataPath = await base64Encode(rest, 2, storagePath, generateJsonFiles);
+  const { jsonFilePath: scanDataJsonFilePath, base64FilePath: scanDataBase64FilePath } =
+    await writeJsonFileAndCompressedJsonFile(rest, storagePath, 'scanData');
+  const { jsonFilePath: scanItemsJsonFilePath, base64FilePath: scanItemsBase64FilePath } =
+    await writeJsonFileAndCompressedJsonFile(items, storagePath, 'scanItems');
+
+  // scanItemsSummary
+  // the below mutates the original items object, since it is expensive to clone
+  items.mustFix.rules.forEach(rule => {
+    rule.pagesAffected.forEach(page => {
+      page.itemsCount = page.items.length;
+      page.items = [];
+    });
+  });
+  items.goodToFix.rules.forEach(rule => {
+    rule.pagesAffected.forEach(page => {
+      page.itemsCount = page.items.length;
+      page.items = [];
+    });
+  });
+  items.needsReview.rules.forEach(rule => {
+    rule.pagesAffected.forEach(page => {
+      page.itemsCount = page.items.length;
+      page.items = [];
+    });
+  });
+  items.passed.rules.forEach(rule => {
+    rule.pagesAffected.forEach(page => {
+      page.itemsCount = page.items.length;
+      page.items = [];
+    });
+  });
+
+  items.mustFix.totalRuleIssues = items.mustFix.rules.length;
+  items.goodToFix.totalRuleIssues = items.goodToFix.rules.length;
+  items.needsReview.totalRuleIssues = items.needsReview.rules.length;
+  items.passed.totalRuleIssues = items.passed.rules.length;
+
+  const {
+    pagesScanned,
+    topTenPagesWithMostIssues,
+    pagesNotScanned,
+    wcagLinks,
+    wcagPassPercentage,
+    totalPagesScanned,
+    totalPagesNotScanned,
+    topTenIssues,
+  } = rest;
+
+  const summaryItems = {
+    ...items,
+    pagesScanned,
+    topTenPagesWithMostIssues,
+    pagesNotScanned,
+    wcagLinks,
+    wcagPassPercentage,
+    totalPagesScanned,
+    totalPagesNotScanned,
+    topTenIssues,
+  };
 
+  const {
+    jsonFilePath: scanItemsSummaryJsonFilePath,
+    base64FilePath: scanItemsSummaryBase64FilePath,
+  } = await writeJsonFileAndCompressedJsonFile(summaryItems, storagePath, 'scanItemsSummary');
+
+  return {
+    scanDataJsonFilePath,
+    scanDataBase64FilePath,
+    scanItemsJsonFilePath,
+    scanItemsBase64FilePath,
+    scanItemsSummaryJsonFilePath,
+    scanItemsSummaryBase64FilePath,
+    scanDataJsonFileSize: fs.statSync(scanDataJsonFilePath).size,
+    scanItemsJsonFileSize: fs.statSync(scanItemsJsonFilePath).size,
+  };
+};
+
+const writeScanDetailsCsv = async (
+  scanDataFilePath: string,
+  scanItemsFilePath: string,
+  scanItemsSummaryFilePath: string,
+  storagePath: string,
+) => {
  const filePath = path.join(storagePath, 'scanDetails.csv');
+  const csvWriteStream = fs.createWriteStream(filePath, { encoding: 'utf8' });
  const directoryPath = path.dirname(filePath);
 
  if (!fs.existsSync(directoryPath)) {
    fs.mkdirSync(directoryPath, { recursive: true });
  }
 
-  const csvWriteStream = fs.createWriteStream(filePath, { encoding: 'utf8' });
-
-  csvWriteStream.write('scanData_base64,scanItems_base64\n');
-  await streamEncodedDataToFile(encodedScanDataPath, csvWriteStream, true);
-  await streamEncodedDataToFile(encodedScanItemsPath, csvWriteStream, false);
+  csvWriteStream.write('scanData_base64,scanItems_base64,scanItemsSummary_base64\n');
+  await streamEncodedDataToFile(scanDataFilePath, csvWriteStream, true);
+  await streamEncodedDataToFile(scanItemsFilePath, csvWriteStream, true);
+  await streamEncodedDataToFile(scanItemsSummaryFilePath, csvWriteStream, false);
 
  await new Promise((resolve, reject) => {
    csvWriteStream.end(resolve);
    csvWriteStream.on('error', reject);
  });
-
-  await fs.promises
-    .unlink(encodedScanDataPath)
-    .catch(err => console.error('Encoded file delete error:', err));
-  await fs.promises
-    .unlink(encodedScanItemsPath)
-    .catch(err => console.error('Encoded file delete error:', err));
 };
 
 let browserChannel = 'chrome';
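
Note on writeJsonAndBase64Files above: the four per-category loops that swap page.items for page.itemsCount are identical. An equivalent, more compact form (a sketch of a possible refactor over the same in-scope items object, not what the package ships):

    const categories = ['mustFix', 'goodToFix', 'needsReview', 'passed'] as const;
    categories.forEach(category => {
      items[category].rules.forEach(rule => {
        rule.pagesAffected.forEach(page => {
          page.itemsCount = page.items.length; // keep the count, drop the heavy item arrays
          page.items = [];
        });
      });
      items[category].totalRuleIssues = items[category].rules.length;
    });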
@@ -572,12 +841,13 @@ if (os.platform() === 'linux') {
  browserChannel = 'chromium';
 }
 
-const writeSummaryPdf = async (storagePath, pagesScanned, filename = 'summary') => {
+const writeSummaryPdf = async (storagePath: string, pagesScanned: number, filename = 'summary') => {
  const htmlFilePath = `${storagePath}/${filename}.html`;
  const fileDestinationPath = `${storagePath}/${filename}.pdf`;
  const browser = await chromium.launch({
-    headless: true,
+    headless: false,
    channel: browserChannel,
+    args: ['--headless=new', '--no-sandbox'],
  });
 
  const context = await browser.newContext({
@@ -624,7 +894,12 @@ const pushResults = async (pageResults, allIssues, isCustomFlow) => {
  Object.keys(pageResults.goodToFix.rules).forEach(k => totalIssuesInPage.add(k));
  Object.keys(pageResults.needsReview.rules).forEach(k => totalIssuesInPage.add(k));
 
-  allIssues.topFiveMostIssues.push({ url, pageTitle, totalIssues: totalIssuesInPage.size });
+  allIssues.topFiveMostIssues.push({
+    url,
+    pageTitle,
+    totalIssues: totalIssuesInPage.size,
+    totalOccurrences: 0,
+  });
 
  ['mustFix', 'goodToFix', 'needsReview', 'passed'].forEach(category => {
    if (!pageResults[category]) return;
@@ -706,9 +981,47 @@ const pushResults = async (pageResults, allIssues, isCustomFlow) => {
  });
 };
 
+const getTopTenIssues = allIssues => {
+  const categories = ['mustFix', 'goodToFix'];
+  const rulesWithCounts = [];
+
+  const conformanceLevels = {
+    wcag2a: 'A',
+    wcag2aa: 'AA',
+    wcag21aa: 'AA',
+    wcag22aa: 'AA',
+    wcag2aaa: 'AAA',
+  };
+
+  categories.forEach(category => {
+    const rules = allIssues.items[category]?.rules || [];
+
+    rules.forEach(rule => {
+      const wcagLevel = rule.conformance[0];
+      const aLevel = conformanceLevels[wcagLevel] || wcagLevel;
+
+      rulesWithCounts.push({
+        category,
+        ruleId: rule.rule,
+        description: rule.description,
+        axeImpact: rule.axeImpact,
+        conformance: aLevel,
+        totalItems: rule.totalItems,
+      });
+    });
+  });
+
+  rulesWithCounts.sort((a, b) => b.totalItems - a.totalItems);
+
+  return rulesWithCounts.slice(0, 10);
+};
+
 const flattenAndSortResults = (allIssues: AllIssues, isCustomFlow: boolean) => {
+  const urlOccurrencesMap = new Map<string, number>();
+
  ['mustFix', 'goodToFix', 'needsReview', 'passed'].forEach(category => {
    allIssues.totalItems += allIssues.items[category].totalItems;
+
    allIssues.items[category].rules = Object.entries(allIssues.items[category].rules)
      .map(ruleEntry => {
        const [rule, ruleInfo] = ruleEntry as [string, RuleInfo];
@@ -716,9 +1029,14 @@ const flattenAndSortResults = (allIssues: AllIssues, isCustomFlow: boolean) => {
      .map(pageEntry => {
        if (isCustomFlow) {
          const [pageIndex, pageInfo] = pageEntry as unknown as [number, PageInfo];
+          urlOccurrencesMap.set(
+            pageInfo.url!,
+            (urlOccurrencesMap.get(pageInfo.url!) || 0) + pageInfo.items.length,
+          );
          return { pageIndex, ...pageInfo };
        }
        const [url, pageInfo] = pageEntry as unknown as [string, PageInfo];
+        urlOccurrencesMap.set(url, (urlOccurrencesMap.get(url) || 0) + pageInfo.items.length);
        return { url, ...pageInfo };
      })
      .sort((page1, page2) => page2.items.length - page1.items.length);
@@ -726,8 +1044,19 @@ const flattenAndSortResults = (allIssues: AllIssues, isCustomFlow: boolean) => {
      })
      .sort((rule1, rule2) => rule2.totalItems - rule1.totalItems);
  });
+
+  const updateIssuesWithOccurrences = (issuesList: Array<any>) => {
+    issuesList.forEach(issue => {
+      issue.totalOccurrences = urlOccurrencesMap.get(issue.url) || 0;
+    });
+  };
+
  allIssues.topFiveMostIssues.sort((page1, page2) => page2.totalIssues - page1.totalIssues);
  allIssues.topFiveMostIssues = allIssues.topFiveMostIssues.slice(0, 5);
+  allIssues.topTenPagesWithMostIssues = allIssues.topFiveMostIssues.slice(0, 10);
+  updateIssuesWithOccurrences(allIssues.topTenPagesWithMostIssues);
+  const topTenIssues = getTopTenIssues(allIssues);
+  allIssues.topTenIssues = topTenIssues;
 };
 
 const createRuleIdJson = allIssues => {
@@ -827,6 +1156,7 @@ const generateArtifacts = async (
    endTime: scanDetails.endTime ? scanDetails.endTime : new Date(),
    urlScanned,
    scanType,
+    deviceChosen: scanDetails.deviceChosen || 'Desktop',
    formatAboutStartTime,
    isCustomFlow,
    viewport,
@@ -836,21 +1166,43 @@ const generateArtifacts = async (
    totalPagesNotScanned: pagesNotScanned.length,
    totalItems: 0,
    topFiveMostIssues: [],
+    topTenPagesWithMostIssues: [],
+    topTenIssues: [],
    wcagViolations: [],
    customFlowLabel,
    phAppVersion,
    items: {
-      mustFix: { description: itemTypeDescription.mustFix, totalItems: 0, rules: [] },
-      goodToFix: { description: itemTypeDescription.goodToFix, totalItems: 0, rules: [] },
-      needsReview: { description: itemTypeDescription.needsReview, totalItems: 0, rules: [] },
-      passed: { description: itemTypeDescription.passed, totalItems: 0, rules: [] },
+      mustFix: {
+        description: itemTypeDescription.mustFix,
+        totalItems: 0,
+        totalRuleIssues: 0,
+        rules: [],
+      },
+      goodToFix: {
+        description: itemTypeDescription.goodToFix,
+        totalItems: 0,
+        totalRuleIssues: 0,
+        rules: [],
+      },
+      needsReview: {
+        description: itemTypeDescription.needsReview,
+        totalItems: 0,
+        totalRuleIssues: 0,
+        rules: [],
+      },
+      passed: {
+        description: itemTypeDescription.passed,
+        totalItems: 0,
+        totalRuleIssues: 0,
+        rules: [],
+      },
    },
    cypressScanAboutMetadata,
    wcagLinks: constants.wcagLinks,
    // Populate boolean values for id="advancedScanOptionsSummary"
    advancedScanOptionsSummaryItems: {
      showIncludeScreenshots: [true].includes(scanDetails.isIncludeScreenshots),
-      showAllowSubdomains: [true].includes(scanDetails.isAllowSubdomains),
+      showAllowSubdomains: ['same-domain'].includes(scanDetails.isAllowSubdomains),
      showEnableCustomChecks: ['default', 'enable-wcag-aaa'].includes(
        scanDetails.isEnableCustomChecks?.[0],
      ),
@@ -934,9 +1286,45 @@ const generateArtifacts = async (
  }
 
  await writeCsv(allIssues, storagePath);
-  await writeBase64(allIssues, storagePath, generateJsonFiles);
+  const {
+    scanDataJsonFilePath,
+    scanDataBase64FilePath,
+    scanItemsJsonFilePath,
+    scanItemsBase64FilePath,
+    scanItemsSummaryJsonFilePath,
+    scanItemsSummaryBase64FilePath,
+    scanDataJsonFileSize,
+    scanItemsJsonFileSize,
+  } = await writeJsonAndBase64Files(allIssues, storagePath);
+  const BIG_RESULTS_THRESHOLD = 500 * 1024 * 1024; // 500 MB
+  const resultsTooBig = scanDataJsonFileSize + scanItemsJsonFileSize > BIG_RESULTS_THRESHOLD;
+
+  await writeScanDetailsCsv(
+    scanDataBase64FilePath,
+    scanItemsBase64FilePath,
+    scanItemsSummaryBase64FilePath,
+    storagePath,
+  );
  await writeSummaryHTML(allIssues, storagePath);
-  await writeHTML(allIssues, storagePath);
+  await writeHTML(
+    allIssues,
+    storagePath,
+    'report',
+    scanDataBase64FilePath,
+    resultsTooBig ? scanItemsSummaryBase64FilePath : scanItemsBase64FilePath,
+  );
+
+  if (!generateJsonFiles) {
+    await cleanUpJsonFiles([
+      scanDataJsonFilePath,
+      scanDataBase64FilePath,
+      scanItemsJsonFilePath,
+      scanItemsBase64FilePath,
+      scanItemsSummaryJsonFilePath,
+      scanItemsSummaryBase64FilePath,
+    ]);
+  }
+
  await retryFunction(() => writeSummaryPdf(storagePath, pagesScanned.length), 1);
 
  // Take option if set