@govtechsg/oobee 0.10.51 → 0.10.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,4 @@
- import crawlee, { LaunchContext, Request, RequestList } from 'crawlee';
- import printMessage from 'print-message';
+ import crawlee, { LaunchContext, Request, RequestList, Dataset } from 'crawlee';
  import fs from 'fs';
  import {
  createCrawleeSubFolders,
@@ -8,11 +7,15 @@ import {
  isUrlPdf,
  } from './commonCrawlerFunc.js';

- import constants, { STATUS_CODE_METADATA, guiInfoStatusTypes, UrlsCrawled } from '../constants/constants.js';
+ import constants, {
+ STATUS_CODE_METADATA,
+ guiInfoStatusTypes,
+ UrlsCrawled,
+ disallowedListOfPatterns,
+ } from '../constants/constants.js';
  import {
  getLinksFromSitemap,
  getPlaywrightLaunchOptions,
- messageOptions,
  isSkippedUrl,
  urlWithoutAuth,
  waitForPageLoaded,
@@ -24,25 +27,46 @@ import { handlePdfDownload, runPdfScan, mapPdfScanResults } from './pdfScanFunc.
  import { guiInfoLog } from '../logs.js';
  import { ViewportSettingsClass } from '../combine.js';

- const crawlSitemap = async (
- sitemapUrl: string,
- randomToken: string,
- _host: string,
- viewportSettings: ViewportSettingsClass,
- maxRequestsPerCrawl: number,
- browser: string,
- userDataDirectory: string,
- specifiedMaxConcurrency: number,
- fileTypes: string,
- blacklistedPatterns: string[],
- includeScreenshots: boolean,
- extraHTTPHeaders: Record<string, string>,
- fromCrawlIntelligentSitemap = false, // optional
- userUrlInputFromIntelligent: string = null, // optional
- datasetFromIntelligent: crawlee.Dataset = null, // optional
- urlsCrawledFromIntelligent: UrlsCrawled = null, // optional
- crawledFromLocalFile = false, // optional
- ) => {
+ const crawlSitemap = async ({
+ sitemapUrl,
+ randomToken,
+ host,
+ viewportSettings,
+ maxRequestsPerCrawl,
+ browser,
+ userDataDirectory,
+ specifiedMaxConcurrency,
+ fileTypes,
+ blacklistedPatterns,
+ includeScreenshots,
+ extraHTTPHeaders,
+ scanDuration = 0,
+ fromCrawlIntelligentSitemap = false,
+ userUrlInputFromIntelligent = null,
+ datasetFromIntelligent = null,
+ urlsCrawledFromIntelligent = null,
+ crawledFromLocalFile = false,
+ }: {
+ sitemapUrl: string;
+ randomToken: string;
+ host: string;
+ viewportSettings: ViewportSettingsClass;
+ maxRequestsPerCrawl: number;
+ browser: string;
+ userDataDirectory: string;
+ specifiedMaxConcurrency: number;
+ fileTypes: string;
+ blacklistedPatterns: string[];
+ includeScreenshots: boolean;
+ extraHTTPHeaders: Record<string, string>;
+ scanDuration?: number;
+ fromCrawlIntelligentSitemap?: boolean;
+ userUrlInputFromIntelligent?: string;
+ datasetFromIntelligent?: Dataset;
+ urlsCrawledFromIntelligent?: UrlsCrawled;
+ crawledFromLocalFile?: boolean;
+ }) => {
+ const crawlStartTime = Date.now();
  let dataset: crawlee.Dataset;
  let urlsCrawled: UrlsCrawled;

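The hunk above replaces the long positional parameter list with a single destructured, typed options object, and adds the optional scanDuration budget together with a crawlStartTime stamp. A minimal call-site sketch under the new signature; the concrete values are illustrative and viewportSettings is assumed to be a ViewportSettingsClass instance already in scope:

const urlsCrawled = await crawlSitemap({
  sitemapUrl: 'https://example.com/sitemap.xml',
  randomToken: 'scan-001',
  host: 'example.com',
  viewportSettings,
  maxRequestsPerCrawl: 100,
  browser: 'chromium',
  userDataDirectory: '',
  specifiedMaxConcurrency: 5,
  fileTypes: 'html-only',
  blacklistedPatterns: [],
  includeScreenshots: false,
  extraHTTPHeaders: {},
  scanDuration: 300, // optional: stop scheduling new pages after 300 s
});

Named fields remove the risk of mis-ordered arguments that the old eighteen-parameter signature carried, and optional flags such as fromCrawlIntelligentSitemap can simply be omitted.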
@@ -127,14 +151,11 @@ const crawlSitemap = async (
  const { playwrightDeviceDetailsObject } = viewportSettings;
  const { maxConcurrency } = constants;

- printMessage(['Fetching URLs. This might take some time...'], { border: false });
-
  finalLinks = [...finalLinks, ...linksFromSitemap];

  const requestList = await RequestList.open({
  sources: finalLinks,
  });
- printMessage(['Fetch URLs completed. Beginning scan'], messageOptions);

  let userDataDir = '';
  if (userDataDirectory) {
@@ -165,7 +186,6 @@ const crawlSitemap = async (
  },
  requestList,
  postNavigationHooks: [
-
  async ({ page }) => {
  try {
  // Wait for a quiet period in the DOM, but with safeguards
@@ -173,36 +193,35 @@ const crawlSitemap = async (
  return new Promise(resolve => {
  let timeout;
  let mutationCount = 0;
- const MAX_MUTATIONS = 250; // stop if things never quiet down
- const OBSERVER_TIMEOUT = 5000; // hard cap on total wait
-
+ const MAX_MUTATIONS = 250; // stop if things never quiet down
+ const OBSERVER_TIMEOUT = 5000; // hard cap on total wait
+
  const observer = new MutationObserver(() => {
  clearTimeout(timeout);
-
+
  mutationCount++;
  if (mutationCount > MAX_MUTATIONS) {
  observer.disconnect();
  resolve('Too many mutations, exiting.');
  return;
  }
-
+
  // restart quiet‑period timer
  timeout = setTimeout(() => {
  observer.disconnect();
  resolve('DOM stabilized.');
  }, 1000);
  });
-
+
  // overall timeout in case the page never settles
  timeout = setTimeout(() => {
  observer.disconnect();
  resolve('Observer timeout reached.');
  }, OBSERVER_TIMEOUT);
-
+
  const root = document.documentElement || document.body || document;
  if (!root || typeof observer.observe !== 'function') {
  resolve('No root node to observe.');
- return;
  }
  });
  });
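The reformatted block above is the post-navigation "quiet period" wait: a MutationObserver resolves once no mutations arrive for 1 s, with a 250-mutation cap and a 5 s hard cap. A condensed sketch of the same pattern, written here as if executed in the page context through Playwright's page.evaluate (how the package actually wires it in is not visible in this hunk, and page is assumed to be the Playwright Page from the hook):

await page.evaluate(
  () =>
    new Promise<string>(resolve => {
      let timeout: ReturnType<typeof setTimeout>;
      let mutationCount = 0;
      const observer = new MutationObserver(() => {
        clearTimeout(timeout);
        if (++mutationCount > 250) {
          observer.disconnect();
          resolve('Too many mutations, exiting.');
          return;
        }
        // restart the quiet-period timer on every mutation
        timeout = setTimeout(() => {
          observer.disconnect();
          resolve('DOM stabilized.');
        }, 1000);
      });
      // hard cap in case the page never settles
      timeout = setTimeout(() => {
        observer.disconnect();
        resolve('Observer timeout reached.');
      }, 5000);
      observer.observe(document.documentElement, { childList: true, subtree: true, attributes: true });
    }),
);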
@@ -214,27 +233,54 @@ const crawlSitemap = async (
  throw err; // Rethrow unknown errors
  }
  },
-
  ],
+ preNavigationHooks: [
+ async ({ request, page }, gotoOptions) => {
+ const url = request.url.toLowerCase();

- preNavigationHooks: isBasicAuth
- ? [
- async ({ page }) => {
+ const isNotSupportedDocument = disallowedListOfPatterns.some(pattern =>
+ url.startsWith(pattern),
+ );
+
+ if (isNotSupportedDocument) {
+ request.skipNavigation = true;
+ request.userData.isNotSupportedDocument = true;
+
+ // Log for verification (optional, but not required for correctness)
+ // console.log(`[SKIP] Not supported: ${request.url}`);
+
+ return;
+ }
+
+ // Set headers if basic auth
+ if (isBasicAuth) {
  await page.setExtraHTTPHeaders({
  Authorization: authHeader,
  ...extraHTTPHeaders,
  });
- },
- ]
- : [
- async () => {
+ } else {
  preNavigationHooks(extraHTTPHeaders);
- // insert other code here
- },
- ],
+ }
+ },
+ ],
  requestHandlerTimeoutSecs: 90,
  requestHandler: async ({ page, request, response, sendRequest }) => {
- await waitForPageLoaded(page, 10000);
+ // Log documents that are not supported
+ if (request.userData?.isNotSupportedDocument) {
+ guiInfoLog(guiInfoStatusTypes.SKIPPED, {
+ numScanned: urlsCrawled.scanned.length,
+ urlScanned: request.url,
+ });
+ urlsCrawled.userExcluded.push({
+ url: request.url,
+ pageTitle: request.url,
+ actualUrl: request.url, // because about:blank is not useful
+ metadata: STATUS_CODE_METADATA[1],
+ httpStatusCode: 0,
+ });
+
+ return;
+ }

  // Set basic auth header if needed
  if (isBasicAuth) {
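The new preNavigationHooks entry folds the old basic-auth-only hook into a single hook that first short-circuits unsupported document types: URLs matching disallowedListOfPatterns get request.skipNavigation = true so the browser never opens them, plus a userData flag that the requestHandler checks in order to record the URL as excluded instead of scanning it. A stripped-down sketch of the same Crawlee pattern; the prefix list and handler body here are illustrative:

import { PlaywrightCrawler } from 'crawlee';

// Illustrative prefixes; the package's real list is disallowedListOfPatterns in constants/constants.js.
const disallowedPrefixes = ['mailto:', 'tel:', 'ftp:'];

const crawler = new PlaywrightCrawler({
  preNavigationHooks: [
    async ({ request }) => {
      if (disallowedPrefixes.some(p => request.url.toLowerCase().startsWith(p))) {
        request.skipNavigation = true; // never navigate to this URL
        request.userData.isNotSupportedDocument = true; // flag for the request handler
      }
    },
  ],
  requestHandler: async ({ request }) => {
    if (request.userData?.isNotSupportedDocument) {
      // record the URL as excluded (the package pushes into urlsCrawled.userExcluded) and stop
      return;
    }
    // ...scan the page as usual...
  },
});

Skipped requests still reach the request handler (with an about:blank page), which is why the flag check sits at the top of the handler.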
@@ -247,39 +293,48 @@ const crawlSitemap = async (
  request.url = currentUrl.href;
  }

+ await waitForPageLoaded(page, 10000);
+
  const actualUrl = page.url() || request.loadedUrl || request.url;

- if (urlsCrawled.scanned.length >= maxRequestsPerCrawl) {
- crawler.autoscaledPool.abort();
+ const hasExceededDuration =
+ scanDuration > 0 && Date.now() - crawlStartTime > scanDuration * 1000;
+
+ if (urlsCrawled.scanned.length >= maxRequestsPerCrawl || hasExceededDuration) {
+ if (hasExceededDuration) {
+ console.log(`Crawl duration of ${scanDuration}s exceeded. Aborting sitemap crawl.`);
+ }
+ crawler.autoscaledPool.abort(); // stops new requests
  return;
  }

- if (request.skipNavigation && actualUrl === "about:blank") {
- if (!isScanPdfs) {
- guiInfoLog(guiInfoStatusTypes.SKIPPED, {
- numScanned: urlsCrawled.scanned.length,
- urlScanned: request.url,
- });
- urlsCrawled.userExcluded.push({
- url: request.url,
- pageTitle: request.url,
- actualUrl: request.url, // because about:blank is not useful
- metadata: STATUS_CODE_METADATA[1],
- httpStatusCode: 0,
- });
-
+ if (request.skipNavigation && actualUrl === 'about:blank') {
+ if (isScanPdfs) {
+ // pushes download promise into pdfDownloads
+ const { pdfFileName, url } = handlePdfDownload(
+ randomToken,
+ pdfDownloads,
+ request,
+ sendRequest,
+ urlsCrawled,
+ );
+
+ uuidToPdfMapping[pdfFileName] = url;
  return;
  }
- // pushes download promise into pdfDownloads
- const { pdfFileName, url } = handlePdfDownload(
- randomToken,
- pdfDownloads,
- request,
- sendRequest,
- urlsCrawled,
- );

- uuidToPdfMapping[pdfFileName] = url;
+ guiInfoLog(guiInfoStatusTypes.SKIPPED, {
+ numScanned: urlsCrawled.scanned.length,
+ urlScanned: request.url,
+ });
+ urlsCrawled.userExcluded.push({
+ url: request.url,
+ pageTitle: request.url,
+ actualUrl: request.url, // because about:blank is not useful
+ metadata: STATUS_CODE_METADATA[1],
+ httpStatusCode: 0,
+ });
+
  return;
  }

@@ -303,15 +358,11 @@ const crawlSitemap = async (
  }

  // This logic is different from crawlDomain, as it also checks if the pae is redirected before checking if it is excluded using exclusions.txt
- if (
- isRedirected &&
- blacklistedPatterns &&
- isSkippedUrl(actualUrl, blacklistedPatterns)
- ) {
+ if (isRedirected && blacklistedPatterns && isSkippedUrl(actualUrl, blacklistedPatterns)) {
  urlsCrawled.userExcluded.push({
  url: request.url,
  pageTitle: request.url,
- actualUrl: actualUrl,
+ actualUrl,
  metadata: STATUS_CODE_METADATA[0],
  httpStatusCode: 0,
  });
@@ -324,7 +375,7 @@ const crawlSitemap = async (
  }

  const results = await runAxeScript({ includeScreenshots, page, randomToken });
-
+
  guiInfoLog(guiInfoStatusTypes.SCANNED, {
  numScanned: urlsCrawled.scanned.length,
  urlScanned: request.url,
@@ -333,7 +384,7 @@ const crawlSitemap = async (
  urlsCrawled.scanned.push({
  url: urlWithoutAuth(request.url),
  pageTitle: results.pageTitle,
- actualUrl: actualUrl, // i.e. actualUrl
+ actualUrl, // i.e. actualUrl
  });

  urlsCrawled.scannedRedirects.push({
@@ -354,16 +405,17 @@ const crawlSitemap = async (
  if (isScanHtml) {
  // carry through the HTTP status metadata
  const status = response?.status();
- const metadata = typeof status === 'number'
- ? (STATUS_CODE_METADATA[status] || STATUS_CODE_METADATA[599])
- : STATUS_CODE_METADATA[2];
+ const metadata =
+ typeof status === 'number'
+ ? STATUS_CODE_METADATA[status] || STATUS_CODE_METADATA[599]
+ : STATUS_CODE_METADATA[2];

- urlsCrawled.invalid.push({
+ urlsCrawled.invalid.push({
  actualUrl,
  url: request.url,
  pageTitle: request.url,
  metadata,
- httpStatusCode: typeof status === 'number' ? status : 0
+ httpStatusCode: typeof status === 'number' ? status : 0,
  });
  }
  }
@@ -384,21 +436,31 @@ const crawlSitemap = async (
  });

  const status = response?.status();
- const metadata = typeof status === 'number'
- ? (STATUS_CODE_METADATA[status] || STATUS_CODE_METADATA[599])
- : STATUS_CODE_METADATA[2];
+ const metadata =
+ typeof status === 'number'
+ ? STATUS_CODE_METADATA[status] || STATUS_CODE_METADATA[599]
+ : STATUS_CODE_METADATA[2];

  urlsCrawled.error.push({
  url: request.url,
  pageTitle: request.url,
  actualUrl: request.url,
  metadata,
- httpStatusCode: typeof status === 'number' ? status : 0
+ httpStatusCode: typeof status === 'number' ? status : 0,
  });
  crawlee.log.error(`Failed Request - ${request.url}: ${request.errorMessages}`);
  },
  maxRequestsPerCrawl: Infinity,
  maxConcurrency: specifiedMaxConcurrency || maxConcurrency,
+ ...(process.env.OOBEE_FAST_CRAWLER && {
+ autoscaledPoolOptions: {
+ minConcurrency: specifiedMaxConcurrency ? Math.min(specifiedMaxConcurrency, 10) : 10,
+ maxConcurrency: specifiedMaxConcurrency || maxConcurrency,
+ desiredConcurrencyRatio: 0.98, // Increase threshold for scaling up
+ scaleUpStepRatio: 0.99, // Scale up faster
+ scaleDownStepRatio: 0.1, // Scale down slower
+ },
+ }),
  });

  await crawler.run();
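The OOBEE_FAST_CRAWLER block above relies on conditional object spread: when the environment variable is unset, the && expression evaluates to undefined and spreading it contributes nothing, so autoscaledPoolOptions only appears in "fast" mode. A small sketch of the construction with illustrative numbers:

// `...(cond && { key: value })` adds the property only when cond is truthy;
// spreading false or undefined into an object literal is a no-op.
const crawlerOptions = {
  maxConcurrency: 25,
  ...(process.env.OOBEE_FAST_CRAWLER && {
    autoscaledPoolOptions: { minConcurrency: 10, desiredConcurrencyRatio: 0.98 },
  }),
};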
@@ -430,6 +492,11 @@ const crawlSitemap = async (
  guiInfoLog(guiInfoStatusTypes.COMPLETED, {});
  }

+ if (scanDuration > 0) {
+ const elapsed = Math.round((Date.now() - crawlStartTime) / 1000);
+ console.log(`Crawl ended after ${elapsed}s (limit: ${scanDuration}s).`);
+ }
+
  return urlsCrawled;
  };

package/src/index.ts CHANGED
@@ -50,6 +50,7 @@ export type Answers = {
  zip: string;
  ruleset: RuleFlags[];
  generateJsonFiles: boolean;
+ scanDuration?: number;
  };

  export type Data = {
@@ -80,6 +81,7 @@ export type Data = {
  zip?: string;
  ruleset: RuleFlags[];
  generateJsonFiles: boolean;
+ scanDuration: number;
  };

  const userData = getUserDataTxt();
package/src/logs.ts CHANGED
@@ -23,8 +23,10 @@ const logFormat = printf(({ timestamp, level, message }) => {
  // All logs in combined.txt, error in errors.txt

  const consoleLogger = createLogger({
+ silent: !(process.env.RUNNING_FROM_PH_GUI || process.env.OOBEE_VERBOSE),
  format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), logFormat),
- transports: [new transports.Console()],
+ transports:
+ process.env.RUNNING_FROM_PH_GUI || process.env.OOBEE_VERBOSE ? [new transports.Console()] : [],
  });

  // No display in consoles, this will mostly be used within the interactive script to avoid disrupting the flow
@@ -33,7 +35,7 @@ const consoleLogger = createLogger({
  const silentLogger = createLogger({
  format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), logFormat),
  transports: [
- process.env.OOBEE_VERBOSE
+ process.env.OOBEE_VERBOSE || process.env.RUNNING_FROM_PH_GUI
  ? new transports.Console({ handleExceptions: true })
  : new transports.File({ filename: 'errors.txt', level: 'warn', handleExceptions: true }),
  ].filter(Boolean),
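After this change consoleLogger only writes to the terminal when the scan is driven from the GUI (RUNNING_FROM_PH_GUI) or run in verbose mode (OOBEE_VERBOSE); otherwise winston's silent flag mutes it and no Console transport is attached. A reduced sketch of the same winston setup, substituting winston's built-in format.simple() for the package's custom logFormat:

import { createLogger, format, transports } from 'winston';

const showConsole = Boolean(process.env.RUNNING_FROM_PH_GUI || process.env.OOBEE_VERBOSE);

// Silent unless explicitly opted in via environment variables.
const consoleLogger = createLogger({
  silent: !showConsole,
  format: format.combine(format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.simple()),
  transports: showConsole ? [new transports.Console()] : [],
});

consoleLogger.info('visible only when RUNNING_FROM_PH_GUI or OOBEE_VERBOSE is set');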
@@ -78,6 +78,7 @@ type AllIssues = {
  htmlETL: any;
  rules: string[];
  };
+ siteName: string;
  startTime: Date;
  endTime: Date;
  urlScanned: string;
@@ -130,7 +131,6 @@ const extractFileNames = async (directory: string): Promise<string[]> => {
  .then(allFiles => allFiles.filter(file => path.extname(file).toLowerCase() === '.json'))
  .catch(readdirError => {
  consoleLogger.info('An error has occurred when retrieving files, please try again.');
- silentLogger.error(`(extractFileNames) - ${readdirError}`);
  throw readdirError;
  });
  };
@@ -140,7 +140,6 @@ const parseContentToJson = async rPath =>
  .then(content => JSON.parse(content))
  .catch(parseError => {
  consoleLogger.info('An error has occurred when parsing the content, please try again.');
- silentLogger.error(`(parseContentToJson) - ${parseError}`);
  });

  const writeCsv = async (allIssues, storagePath) => {
@@ -684,7 +683,7 @@ async function compressJsonFileStreaming(inputPath: string, outputPath: string)
  // Pipe the streams:
  // read -> gzip -> base64 -> write
  await pipeline(readStream, gzip, base64Encode, writeStream);
- console.log(`File successfully compressed and saved to ${outputPath}`);
+ consoleLogger.info(`File successfully compressed and saved to ${outputPath}`);
  }

  const writeJsonFileAndCompressedJsonFile = async (
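The context lines show that compressJsonFileStreaming pipes read -> gzip -> base64 -> write; only its completion log moves from console.log to consoleLogger.info here. For reference, one way such a pipeline can be assembled with Node's stream utilities; the base64Encode transform below is an illustration, not the package's implementation, and it buffers to 3-byte boundaries so base64 padding only appears at the very end:

import { createReadStream, createWriteStream } from 'fs';
import { createGzip } from 'zlib';
import { Transform } from 'stream';
import { pipeline } from 'stream/promises';

let carry = Buffer.alloc(0);
const base64Encode = new Transform({
  transform(chunk: Buffer, _encoding, callback) {
    const data = Buffer.concat([carry, chunk]);
    const usable = data.length - (data.length % 3); // encode whole 3-byte groups only
    carry = data.subarray(usable);
    callback(null, data.subarray(0, usable).toString('base64'));
  },
  flush(callback) {
    callback(null, carry.toString('base64')); // final remainder, with padding if needed
  },
});

// read -> gzip -> base64 -> write (paths are hypothetical)
await pipeline(
  createReadStream('report.json'),
  createGzip(),
  base64Encode,
  createWriteStream('report.json.gz.b64'),
);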
@@ -1559,7 +1558,7 @@ const sendWcagBreakdownToSentry = async (
  const wcagCriteriaBreakdown: Record<string, any> = {};

  // Tag app version
- tags['version'] = appVersion;
+ tags.version = appVersion;

  // Get dynamic WCAG criteria map once
  const wcagCriteriaMap = await getWcagCriteriaMap();
@@ -1638,7 +1637,7 @@ const sendWcagBreakdownToSentry = async (
  tags['WCAG-MustFix-Occurrences'] = String(allIssues.items.mustFix.totalItems);
  tags['WCAG-GoodToFix-Occurrences'] = String(allIssues.items.goodToFix.totalItems);
  tags['WCAG-NeedsReview-Occurrences'] = String(allIssues.items.needsReview.totalItems);
-
+
  // Add number of pages scanned tag
  tags['Pages-Scanned-Count'] = String(allIssues.totalPagesScanned);
  } else if (pagesScannedCount > 0) {
@@ -1667,7 +1666,7 @@ const sendWcagBreakdownToSentry = async (
  ...(userData && userData.userId ? { id: userData.userId } : {}),
  },
  extra: {
- additionalScanMetadata: ruleIdJson != null ? JSON.stringify(ruleIdJson) : "{}",
+ additionalScanMetadata: ruleIdJson != null ? JSON.stringify(ruleIdJson) : '{}',
  wcagBreakdown: wcagCriteriaBreakdown,
  reportCounts: allIssues
  ? {
@@ -1766,6 +1765,7 @@ const generateArtifacts = async (
  htmlETL: oobeeAiHtmlETL,
  rules: oobeeAiRules,
  },
+ siteName: (pagesScanned[0]?.pageTitle ?? '').replace(/^\d+\s*:\s*/, '').trim(),
  startTime: scanDetails.startTime ? scanDetails.startTime : new Date(),
  endTime: scanDetails.endTime ? scanDetails.endTime : new Date(),
  urlScanned,
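The new siteName field is derived from the first scanned page's title, stripping a leading "<number> :" label with /^\d+\s*:\s*/ before trimming. A worked example with an assumed title:

// Assumed input; real titles come from pagesScanned[0].pageTitle.
const pageTitle = '1 : Ministry of Example';
const siteName = pageTitle.replace(/^\d+\s*:\s*/, '').trim(); // 'Ministry of Example'

Titles without the numeric prefix pass through unchanged, and a missing first page falls back to an empty string via the nullish coalescing.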
@@ -1845,7 +1845,6 @@ const generateArtifacts = async (
  }),
  ).catch(flattenIssuesError => {
  consoleLogger.info('An error has occurred when flattening the issues, please try again.');
- silentLogger.error(flattenIssuesError.stack);
  });

  flattenAndSortResults(allIssues, isCustomFlow);
@@ -1853,6 +1852,10 @@ const generateArtifacts = async (
  printMessage([
  'Scan Summary',
  '',
+ `Site Name: ${allIssues.siteName}`,
+ `URL: ${allIssues.urlScanned}`,
+ `Pages Scanned: ${allIssues.totalPagesScanned}`,
+ '',
  `Must Fix: ${allIssues.items.mustFix.rules.length} ${Object.keys(allIssues.items.mustFix.rules).length === 1 ? 'issue' : 'issues'} / ${allIssues.items.mustFix.totalItems} ${allIssues.items.mustFix.totalItems === 1 ? 'occurrence' : 'occurrences'}`,
  `Good to Fix: ${allIssues.items.goodToFix.rules.length} ${Object.keys(allIssues.items.goodToFix.rules).length === 1 ? 'issue' : 'issues'} / ${allIssues.items.goodToFix.totalItems} ${allIssues.items.goodToFix.totalItems === 1 ? 'occurrence' : 'occurrences'}`,
  `Manual Review Required: ${allIssues.items.needsReview.rules.length} ${Object.keys(allIssues.items.needsReview.rules).length === 1 ? 'issue' : 'issues'} / ${allIssues.items.needsReview.totalItems} ${allIssues.items.needsReview.totalItems === 1 ? 'occurrence' : 'occurrences'}`,
@@ -1882,8 +1885,13 @@ const generateArtifacts = async (
  allIssues.advancedScanOptionsSummaryItems.disableOobee,
  );

- // console.log(allIssues.progressPercentage);
- // console.log(allIssues.issuesPercentage);
+ consoleLogger.info(`Site Name: ${allIssues.siteName}`);
+ consoleLogger.info(`URL: ${allIssues.urlScanned}`);
+ consoleLogger.info(`Pages Scanned: ${allIssues.totalPagesScanned}`);
+ consoleLogger.info(`Start Time: ${allIssues.startTime}`);
+ consoleLogger.info(`End Time: ${allIssues.endTime}`);
+ const elapsedSeconds = (new Date(allIssues.endTime).getTime() - new Date(allIssues.startTime).getTime()) / 1000;
+ consoleLogger.info(`Elapsed Time: ${elapsedSeconds}s`);

  const getAxeImpactCount = (allIssues: AllIssues) => {
  const impactCount = {
@@ -2044,6 +2052,9 @@ const generateArtifacts = async (
  console.error('Error sending WCAG data to Sentry:', error);
  }

+ if (process.env.RUNNING_FROM_PH_GUI || process.env.OOBEE_VERBOSE)
+ console.log('Report generated successfully');
+
  return ruleIdJson;
  };

package/src/npmIndex.ts CHANGED
@@ -219,7 +219,7 @@ export const init = async ({
  try {
  await page.locator(elem).click();
  } catch (e) {
- silentLogger.info(e);
+ // do nothing if element is not found or not clickable
  }
  });

@@ -2,7 +2,7 @@
  import { createHash } from 'crypto';
  import fs from 'fs';
  import path from 'path';
- import { consoleLogger } from '../logs.js';
+ // import { silentLogger } from '../logs.js';
  import { Result } from 'axe-core';
  import { Page } from 'playwright';
  import { NodeResultWithScreenshot, ResultWithScreenshot } from '../crawlers/commonCrawlerFunc.js';
@@ -21,9 +21,11 @@ export const takeScreenshotForHTMLElements = async (

  for (const violation of violations) {
  if (screenshotCount >= maxScreenshots) {
- consoleLogger.warn(
+ /*
+ silentLogger.warn(
  `Skipping screenshots for ${violation.id} as maxScreenshots (${maxScreenshots}) exceeded. You can increase it by specifying a higher value when calling takeScreenshotForHTMLElements.`,
  );
+ */
  newViolations.push(violation);
  continue;
  }
@@ -32,7 +34,7 @@

  // Check if rule ID is 'oobee-grading-text-contents' and skip screenshot logic
  if (rule === 'oobee-grading-text-contents') {
- // consoleLogger.info('Skipping screenshot for rule oobee-grading-text-contents');
+ // silentLogger.info('Skipping screenshot for rule oobee-grading-text-contents');
  newViolations.push(violation); // Make sure it gets added
  continue;
  }
@@ -57,13 +59,13 @@
  nodeWithScreenshotPath.screenshotPath = screenshotPath;
  screenshotCount++;
  } else {
- consoleLogger.info(`Element at ${currLocator} is not visible`);
+ // silentLogger.info(`Element at ${currLocator} is not visible`);
  }

  break; // Stop looping after finding the first visible locator
  }
  } catch (e) {
- consoleLogger.info(`Unable to take element screenshot at ${selector}`);
+ // silentLogger.info(`Unable to take element screenshot at ${selector}`);
  }
  }
  newViolationNodes.push(nodeWithScreenshotPath);
@@ -12,7 +12,7 @@ import { Canvas, createCanvas, SKRSContext2D } from '@napi-rs/canvas';
  import assert from 'assert';
  import path from 'path';
  import { fileURLToPath } from 'url';
- import { silentLogger } from '../logs.js';
+ import { consoleLogger, silentLogger } from '../logs.js';
  import { TransformedRuleObject } from '../crawlers/pdfScanFunc.js';
  import { IBboxLocation, StructureTree, ViewportSize } from '../types/types.js';

@@ -213,7 +213,7 @@ const annotateAndSave = (origCanvas: Canvas, screenshotPath: string, viewport: V
  try {
  fs.writeFileSync(indexedScreenshotPath, croppedImage);
  } catch (e) {
- silentLogger.error('Error in writing screenshot:', e);
+ consoleLogger.error('Error in writing screenshot:', e);
  }

  canvasFactory.destroy({ canvas: croppedCanvas, context: croppedCtx });
@@ -29,7 +29,7 @@
  <button class="accordion-button collapsed" type="button" data-bs-toggle="collapse"
  data-bs-target="#wcagLinksAccordionContent" aria-expanded="false"
  aria-controls="wcagLinksAccordionContent">
- 20 (A & AA) and 5 (AAA) WCAG Success Criteria
+ 20 (A & AA) and 6 (AAA) WCAG Success Criteria
  </button>
  </div>
  <div id="wcagLinksAccordionContent" class="accordion-collapse collapse"
@@ -22,6 +22,7 @@ category summary is clicked %>
  // wcag1410: 'https://www.w3.org/TR/WCAG22/#reflow',
  wcag1412: 'https://www.w3.org/TR/WCAG22/#text-spacing',
  wcag211: 'https://www.w3.org/TR/WCAG22/#keyboard',
+ wcag213: 'https://www.w3.org/WAI/WCAG22/Understanding/keyboard-no-exception.html', // AAA
  wcag221: 'https://www.w3.org/TR/WCAG22/#timing-adjustable',
  wcag222: 'https://www.w3.org/TR/WCAG22/#pause-stop-hide',
  wcag224: 'https://www.w3.org/TR/WCAG22/#interruptions', // AAA
@@ -668,6 +668,17 @@
  #pagesScannedModal .not-scanned-url {
  word-wrap: break-word;
  }
+ #pagesScannedModal .metadata-text {
+ word-wrap: break-word;
+ font-size: 0.875rem;
+ color: var(--coral-red-100);
+ }
+ #pagesScannedModal .metadata-inline {
+ display: flex;
+ align-items: center;
+ gap: 0.5rem;
+ height: 16px;
+ }
  #pagesScannedModal p {
  overflow: hidden;
  white-space: nowrap;