@projectwallace/css-code-coverage 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -23,11 +23,13 @@ npm install @projectwallace/css-code-coverage
23
23
  ```ts
24
24
  import { calculate_coverage } from '@projectwallace/css-code-coverage'
25
25
 
26
- function parse_html(html) {
27
- return new DOMParser().parseFromString(html, 'text/html')
28
- }
26
+ let report = await calculate_coverage(coverage_data)
29
27
 
30
- let report = calculcate_coverage(coverage_data, parse_html)
28
+ // => report.line_coverage_ratio: 0.80
29
+ // => report.byte_coverage_ratio: 0.85
30
+ // => report.total_lines: 1000
31
+ // => report.covered_lines: 800
32
+ // etc.
31
33
  ```
32
34
 
33
35
  See [src/index.ts](https://github.com/projectwallace/css-code-coverage/blob/main/src/index.ts) for the data that's returned.
@@ -36,12 +38,30 @@ See [src/index.ts](https://github.com/projectwallace/css-code-coverage/blob/main
36
38
 
37
39
  There are two principal ways of collecting CSS Coverage data:
38
40
 
41
+ ### Coverage API (preferred)
42
+
43
+ Both Puppeteer and Playwright provide an API to programmatically get the coverage data, allowing you to put that directly into this library. Here is the gist:
44
+
45
+ ```ts
46
+ // Start collecting coverage
47
+ await page.coverage.startCSSCoverage()
48
+ // Load the page, do all sorts of interactions to increase coverage, etc.
49
+ await page.goto('http://example.com')
50
+ // Stop the coverage and store the result in a variable to pass along
51
+ let coverage = await page.coverage.stopCSSCoverage()
52
+
53
+ // Now we can process it
54
+ import { calculate_coverage } from '@projectwallace/css-code-coverage'
55
+
56
+ let report = await calculate_coverage(coverage)
57
+ ```
58
+
39
59
  ### Browser devtools
40
60
 
41
61
  In Edge, Chrome or chromium you can manually collect coverage in the browser's DevTools. In all cases you'll generate coverage data manually and the browser will let you export the data to a JSON file. Note that this JSON contains both JS coverage as well as the CSS coverage. Learn how it works:
42
62
 
43
63
  - Collect coverage in Microsoft Edge: https://learn.microsoft.com/en-us/microsoft-edge/devtools-guide-chromium/coverage/
44
- - Collect coevrage in Google Chrome: https://developer.chrome.com/docs/devtools/coverage/
64
+ - Collect coverage in Google Chrome: https://developer.chrome.com/docs/devtools/coverage/
45
65
 
46
66
  Additionally, DevTools Tips writes about it in their [explainer](https://devtoolstips.org/tips/en/detect-unused-code/).
47
67
 
@@ -67,46 +87,3 @@ for (let file of files) {
67
87
  coverage_data.push(...parse_coverage(json_content))
68
88
  }
69
89
  ```
70
-
71
- ### Coverage API
72
-
73
- Both Puppeteer and Playwright provide an API to programmatically get the coverage data, allowing you to put that directly into this library. Here is the gist:
74
-
75
- ```ts
76
- // Start collecting coverage
77
- await page.coverage.startCSSCoverage()
78
- // Load the page, do all sorts of interactions to increase coverage, etc.
79
- await page.goto('http://example.com')
80
- // Stop the coverage and store the result in a variable to pass along
81
- let coverage = await page.coverage.stopCSSCoverage()
82
-
83
- // Now we can process it
84
- import { calculate_coverage } from '@projectwallace/css-code-coverage'
85
-
86
- function parse_html(html) {
87
- return new DOMParser().parseFromString(html, 'text/html')
88
- }
89
-
90
- let report = calculcate_coverage(coverage, parse_html)
91
- ```
92
-
93
- ### Optional: coverage from `<style>` blocks
94
-
95
- Coverage generators also create coverage ranges for `<style>` blocks in HTML. If this applies to your code you should provide a HTML parser that we use to 'scrape' the HTML in case the browser gives us not just plain CSS contents. Depending on where you run this analysis you can use:
96
-
97
- 1. Browser:
98
- ```ts
99
- function parse_html(html) {
100
- return new DOMParser().parseFromString(html, 'text/html')
101
- }
102
- ```
103
- 1. Node (using [linkedom](https://github.com/WebReflection/linkedom) in this example, but other parsers could work, too):
104
-
105
- ```ts
106
- // $ npm install linkedom
107
- import { DOMParser } from 'linkedom'
108
-
109
- function parse_html(html: string) {
110
- return new DOMParser().parseFromString(html, 'text/html')
111
- }
112
- ```
package/dist/cli.js CHANGED
@@ -2,7 +2,6 @@
2
2
  import { parseArgs, styleText } from "node:util";
3
3
  import * as v from "valibot";
4
4
  import { format } from "@projectwallace/format-css";
5
- import { tokenTypes, tokenize } from "css-tree/tokenizer";
6
5
  import { readFile, readdir, stat } from "node:fs/promises";
7
6
  import { join } from "node:path";
8
7
 
@@ -90,69 +89,131 @@ function parse_coverage(input) {
90
89
  }
91
90
 
92
91
  //#endregion
93
- //#region src/lib/prettify.ts
94
- let irrelevant_tokens = new Set([
95
- tokenTypes.EOF,
96
- tokenTypes.BadString,
97
- tokenTypes.BadUrl,
98
- tokenTypes.WhiteSpace,
99
- tokenTypes.Semicolon,
100
- tokenTypes.Comment,
101
- tokenTypes.Colon
102
- ]);
103
- function prettify(coverage) {
104
- return coverage.map(({ url, text, ranges }) => {
105
- let formatted = format(text);
106
- let ext_ranges = ranges.map(({ start, end }) => ({
107
- start,
108
- end,
109
- tokens: []
110
- }));
111
- function is_in_range(start, end) {
112
- let range_index = 0;
113
- for (let range of ext_ranges) {
114
- if (range.start > end) return -1;
115
- if (range.start <= start && range.end >= end) return range_index;
116
- range_index++;
92
+ //#region src/lib/chunkify.ts
93
+ function merge(stylesheet) {
94
+ let new_chunks = [];
95
+ let previous_chunk;
96
+ for (let i = 0; i < stylesheet.chunks.length; i++) {
97
+ let chunk = stylesheet.chunks.at(i);
98
+ if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset))) continue;
99
+ let latest_chunk = new_chunks.at(-1);
100
+ if (i > 0 && previous_chunk && latest_chunk) {
101
+ if (previous_chunk.is_covered === chunk.is_covered) {
102
+ latest_chunk.end_offset = chunk.end_offset;
103
+ previous_chunk = chunk;
104
+ continue;
105
+ } else if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset)) || chunk.end_offset === chunk.start_offset) {
106
+ latest_chunk.end_offset = chunk.end_offset;
107
+ continue;
117
108
  }
118
- return -1;
119
109
  }
120
- let index = 0;
121
- tokenize(text, (type, start, end) => {
122
- if (irrelevant_tokens.has(type)) return;
123
- index++;
124
- let range_index = is_in_range(start, end);
125
- if (range_index !== -1) ext_ranges[range_index].tokens.push(index);
126
- });
127
- let new_tokens = /* @__PURE__ */ new Map();
128
- index = 0;
129
- tokenize(formatted, (type, start, end) => {
130
- if (irrelevant_tokens.has(type)) return;
131
- index++;
132
- new_tokens.set(index, {
133
- start,
134
- end
110
+ previous_chunk = chunk;
111
+ new_chunks.push(chunk);
112
+ }
113
+ return {
114
+ ...stylesheet,
115
+ chunks: new_chunks
116
+ };
117
+ }
118
+ function chunkify(stylesheet) {
119
+ let chunks = [];
120
+ let offset = 0;
121
+ for (let range of stylesheet.ranges) {
122
+ if (offset !== range.start) {
123
+ chunks.push({
124
+ start_offset: offset,
125
+ end_offset: range.start,
126
+ is_covered: false
135
127
  });
128
+ offset = range.start;
129
+ }
130
+ chunks.push({
131
+ start_offset: range.start,
132
+ end_offset: range.end,
133
+ is_covered: true
136
134
  });
137
- let new_ranges = [];
138
- for (let range of ext_ranges) {
139
- let start_token = new_tokens.get(range.tokens.at(0));
140
- let end_token = new_tokens.get(range.tokens.at(-1));
141
- if (start_token !== void 0 && end_token !== void 0) new_ranges.push({
142
- start: start_token.start,
143
- end: end_token.end
144
- });
135
+ offset = range.end;
136
+ }
137
+ if (offset !== stylesheet.text.length - 1) chunks.push({
138
+ start_offset: offset,
139
+ end_offset: stylesheet.text.length,
140
+ is_covered: false
141
+ });
142
+ return merge({
143
+ url: stylesheet.url,
144
+ text: stylesheet.text,
145
+ chunks
146
+ });
147
+ }
148
+
149
+ //#endregion
150
+ //#region src/lib/prettify.ts
151
+ function prettify(stylesheet) {
152
+ let line = 1;
153
+ let offset = 0;
154
+ let pretty_chunks = stylesheet.chunks.map((chunk, index) => {
155
+ let css = format(stylesheet.text.slice(chunk.start_offset, chunk.end_offset - 1)).trim();
156
+ if (chunk.is_covered) {
157
+ let is_last = index === stylesheet.chunks.length - 1;
158
+ if (index === 0) css = css + (is_last ? "" : "\n");
159
+ else if (index === stylesheet.chunks.length - 1) css = "\n" + css;
160
+ else css = "\n" + css + "\n";
145
161
  }
162
+ let line_count = css.split("\n").length;
163
+ let start_offset = offset;
164
+ let end_offset = offset + css.length - 1;
165
+ let start_line = line;
166
+ let end_line = line + line_count;
167
+ line = end_line;
168
+ offset = end_offset;
146
169
  return {
147
- url,
148
- text: formatted,
149
- ranges: new_ranges
170
+ ...chunk,
171
+ start_offset,
172
+ start_line,
173
+ end_line: end_line - 1,
174
+ end_offset,
175
+ css,
176
+ total_lines: end_line - start_line
150
177
  };
151
178
  });
179
+ return {
180
+ ...stylesheet,
181
+ chunks: pretty_chunks,
182
+ text: pretty_chunks.map(({ css }) => css).join("\n")
183
+ };
152
184
  }
153
185
 
154
186
  //#endregion
155
187
  //#region src/lib/decuplicate.ts
188
+ function concatenate(ranges) {
189
+ let result = [];
190
+ for (let range of ranges) if (result.length > 0 && (result.at(-1).end === range.start - 1 || result.at(-1).end === range.start)) result.at(-1).end = range.end;
191
+ else result.push(range);
192
+ return result;
193
+ }
194
+ function dedupe_list(ranges) {
195
+ let new_ranges = /* @__PURE__ */ new Set();
196
+ outer: for (let range of ranges) {
197
+ for (let processed_range of new_ranges) {
198
+ if (range.start <= processed_range.start && range.end >= processed_range.end) {
199
+ new_ranges.delete(processed_range);
200
+ new_ranges.add(range);
201
+ continue outer;
202
+ }
203
+ if (range.start >= processed_range.start && range.end <= processed_range.end) continue outer;
204
+ if (range.start < processed_range.end && range.start > processed_range.start && range.end > processed_range.end) {
205
+ new_ranges.delete(processed_range);
206
+ new_ranges.add({
207
+ start: processed_range.start,
208
+ end: range.end
209
+ });
210
+ continue outer;
211
+ }
212
+ }
213
+ new_ranges.add(range);
214
+ }
215
+ return new_ranges;
216
+ }
156
217
  /**
157
218
  * @description
158
219
  * prerequisites
@@ -182,7 +243,7 @@ function deduplicate_entries(entries) {
182
243
  return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({
183
244
  text,
184
245
  url,
185
- ranges
246
+ ranges: concatenate(dedupe_list(ranges.sort((a, b) => a.start - b.start))).sort((a, b) => a.start - b.start)
186
247
  }));
187
248
  }
188
249
 
@@ -201,9 +262,7 @@ function ext(url) {
201
262
  //#endregion
202
263
  //#region src/lib/remap-html.ts
203
264
  async function get_dom_parser() {
204
- if (typeof window !== "undefined" && "DOMParser" in window)
205
- /* v8 ignore */
206
- return new window.DOMParser();
265
+ if (typeof window !== "undefined" && "DOMParser" in window) return new window.DOMParser();
207
266
  let { DOMParser } = await import("./esm--VCpEgdH.js");
208
267
  return new DOMParser();
209
268
  }
@@ -263,12 +322,85 @@ async function filter_coverage(coverage) {
263
322
  return result;
264
323
  }
265
324
 
325
+ //#endregion
326
+ //#region src/lib/extend-ranges.ts
327
+ const AT_SIGN = 64;
328
+ const LONGEST_ATRULE_NAME = 28;
329
+ function extend_ranges(coverage) {
330
+ return coverage.map(({ text, ranges, url }) => {
331
+ return {
332
+ text,
333
+ ranges: ranges.map((range, index) => {
334
+ let prev_range = ranges[index - 1];
335
+ for (let i = range.start; i >= range.start - LONGEST_ATRULE_NAME; i--) {
336
+ if (prev_range && prev_range.end > i) break;
337
+ let char_position = i;
338
+ if (text.charCodeAt(char_position) === AT_SIGN) {
339
+ range.start = char_position;
340
+ let next_offset = range.end;
341
+ let next_char$1 = text.charAt(next_offset);
342
+ while (/\s/.test(next_char$1)) {
343
+ next_offset++;
344
+ next_char$1 = text.charAt(next_offset);
345
+ }
346
+ if (next_char$1 === "{") range.end = range.end + 1;
347
+ break;
348
+ }
349
+ }
350
+ let offset = range.end;
351
+ let next_char = text.charAt(offset);
352
+ while (/\s/.test(next_char)) {
353
+ offset++;
354
+ next_char = text.charAt(offset);
355
+ }
356
+ if (next_char === "}") range.end = offset + 1;
357
+ return range;
358
+ }),
359
+ url
360
+ };
361
+ });
362
+ }
363
+
266
364
  //#endregion
267
365
  //#region src/lib/index.ts
268
366
  function ratio(fraction, total) {
269
367
  if (total === 0) return 0;
270
368
  return fraction / total;
271
369
  }
370
+ function calculate_stylesheet_coverage({ text, url, chunks }) {
371
+ let uncovered_bytes = 0;
372
+ let covered_bytes = 0;
373
+ let total_bytes = 0;
374
+ let total_lines = 0;
375
+ let covered_lines = 0;
376
+ let uncovered_lines = 0;
377
+ for (let chunk of chunks) {
378
+ let lines = chunk.total_lines;
379
+ let bytes = chunk.end_offset - chunk.start_offset;
380
+ total_lines += lines;
381
+ total_bytes += bytes;
382
+ if (chunk.is_covered) {
383
+ covered_lines += lines;
384
+ covered_bytes += bytes;
385
+ } else {
386
+ uncovered_lines += lines;
387
+ uncovered_bytes += bytes;
388
+ }
389
+ }
390
+ return {
391
+ url,
392
+ text,
393
+ uncovered_bytes,
394
+ covered_bytes,
395
+ total_bytes,
396
+ line_coverage_ratio: ratio(covered_lines, total_lines),
397
+ byte_coverage_ratio: ratio(covered_bytes, total_bytes),
398
+ total_lines,
399
+ covered_lines,
400
+ uncovered_lines,
401
+ chunks
402
+ };
403
+ }
272
404
  /**
273
405
  * @description
274
406
  * CSS Code Coverage calculation
@@ -282,91 +414,14 @@ function ratio(fraction, total) {
282
414
  */
283
415
  async function calculate_coverage(coverage) {
284
416
  let total_files_found = coverage.length;
285
- let coverage_per_stylesheet = deduplicate_entries(prettify(await filter_coverage(coverage))).map(({ text, url, ranges }) => {
286
- function is_line_covered(line, start_offset) {
287
- let end = start_offset + line.length;
288
- let next_offset = end + 1;
289
- let is_empty = /^\s*$/.test(line);
290
- let is_closing_brace = line.endsWith("}");
291
- if (!is_empty && !is_closing_brace) for (let range of ranges) {
292
- if (range.start > end || range.end < start_offset) continue;
293
- if (range.start <= start_offset && range.end >= end) return true;
294
- else if (line.startsWith("@") && range.start > start_offset && range.start < next_offset) return true;
295
- }
296
- return false;
297
- }
298
- let lines = text.split("\n");
299
- let total_file_lines = lines.length;
300
- let line_coverage = new Uint8Array(total_file_lines);
301
- let file_lines_covered = 0;
302
- let file_total_bytes = text.length;
303
- let file_bytes_covered = 0;
304
- let offset = 0;
305
- for (let index = 0; index < lines.length; index++) {
306
- let line = lines[index];
307
- let start = offset;
308
- let next_offset = offset + line.length + 1;
309
- let is_empty = /^\s*$/.test(line);
310
- let is_closing_brace = line.endsWith("}");
311
- let is_in_range = is_line_covered(line, start);
312
- let is_covered = false;
313
- let prev_is_covered = index > 0 && line_coverage[index - 1] === 1;
314
- if (is_in_range && !is_closing_brace && !is_empty) is_covered = true;
315
- else if ((is_empty || is_closing_brace) && prev_is_covered) is_covered = true;
316
- else if (is_empty && !prev_is_covered && is_line_covered(lines[index + 1], next_offset)) is_covered = true;
317
- line_coverage[index] = is_covered ? 1 : 0;
318
- if (is_covered) {
319
- file_lines_covered++;
320
- file_bytes_covered += line.length + 1;
321
- }
322
- offset = next_offset;
323
- }
324
- let chunks = [{
325
- start_line: 1,
326
- is_covered: line_coverage[0] === 1,
327
- end_line: 1,
328
- total_lines: 1
329
- }];
330
- for (let index = 1; index < line_coverage.length; index++) {
331
- let is_covered = line_coverage.at(index);
332
- if (is_covered !== line_coverage.at(index - 1)) {
333
- let last_chunk$1 = chunks.at(-1);
334
- last_chunk$1.end_line = index;
335
- last_chunk$1.total_lines = index - last_chunk$1.start_line + 1;
336
- chunks.push({
337
- start_line: index + 1,
338
- is_covered: is_covered === 1,
339
- end_line: index,
340
- total_lines: 0
341
- });
342
- }
343
- }
344
- let last_chunk = chunks.at(-1);
345
- last_chunk.total_lines = line_coverage.length + 1 - last_chunk.start_line;
346
- last_chunk.end_line = line_coverage.length;
347
- return {
348
- url,
349
- text,
350
- ranges,
351
- unused_bytes: file_total_bytes - file_bytes_covered,
352
- used_bytes: file_bytes_covered,
353
- total_bytes: file_total_bytes,
354
- line_coverage_ratio: ratio(file_lines_covered, total_file_lines),
355
- byte_coverage_ratio: ratio(file_bytes_covered, file_total_bytes),
356
- line_coverage,
357
- total_lines: total_file_lines,
358
- covered_lines: file_lines_covered,
359
- uncovered_lines: total_file_lines - file_lines_covered,
360
- chunks
361
- };
362
- });
417
+ let coverage_per_stylesheet = extend_ranges(deduplicate_entries(await filter_coverage(coverage))).map((sheet) => chunkify(sheet)).map((sheet) => prettify(sheet)).map((stylesheet) => calculate_stylesheet_coverage(stylesheet));
363
418
  let { total_lines, total_covered_lines, total_uncovered_lines, total_bytes, total_used_bytes, total_unused_bytes } = coverage_per_stylesheet.reduce((totals, sheet) => {
364
419
  totals.total_lines += sheet.total_lines;
365
420
  totals.total_covered_lines += sheet.covered_lines;
366
421
  totals.total_uncovered_lines += sheet.uncovered_lines;
367
422
  totals.total_bytes += sheet.total_bytes;
368
- totals.total_used_bytes += sheet.used_bytes;
369
- totals.total_unused_bytes += sheet.unused_bytes;
423
+ totals.total_used_bytes += sheet.covered_bytes;
424
+ totals.total_unused_bytes += sheet.uncovered_bytes;
370
425
  return totals;
371
426
  }, {
372
427
  total_lines: 0,
@@ -380,9 +435,9 @@ async function calculate_coverage(coverage) {
380
435
  total_files_found,
381
436
  total_bytes,
382
437
  total_lines,
383
- used_bytes: total_used_bytes,
438
+ covered_bytes: total_used_bytes,
384
439
  covered_lines: total_covered_lines,
385
- unused_bytes: total_unused_bytes,
440
+ uncovered_bytes: total_unused_bytes,
386
441
  uncovered_lines: total_uncovered_lines,
387
442
  byte_coverage_ratio: ratio(total_used_bytes, total_bytes),
388
443
  line_coverage_ratio: ratio(total_covered_lines, total_lines),
@@ -480,15 +535,10 @@ function print({ report, context }, params) {
480
535
  }
481
536
  console.log(styleText("dim", "─".repeat(terminal_width)));
482
537
  let lines = sheet.text.split("\n");
483
- let line_coverage = sheet.line_coverage;
484
- for (let i = 0; i < lines.length; i++) {
485
- if (line_coverage[i] === 1) continue;
486
- for (let j = i - NUM_LEADING_LINES; j < i; j++) if (j >= 0) console.log(styleText("dim", line_number(j)), styleText("dim", indent(lines[j])));
487
- while (line_coverage[i] === 0) {
488
- console.log(styleText("red", line_number(i, false)), indent(lines[i]));
489
- i++;
490
- }
491
- for (let end = i + NUM_TRAILING_LINES; i < end && i < lines.length; i++) console.log(styleText("dim", line_number(i)), styleText("dim", indent(lines[i])));
538
+ for (let chunk of sheet.chunks.filter((chunk$1) => !chunk$1.is_covered)) {
539
+ for (let x = Math.max(chunk.start_line - NUM_LEADING_LINES, 0); x < chunk.start_line; x++) console.log(styleText("dim", line_number(x)), styleText("dim", indent(lines[x - 1])));
540
+ for (let i = chunk.start_line; i <= chunk.end_line; i++) console.log(styleText("red", line_number(i, false)), indent(lines[i - 1]));
541
+ for (let y = chunk.end_line; y < Math.min(chunk.end_line + NUM_TRAILING_LINES, lines.length); y++) console.log(styleText("dim", line_number(y)), styleText("dim", indent(lines[y - 1])));
492
542
  console.log();
493
543
  }
494
544
  }
package/dist/index.d.ts CHANGED
@@ -24,10 +24,28 @@ declare function parse_coverage(input: string): {
24
24
  }[];
25
25
  }[];
26
26
  //#endregion
27
+ //#region src/lib/chunkify.d.ts
28
+ type Chunk = {
29
+ start_offset: number;
30
+ end_offset: number;
31
+ is_covered: boolean;
32
+ };
33
+ type ChunkedCoverage = Omit<Coverage, 'ranges'> & {
34
+ chunks: Chunk[];
35
+ };
36
+ //#endregion
37
+ //#region src/lib/prettify.d.ts
38
+ type PrettifiedChunk = ChunkedCoverage['chunks'][number] & {
39
+ start_line: number;
40
+ end_line: number;
41
+ total_lines: number;
42
+ css: string;
43
+ };
44
+ //#endregion
27
45
  //#region src/lib/index.d.ts
28
46
  type CoverageData = {
29
- unused_bytes: number;
30
- used_bytes: number;
47
+ uncovered_bytes: number;
48
+ covered_bytes: number;
31
49
  total_bytes: number;
32
50
  line_coverage_ratio: number;
33
51
  byte_coverage_ratio: number;
@@ -38,14 +56,7 @@ type CoverageData = {
38
56
  type StylesheetCoverage = CoverageData & {
39
57
  url: string;
40
58
  text: string;
41
- ranges: Range[];
42
- line_coverage: Uint8Array;
43
- chunks: {
44
- is_covered: boolean;
45
- start_line: number;
46
- end_line: number;
47
- total_lines: number;
48
- }[];
59
+ chunks: PrettifiedChunk[];
49
60
  };
50
61
  type CoverageResult = CoverageData & {
51
62
  total_files_found: number;
package/dist/index.js CHANGED
@@ -1,6 +1,5 @@
1
1
  import * as v from "valibot";
2
2
  import { format } from "@projectwallace/format-css";
3
- import { tokenTypes, tokenize } from "css-tree/tokenizer";
4
3
 
5
4
  //#region src/lib/parse-coverage.ts
6
5
  let RangeSchema = v.object({
@@ -25,69 +24,131 @@ function parse_coverage(input) {
25
24
  }
26
25
 
27
26
  //#endregion
28
- //#region src/lib/prettify.ts
29
- let irrelevant_tokens = new Set([
30
- tokenTypes.EOF,
31
- tokenTypes.BadString,
32
- tokenTypes.BadUrl,
33
- tokenTypes.WhiteSpace,
34
- tokenTypes.Semicolon,
35
- tokenTypes.Comment,
36
- tokenTypes.Colon
37
- ]);
38
- function prettify(coverage) {
39
- return coverage.map(({ url, text, ranges }) => {
40
- let formatted = format(text);
41
- let ext_ranges = ranges.map(({ start, end }) => ({
42
- start,
43
- end,
44
- tokens: []
45
- }));
46
- function is_in_range(start, end) {
47
- let range_index = 0;
48
- for (let range of ext_ranges) {
49
- if (range.start > end) return -1;
50
- if (range.start <= start && range.end >= end) return range_index;
51
- range_index++;
27
+ //#region src/lib/chunkify.ts
28
+ function merge(stylesheet) {
29
+ let new_chunks = [];
30
+ let previous_chunk;
31
+ for (let i = 0; i < stylesheet.chunks.length; i++) {
32
+ let chunk = stylesheet.chunks.at(i);
33
+ if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset))) continue;
34
+ let latest_chunk = new_chunks.at(-1);
35
+ if (i > 0 && previous_chunk && latest_chunk) {
36
+ if (previous_chunk.is_covered === chunk.is_covered) {
37
+ latest_chunk.end_offset = chunk.end_offset;
38
+ previous_chunk = chunk;
39
+ continue;
40
+ } else if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset)) || chunk.end_offset === chunk.start_offset) {
41
+ latest_chunk.end_offset = chunk.end_offset;
42
+ continue;
52
43
  }
53
- return -1;
54
44
  }
55
- let index = 0;
56
- tokenize(text, (type, start, end) => {
57
- if (irrelevant_tokens.has(type)) return;
58
- index++;
59
- let range_index = is_in_range(start, end);
60
- if (range_index !== -1) ext_ranges[range_index].tokens.push(index);
61
- });
62
- let new_tokens = /* @__PURE__ */ new Map();
63
- index = 0;
64
- tokenize(formatted, (type, start, end) => {
65
- if (irrelevant_tokens.has(type)) return;
66
- index++;
67
- new_tokens.set(index, {
68
- start,
69
- end
45
+ previous_chunk = chunk;
46
+ new_chunks.push(chunk);
47
+ }
48
+ return {
49
+ ...stylesheet,
50
+ chunks: new_chunks
51
+ };
52
+ }
53
+ function chunkify(stylesheet) {
54
+ let chunks = [];
55
+ let offset = 0;
56
+ for (let range of stylesheet.ranges) {
57
+ if (offset !== range.start) {
58
+ chunks.push({
59
+ start_offset: offset,
60
+ end_offset: range.start,
61
+ is_covered: false
70
62
  });
63
+ offset = range.start;
64
+ }
65
+ chunks.push({
66
+ start_offset: range.start,
67
+ end_offset: range.end,
68
+ is_covered: true
71
69
  });
72
- let new_ranges = [];
73
- for (let range of ext_ranges) {
74
- let start_token = new_tokens.get(range.tokens.at(0));
75
- let end_token = new_tokens.get(range.tokens.at(-1));
76
- if (start_token !== void 0 && end_token !== void 0) new_ranges.push({
77
- start: start_token.start,
78
- end: end_token.end
79
- });
70
+ offset = range.end;
71
+ }
72
+ if (offset !== stylesheet.text.length - 1) chunks.push({
73
+ start_offset: offset,
74
+ end_offset: stylesheet.text.length,
75
+ is_covered: false
76
+ });
77
+ return merge({
78
+ url: stylesheet.url,
79
+ text: stylesheet.text,
80
+ chunks
81
+ });
82
+ }
83
+
84
+ //#endregion
85
+ //#region src/lib/prettify.ts
86
+ function prettify(stylesheet) {
87
+ let line = 1;
88
+ let offset = 0;
89
+ let pretty_chunks = stylesheet.chunks.map((chunk, index) => {
90
+ let css = format(stylesheet.text.slice(chunk.start_offset, chunk.end_offset - 1)).trim();
91
+ if (chunk.is_covered) {
92
+ let is_last = index === stylesheet.chunks.length - 1;
93
+ if (index === 0) css = css + (is_last ? "" : "\n");
94
+ else if (index === stylesheet.chunks.length - 1) css = "\n" + css;
95
+ else css = "\n" + css + "\n";
80
96
  }
97
+ let line_count = css.split("\n").length;
98
+ let start_offset = offset;
99
+ let end_offset = offset + css.length - 1;
100
+ let start_line = line;
101
+ let end_line = line + line_count;
102
+ line = end_line;
103
+ offset = end_offset;
81
104
  return {
82
- url,
83
- text: formatted,
84
- ranges: new_ranges
105
+ ...chunk,
106
+ start_offset,
107
+ start_line,
108
+ end_line: end_line - 1,
109
+ end_offset,
110
+ css,
111
+ total_lines: end_line - start_line
85
112
  };
86
113
  });
114
+ return {
115
+ ...stylesheet,
116
+ chunks: pretty_chunks,
117
+ text: pretty_chunks.map(({ css }) => css).join("\n")
118
+ };
87
119
  }
88
120
 
89
121
  //#endregion
90
122
  //#region src/lib/decuplicate.ts
123
+ function concatenate(ranges) {
124
+ let result = [];
125
+ for (let range of ranges) if (result.length > 0 && (result.at(-1).end === range.start - 1 || result.at(-1).end === range.start)) result.at(-1).end = range.end;
126
+ else result.push(range);
127
+ return result;
128
+ }
129
+ function dedupe_list(ranges) {
130
+ let new_ranges = /* @__PURE__ */ new Set();
131
+ outer: for (let range of ranges) {
132
+ for (let processed_range of new_ranges) {
133
+ if (range.start <= processed_range.start && range.end >= processed_range.end) {
134
+ new_ranges.delete(processed_range);
135
+ new_ranges.add(range);
136
+ continue outer;
137
+ }
138
+ if (range.start >= processed_range.start && range.end <= processed_range.end) continue outer;
139
+ if (range.start < processed_range.end && range.start > processed_range.start && range.end > processed_range.end) {
140
+ new_ranges.delete(processed_range);
141
+ new_ranges.add({
142
+ start: processed_range.start,
143
+ end: range.end
144
+ });
145
+ continue outer;
146
+ }
147
+ }
148
+ new_ranges.add(range);
149
+ }
150
+ return new_ranges;
151
+ }
91
152
  /**
92
153
  * @description
93
154
  * prerequisites
@@ -117,7 +178,7 @@ function deduplicate_entries(entries) {
117
178
  return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({
118
179
  text,
119
180
  url,
120
- ranges
181
+ ranges: concatenate(dedupe_list(ranges.sort((a, b) => a.start - b.start))).sort((a, b) => a.start - b.start)
121
182
  }));
122
183
  }
123
184
 
@@ -136,9 +197,7 @@ function ext(url) {
136
197
  //#endregion
137
198
  //#region src/lib/remap-html.ts
138
199
  async function get_dom_parser() {
139
- if (typeof window !== "undefined" && "DOMParser" in window)
140
- /* v8 ignore */
141
- return new window.DOMParser();
200
+ if (typeof window !== "undefined" && "DOMParser" in window) return new window.DOMParser();
142
201
  let { DOMParser } = await import("./esm-CWr4VY0v.js");
143
202
  return new DOMParser();
144
203
  }
@@ -198,12 +257,85 @@ async function filter_coverage(coverage) {
198
257
  return result;
199
258
  }
200
259
 
260
+ //#endregion
261
+ //#region src/lib/extend-ranges.ts
262
+ const AT_SIGN = 64;
263
+ const LONGEST_ATRULE_NAME = 28;
264
+ function extend_ranges(coverage) {
265
+ return coverage.map(({ text, ranges, url }) => {
266
+ return {
267
+ text,
268
+ ranges: ranges.map((range, index) => {
269
+ let prev_range = ranges[index - 1];
270
+ for (let i = range.start; i >= range.start - LONGEST_ATRULE_NAME; i--) {
271
+ if (prev_range && prev_range.end > i) break;
272
+ let char_position = i;
273
+ if (text.charCodeAt(char_position) === AT_SIGN) {
274
+ range.start = char_position;
275
+ let next_offset = range.end;
276
+ let next_char$1 = text.charAt(next_offset);
277
+ while (/\s/.test(next_char$1)) {
278
+ next_offset++;
279
+ next_char$1 = text.charAt(next_offset);
280
+ }
281
+ if (next_char$1 === "{") range.end = range.end + 1;
282
+ break;
283
+ }
284
+ }
285
+ let offset = range.end;
286
+ let next_char = text.charAt(offset);
287
+ while (/\s/.test(next_char)) {
288
+ offset++;
289
+ next_char = text.charAt(offset);
290
+ }
291
+ if (next_char === "}") range.end = offset + 1;
292
+ return range;
293
+ }),
294
+ url
295
+ };
296
+ });
297
+ }
298
+
201
299
  //#endregion
202
300
  //#region src/lib/index.ts
203
301
  function ratio(fraction, total) {
204
302
  if (total === 0) return 0;
205
303
  return fraction / total;
206
304
  }
305
+ function calculate_stylesheet_coverage({ text, url, chunks }) {
306
+ let uncovered_bytes = 0;
307
+ let covered_bytes = 0;
308
+ let total_bytes = 0;
309
+ let total_lines = 0;
310
+ let covered_lines = 0;
311
+ let uncovered_lines = 0;
312
+ for (let chunk of chunks) {
313
+ let lines = chunk.total_lines;
314
+ let bytes = chunk.end_offset - chunk.start_offset;
315
+ total_lines += lines;
316
+ total_bytes += bytes;
317
+ if (chunk.is_covered) {
318
+ covered_lines += lines;
319
+ covered_bytes += bytes;
320
+ } else {
321
+ uncovered_lines += lines;
322
+ uncovered_bytes += bytes;
323
+ }
324
+ }
325
+ return {
326
+ url,
327
+ text,
328
+ uncovered_bytes,
329
+ covered_bytes,
330
+ total_bytes,
331
+ line_coverage_ratio: ratio(covered_lines, total_lines),
332
+ byte_coverage_ratio: ratio(covered_bytes, total_bytes),
333
+ total_lines,
334
+ covered_lines,
335
+ uncovered_lines,
336
+ chunks
337
+ };
338
+ }
207
339
  /**
208
340
  * @description
209
341
  * CSS Code Coverage calculation
@@ -217,91 +349,14 @@ function ratio(fraction, total) {
217
349
  */
218
350
  async function calculate_coverage(coverage) {
219
351
  let total_files_found = coverage.length;
220
- let coverage_per_stylesheet = deduplicate_entries(prettify(await filter_coverage(coverage))).map(({ text, url, ranges }) => {
221
- function is_line_covered(line, start_offset) {
222
- let end = start_offset + line.length;
223
- let next_offset = end + 1;
224
- let is_empty = /^\s*$/.test(line);
225
- let is_closing_brace = line.endsWith("}");
226
- if (!is_empty && !is_closing_brace) for (let range of ranges) {
227
- if (range.start > end || range.end < start_offset) continue;
228
- if (range.start <= start_offset && range.end >= end) return true;
229
- else if (line.startsWith("@") && range.start > start_offset && range.start < next_offset) return true;
230
- }
231
- return false;
232
- }
233
- let lines = text.split("\n");
234
- let total_file_lines = lines.length;
235
- let line_coverage = new Uint8Array(total_file_lines);
236
- let file_lines_covered = 0;
237
- let file_total_bytes = text.length;
238
- let file_bytes_covered = 0;
239
- let offset = 0;
240
- for (let index = 0; index < lines.length; index++) {
241
- let line = lines[index];
242
- let start = offset;
243
- let next_offset = offset + line.length + 1;
244
- let is_empty = /^\s*$/.test(line);
245
- let is_closing_brace = line.endsWith("}");
246
- let is_in_range = is_line_covered(line, start);
247
- let is_covered = false;
248
- let prev_is_covered = index > 0 && line_coverage[index - 1] === 1;
249
- if (is_in_range && !is_closing_brace && !is_empty) is_covered = true;
250
- else if ((is_empty || is_closing_brace) && prev_is_covered) is_covered = true;
251
- else if (is_empty && !prev_is_covered && is_line_covered(lines[index + 1], next_offset)) is_covered = true;
252
- line_coverage[index] = is_covered ? 1 : 0;
253
- if (is_covered) {
254
- file_lines_covered++;
255
- file_bytes_covered += line.length + 1;
256
- }
257
- offset = next_offset;
258
- }
259
- let chunks = [{
260
- start_line: 1,
261
- is_covered: line_coverage[0] === 1,
262
- end_line: 1,
263
- total_lines: 1
264
- }];
265
- for (let index = 1; index < line_coverage.length; index++) {
266
- let is_covered = line_coverage.at(index);
267
- if (is_covered !== line_coverage.at(index - 1)) {
268
- let last_chunk$1 = chunks.at(-1);
269
- last_chunk$1.end_line = index;
270
- last_chunk$1.total_lines = index - last_chunk$1.start_line + 1;
271
- chunks.push({
272
- start_line: index + 1,
273
- is_covered: is_covered === 1,
274
- end_line: index,
275
- total_lines: 0
276
- });
277
- }
278
- }
279
- let last_chunk = chunks.at(-1);
280
- last_chunk.total_lines = line_coverage.length + 1 - last_chunk.start_line;
281
- last_chunk.end_line = line_coverage.length;
282
- return {
283
- url,
284
- text,
285
- ranges,
286
- unused_bytes: file_total_bytes - file_bytes_covered,
287
- used_bytes: file_bytes_covered,
288
- total_bytes: file_total_bytes,
289
- line_coverage_ratio: ratio(file_lines_covered, total_file_lines),
290
- byte_coverage_ratio: ratio(file_bytes_covered, file_total_bytes),
291
- line_coverage,
292
- total_lines: total_file_lines,
293
- covered_lines: file_lines_covered,
294
- uncovered_lines: total_file_lines - file_lines_covered,
295
- chunks
296
- };
297
- });
352
+ let coverage_per_stylesheet = extend_ranges(deduplicate_entries(await filter_coverage(coverage))).map((sheet) => chunkify(sheet)).map((sheet) => prettify(sheet)).map((stylesheet) => calculate_stylesheet_coverage(stylesheet));
298
353
  let { total_lines, total_covered_lines, total_uncovered_lines, total_bytes, total_used_bytes, total_unused_bytes } = coverage_per_stylesheet.reduce((totals, sheet) => {
299
354
  totals.total_lines += sheet.total_lines;
300
355
  totals.total_covered_lines += sheet.covered_lines;
301
356
  totals.total_uncovered_lines += sheet.uncovered_lines;
302
357
  totals.total_bytes += sheet.total_bytes;
303
- totals.total_used_bytes += sheet.used_bytes;
304
- totals.total_unused_bytes += sheet.unused_bytes;
358
+ totals.total_used_bytes += sheet.covered_bytes;
359
+ totals.total_unused_bytes += sheet.uncovered_bytes;
305
360
  return totals;
306
361
  }, {
307
362
  total_lines: 0,
@@ -315,9 +370,9 @@ async function calculate_coverage(coverage) {
315
370
  total_files_found,
316
371
  total_bytes,
317
372
  total_lines,
318
- used_bytes: total_used_bytes,
373
+ covered_bytes: total_used_bytes,
319
374
  covered_lines: total_covered_lines,
320
- unused_bytes: total_unused_bytes,
375
+ uncovered_bytes: total_unused_bytes,
321
376
  uncovered_lines: total_uncovered_lines,
322
377
  byte_coverage_ratio: ratio(total_used_bytes, total_bytes),
323
378
  line_coverage_ratio: ratio(total_covered_lines, total_lines),
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@projectwallace/css-code-coverage",
3
- "version": "0.5.0",
4
- "description": "",
3
+ "version": "0.6.0",
4
+ "description": "Generate useful CSS Code Coverage report from browser-reported coverage",
5
5
  "author": "Bart Veneman <bart@projectwallace.com>",
6
6
  "repository": {
7
7
  "type": "git",
@@ -55,7 +55,6 @@
55
55
  },
56
56
  "dependencies": {
57
57
  "@projectwallace/format-css": "^2.1.1",
58
- "css-tree": "^3.1.0",
59
58
  "valibot": "^1.1.0"
60
59
  }
61
60
  }