@projectwallace/css-code-coverage 0.4.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -24,20 +24,28 @@ declare function parse_coverage(input: string): {
  }[];
  }[];
  //#endregion
- //#region src/lib/types.d.ts
- type NodeList = Iterable<{
- textContent: string;
- }> | NodeListOf<HTMLStyleElement>;
- interface Parser {
- (html: string): {
- querySelectorAll: (selector: string) => NodeList;
- };
- }
+ //#region src/lib/chunkify.d.ts
+ type Chunk = {
+ start_offset: number;
+ end_offset: number;
+ is_covered: boolean;
+ };
+ type ChunkedCoverage = Omit<Coverage, 'ranges'> & {
+ chunks: Chunk[];
+ };
+ //#endregion
+ //#region src/lib/prettify.d.ts
+ type PrettifiedChunk = ChunkedCoverage['chunks'][number] & {
+ start_line: number;
+ end_line: number;
+ total_lines: number;
+ css: string;
+ };
  //#endregion
  //#region src/lib/index.d.ts
  type CoverageData = {
- unused_bytes: number;
- used_bytes: number;
+ uncovered_bytes: number;
+ covered_bytes: number;
  total_bytes: number;
  line_coverage_ratio: number;
  byte_coverage_ratio: number;
@@ -48,14 +56,7 @@ type CoverageData = {
  type StylesheetCoverage = CoverageData & {
  url: string;
  text: string;
- ranges: Range[];
- line_coverage: Uint8Array;
- chunks: {
- is_covered: boolean;
- start_line: number;
- end_line: number;
- total_lines: number;
- }[];
+ chunks: PrettifiedChunk[];
  };
  type CoverageResult = CoverageData & {
  total_files_found: number;
@@ -73,6 +74,6 @@ type CoverageResult = CoverageData & {
  * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
  * 5. Calculate line-coverage, byte-coverage per stylesheet
  */
- declare function calculate_coverage(coverage: Coverage[], parse_html?: Parser): CoverageResult;
+ declare function calculate_coverage(coverage: Coverage[]): Promise<CoverageResult>;
  //#endregion
- export { type Coverage, CoverageData, CoverageResult, type Parser, type Range, StylesheetCoverage, calculate_coverage, parse_coverage };
+ export { type Coverage, CoverageData, CoverageResult, type Range, StylesheetCoverage, calculate_coverage, parse_coverage };
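
The typings above change the public API: calculate_coverage is now async and drops the parse_html parameter, and the byte counters are renamed from used_bytes/unused_bytes to covered_bytes/uncovered_bytes. A minimal consumer sketch against these 0.6.0 typings follows; the coverage.json path and the DevTools/Puppeteer recording step are assumptions, and parse_coverage is assumed to produce the Coverage[] that calculate_coverage expects.

import { readFile } from 'node:fs/promises'
import { parse_coverage, calculate_coverage } from '@projectwallace/css-code-coverage'

// Hypothetical input: coverage JSON recorded via Chrome DevTools or Puppeteer's coverage API.
let raw = await readFile('./coverage.json', 'utf-8')

// calculate_coverage now returns a Promise and no longer takes a parse_html argument;
// HTML entries are handled internally (see the remap-html changes in dist/index.js below).
let result = await calculate_coverage(parse_coverage(raw))

// Renamed fields in 0.6.0: covered_bytes / uncovered_bytes (previously used_bytes / unused_bytes).
console.log(result.covered_bytes, result.uncovered_bytes, result.byte_coverage_ratio)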
package/dist/index.js CHANGED
@@ -1,6 +1,5 @@
  import * as v from "valibot";
  import { format } from "@projectwallace/format-css";
- import { tokenTypes, tokenize } from "css-tree/tokenizer";

  //#region src/lib/parse-coverage.ts
  let RangeSchema = v.object({
@@ -25,69 +24,131 @@ function parse_coverage(input) {
  }

  //#endregion
- //#region src/lib/prettify.ts
- let irrelevant_tokens = new Set([
- tokenTypes.EOF,
- tokenTypes.BadString,
- tokenTypes.BadUrl,
- tokenTypes.WhiteSpace,
- tokenTypes.Semicolon,
- tokenTypes.Comment,
- tokenTypes.Colon
- ]);
- function prettify(coverage) {
- return coverage.map(({ url, text, ranges }) => {
- let formatted = format(text);
- let ext_ranges = ranges.map(({ start, end }) => ({
- start,
- end,
- tokens: []
- }));
- function is_in_range(start, end) {
- let range_index = 0;
- for (let range of ext_ranges) {
- if (range.start > end) return -1;
- if (range.start <= start && range.end >= end) return range_index;
- range_index++;
+ //#region src/lib/chunkify.ts
+ function merge(stylesheet) {
+ let new_chunks = [];
+ let previous_chunk;
+ for (let i = 0; i < stylesheet.chunks.length; i++) {
+ let chunk = stylesheet.chunks.at(i);
+ if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset))) continue;
+ let latest_chunk = new_chunks.at(-1);
+ if (i > 0 && previous_chunk && latest_chunk) {
+ if (previous_chunk.is_covered === chunk.is_covered) {
+ latest_chunk.end_offset = chunk.end_offset;
+ previous_chunk = chunk;
+ continue;
+ } else if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset)) || chunk.end_offset === chunk.start_offset) {
+ latest_chunk.end_offset = chunk.end_offset;
+ continue;
  }
- return -1;
  }
- let index = 0;
- tokenize(text, (type, start, end) => {
- if (irrelevant_tokens.has(type)) return;
- index++;
- let range_index = is_in_range(start, end);
- if (range_index !== -1) ext_ranges[range_index].tokens.push(index);
- });
- let new_tokens = /* @__PURE__ */ new Map();
- index = 0;
- tokenize(formatted, (type, start, end) => {
- if (irrelevant_tokens.has(type)) return;
- index++;
- new_tokens.set(index, {
- start,
- end
+ previous_chunk = chunk;
+ new_chunks.push(chunk);
+ }
+ return {
+ ...stylesheet,
+ chunks: new_chunks
+ };
+ }
+ function chunkify(stylesheet) {
+ let chunks = [];
+ let offset = 0;
+ for (let range of stylesheet.ranges) {
+ if (offset !== range.start) {
+ chunks.push({
+ start_offset: offset,
+ end_offset: range.start,
+ is_covered: false
  });
+ offset = range.start;
+ }
+ chunks.push({
+ start_offset: range.start,
+ end_offset: range.end,
+ is_covered: true
  });
- let new_ranges = [];
- for (let range of ext_ranges) {
- let start_token = new_tokens.get(range.tokens.at(0));
- let end_token = new_tokens.get(range.tokens.at(-1));
- if (start_token !== void 0 && end_token !== void 0) new_ranges.push({
- start: start_token.start,
- end: end_token.end
- });
+ offset = range.end;
+ }
+ if (offset !== stylesheet.text.length - 1) chunks.push({
+ start_offset: offset,
+ end_offset: stylesheet.text.length,
+ is_covered: false
+ });
+ return merge({
+ url: stylesheet.url,
+ text: stylesheet.text,
+ chunks
+ });
+ }
+
+ //#endregion
+ //#region src/lib/prettify.ts
+ function prettify(stylesheet) {
+ let line = 1;
+ let offset = 0;
+ let pretty_chunks = stylesheet.chunks.map((chunk, index) => {
+ let css = format(stylesheet.text.slice(chunk.start_offset, chunk.end_offset - 1)).trim();
+ if (chunk.is_covered) {
+ let is_last = index === stylesheet.chunks.length - 1;
+ if (index === 0) css = css + (is_last ? "" : "\n");
+ else if (index === stylesheet.chunks.length - 1) css = "\n" + css;
+ else css = "\n" + css + "\n";
  }
+ let line_count = css.split("\n").length;
+ let start_offset = offset;
+ let end_offset = offset + css.length - 1;
+ let start_line = line;
+ let end_line = line + line_count;
+ line = end_line;
+ offset = end_offset;
  return {
- url,
- text: formatted,
- ranges: new_ranges
+ ...chunk,
+ start_offset,
+ start_line,
+ end_line: end_line - 1,
+ end_offset,
+ css,
+ total_lines: end_line - start_line
  };
  });
+ return {
+ ...stylesheet,
+ chunks: pretty_chunks,
+ text: pretty_chunks.map(({ css }) => css).join("\n")
+ };
  }

  //#endregion
  //#region src/lib/decuplicate.ts
+ function concatenate(ranges) {
+ let result = [];
+ for (let range of ranges) if (result.length > 0 && (result.at(-1).end === range.start - 1 || result.at(-1).end === range.start)) result.at(-1).end = range.end;
+ else result.push(range);
+ return result;
+ }
+ function dedupe_list(ranges) {
+ let new_ranges = /* @__PURE__ */ new Set();
+ outer: for (let range of ranges) {
+ for (let processed_range of new_ranges) {
+ if (range.start <= processed_range.start && range.end >= processed_range.end) {
+ new_ranges.delete(processed_range);
+ new_ranges.add(range);
+ continue outer;
+ }
+ if (range.start >= processed_range.start && range.end <= processed_range.end) continue outer;
+ if (range.start < processed_range.end && range.start > processed_range.start && range.end > processed_range.end) {
+ new_ranges.delete(processed_range);
+ new_ranges.add({
+ start: processed_range.start,
+ end: range.end
+ });
+ continue outer;
+ }
+ }
+ new_ranges.add(range);
+ }
+ return new_ranges;
+ }
  /**
  * @description
  * prerequisites
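
The chunkify region added above replaces the old token-based prettify approach: the browser's covered ranges are expanded into an alternating sequence of covered and uncovered chunks that together span the whole stylesheet, and merge() then collapses adjacent chunks with the same coverage state. A simplified restatement of that gap-filling step, with the whitespace handling and merging omitted; the names below are illustrative, not the package's.

type Range = { start: number; end: number }
type Chunk = { start_offset: number; end_offset: number; is_covered: boolean }

// Gaps between covered ranges become uncovered chunks; the tail of the stylesheet is uncovered too.
function to_chunks(text_length: number, ranges: Range[]): Chunk[] {
  let chunks: Chunk[] = []
  let offset = 0
  for (let range of ranges) {
    if (offset !== range.start) chunks.push({ start_offset: offset, end_offset: range.start, is_covered: false })
    chunks.push({ start_offset: range.start, end_offset: range.end, is_covered: true })
    offset = range.end
  }
  if (offset < text_length) chunks.push({ start_offset: offset, end_offset: text_length, is_covered: false })
  return chunks
}

// to_chunks(40, [{ start: 0, end: 10 }, { start: 20, end: 30 }]) yields:
// covered 0-10, uncovered 10-20, covered 20-30, uncovered 30-40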
@@ -117,7 +178,7 @@ function deduplicate_entries(entries) {
  return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({
  text,
  url,
- ranges
+ ranges: concatenate(dedupe_list(ranges.sort((a, b) => a.start - b.start))).sort((a, b) => a.start - b.start)
  }));
  }

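
deduplicate_entries now normalises the ranges it returns: they are sorted by start offset, overlapping or contained ranges are collapsed by dedupe_list, and directly adjacent ranges are joined by concatenate. A worked example of that pipeline, inferred from the code above; the concrete offsets are made up.

let input = [{ start: 0, end: 10 }, { start: 5, end: 8 }, { start: 9, end: 15 }, { start: 16, end: 20 }, { start: 30, end: 40 }]
// dedupe_list: { 5,8 } is contained in { 0,10 } and is dropped; { 9,15 } overlaps { 0,10 } and extends it to { 0,15 }
// concatenate: { 0,15 } and { 16,20 } are adjacent (end === start - 1) and merge into { 0,20 }
let expected = [{ start: 0, end: 20 }, { start: 30, end: 40 }]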
@@ -135,8 +196,13 @@ function ext(url) {

  //#endregion
  //#region src/lib/remap-html.ts
- function remap_html(parse_html, html, old_ranges) {
- let doc = parse_html(html);
+ async function get_dom_parser() {
+ if (typeof window !== "undefined" && "DOMParser" in window) return new window.DOMParser();
+ let { DOMParser } = await import("./esm-CWr4VY0v.js");
+ return new DOMParser();
+ }
+ async function remap_html(html, old_ranges) {
+ let doc = (await get_dom_parser()).parseFromString(html, "text/html");
  let combined_css = "";
  let new_ranges = [];
  let current_offset = 0;
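
remap_html now resolves its own DOM parser instead of receiving one from the caller: in a browser it uses the native window.DOMParser, otherwise it dynamically imports a bundled DOM implementation (the ./esm-CWr4VY0v.js chunk). Restated as a standalone pattern; the fallback module name below is a placeholder, not the package's internal chunk.

async function get_parser(): Promise<DOMParser> {
  // Browser: the native parser is available on window.
  if (typeof window !== 'undefined' && 'DOMParser' in window) return new window.DOMParser()
  // Node: fall back to any DOMParser-compatible implementation you bundle yourself.
  const { DOMParser } = await import('./some-dom-shim.js')
  return new DOMParser()
}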
@@ -164,7 +230,7 @@ function remap_html(parse_html, html, old_ranges) {
  function is_html(text) {
  return /<\/?(html|body|head|div|span|script|style)/i.test(text);
  }
- function filter_coverage(coverage, parse_html) {
+ async function filter_coverage(coverage) {
  let result = [];
  for (let entry of coverage) {
  let extension = ext(entry.url).toLowerCase();
@@ -174,8 +240,7 @@ function filter_coverage(coverage, parse_html) {
  continue;
  }
  if (is_html(entry.text)) {
- if (!parse_html) continue;
- let { css, ranges } = remap_html(parse_html, entry.text, entry.ranges);
+ let { css, ranges } = await remap_html(entry.text, entry.ranges);
  result.push({
  url: entry.url,
  text: css,
@@ -192,12 +257,85 @@ function filter_coverage(coverage, parse_html) {
  return result;
  }

+ //#endregion
+ //#region src/lib/extend-ranges.ts
+ const AT_SIGN = 64;
+ const LONGEST_ATRULE_NAME = 28;
+ function extend_ranges(coverage) {
+ return coverage.map(({ text, ranges, url }) => {
+ return {
+ text,
+ ranges: ranges.map((range, index) => {
+ let prev_range = ranges[index - 1];
+ for (let i = range.start; i >= range.start - LONGEST_ATRULE_NAME; i--) {
+ if (prev_range && prev_range.end > i) break;
+ let char_position = i;
+ if (text.charCodeAt(char_position) === AT_SIGN) {
+ range.start = char_position;
+ let next_offset = range.end;
+ let next_char$1 = text.charAt(next_offset);
+ while (/\s/.test(next_char$1)) {
+ next_offset++;
+ next_char$1 = text.charAt(next_offset);
+ }
+ if (next_char$1 === "{") range.end = range.end + 1;
+ break;
+ }
+ }
+ let offset = range.end;
+ let next_char = text.charAt(offset);
+ while (/\s/.test(next_char)) {
+ offset++;
+ next_char = text.charAt(offset);
+ }
+ if (next_char === "}") range.end = offset + 1;
+ return range;
+ }),
+ url
+ };
+ });
+ }
+
  //#endregion
  //#region src/lib/index.ts
  function ratio(fraction, total) {
  if (total === 0) return 0;
  return fraction / total;
  }
+ function calculate_stylesheet_coverage({ text, url, chunks }) {
+ let uncovered_bytes = 0;
+ let covered_bytes = 0;
+ let total_bytes = 0;
+ let total_lines = 0;
+ let covered_lines = 0;
+ let uncovered_lines = 0;
+ for (let chunk of chunks) {
+ let lines = chunk.total_lines;
+ let bytes = chunk.end_offset - chunk.start_offset;
+ total_lines += lines;
+ total_bytes += bytes;
+ if (chunk.is_covered) {
+ covered_lines += lines;
+ covered_bytes += bytes;
+ } else {
+ uncovered_lines += lines;
+ uncovered_bytes += bytes;
+ }
+ }
+ return {
+ url,
+ text,
+ uncovered_bytes,
+ covered_bytes,
+ total_bytes,
+ line_coverage_ratio: ratio(covered_lines, total_lines),
+ byte_coverage_ratio: ratio(covered_bytes, total_bytes),
+ total_lines,
+ covered_lines,
+ uncovered_lines,
+ chunks
+ };
+ }
  /**
  * @description
  * CSS Code Coverage calculation
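
Two further additions round out the internals: extend_ranges widens each browser-reported range so that at-rule preludes and trailing closing braces count as covered (browsers tend to start a range after the @ sign and stop before a block's final closing brace), and calculate_stylesheet_coverage then derives all per-stylesheet numbers directly from chunk offsets and line counts. A made-up example of the range widening:

let text = '@media print { a { color: red } }'
// Reported by the browser (illustrative values): the range starts after the '@' at offset 0
// and stops just before the inner '}' at offset 30.
let reported = { start: 1, end: 30 }
// After extend_ranges: the backwards scan (at most 28 characters) finds the '@' and moves start to 0;
// the forward scan over whitespace finds the '}' at offset 30, so end grows to 31 and the brace is included.
let extended = { start: 0, end: 31 }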
@@ -209,94 +347,16 @@ function ratio(fraction, total) {
  * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
  * 5. Calculate line-coverage, byte-coverage per stylesheet
  */
- function calculate_coverage(coverage, parse_html) {
+ async function calculate_coverage(coverage) {
  let total_files_found = coverage.length;
- if (!is_valid_coverage(coverage)) throw new TypeError("No valid coverage data found");
- let coverage_per_stylesheet = deduplicate_entries(prettify(filter_coverage(coverage, parse_html))).map(({ text, url, ranges }) => {
- function is_line_covered(line, start_offset) {
- let end = start_offset + line.length;
- let next_offset = end + 1;
- let is_empty = /^\s*$/.test(line);
- let is_closing_brace = line.endsWith("}");
- if (!is_empty && !is_closing_brace) for (let range of ranges) {
- if (range.start > end || range.end < start_offset) continue;
- if (range.start <= start_offset && range.end >= end) return true;
- else if (line.startsWith("@") && range.start > start_offset && range.start < next_offset) return true;
- }
- return false;
- }
- let lines = text.split("\n");
- let total_file_lines = lines.length;
- let line_coverage = new Uint8Array(total_file_lines);
- let file_lines_covered = 0;
- let file_total_bytes = text.length;
- let file_bytes_covered = 0;
- let offset = 0;
- for (let index = 0; index < lines.length; index++) {
- let line = lines[index];
- let start = offset;
- let next_offset = offset + line.length + 1;
- let is_empty = /^\s*$/.test(line);
- let is_closing_brace = line.endsWith("}");
- let is_in_range = is_line_covered(line, start);
- let is_covered = false;
- let prev_is_covered = index > 0 && line_coverage[index - 1] === 1;
- if (is_in_range && !is_closing_brace && !is_empty) is_covered = true;
- else if ((is_empty || is_closing_brace) && prev_is_covered) is_covered = true;
- else if (is_empty && !prev_is_covered && is_line_covered(lines[index + 1], next_offset)) is_covered = true;
- line_coverage[index] = is_covered ? 1 : 0;
- if (is_covered) {
- file_lines_covered++;
- file_bytes_covered += line.length + 1;
- }
- offset = next_offset;
- }
- let chunks = [{
- start_line: 1,
- is_covered: line_coverage[0] === 1,
- end_line: 1,
- total_lines: 1
- }];
- for (let index = 1; index < line_coverage.length; index++) {
- let is_covered = line_coverage.at(index);
- if (is_covered !== line_coverage.at(index - 1)) {
- let last_chunk$1 = chunks.at(-1);
- last_chunk$1.end_line = index;
- last_chunk$1.total_lines = index - last_chunk$1.start_line + 1;
- chunks.push({
- start_line: index + 1,
- is_covered: is_covered === 1,
- end_line: index,
- total_lines: 0
- });
- }
- }
- let last_chunk = chunks.at(-1);
- last_chunk.total_lines = line_coverage.length + 1 - last_chunk.start_line;
- last_chunk.end_line = line_coverage.length;
- return {
- url,
- text,
- ranges,
- unused_bytes: file_total_bytes - file_bytes_covered,
- used_bytes: file_bytes_covered,
- total_bytes: file_total_bytes,
- line_coverage_ratio: ratio(file_lines_covered, total_file_lines),
- byte_coverage_ratio: ratio(file_bytes_covered, file_total_bytes),
- line_coverage,
- total_lines: total_file_lines,
- covered_lines: file_lines_covered,
- uncovered_lines: total_file_lines - file_lines_covered,
- chunks
- };
- });
+ let coverage_per_stylesheet = extend_ranges(deduplicate_entries(await filter_coverage(coverage))).map((sheet) => chunkify(sheet)).map((sheet) => prettify(sheet)).map((stylesheet) => calculate_stylesheet_coverage(stylesheet));
  let { total_lines, total_covered_lines, total_uncovered_lines, total_bytes, total_used_bytes, total_unused_bytes } = coverage_per_stylesheet.reduce((totals, sheet) => {
  totals.total_lines += sheet.total_lines;
  totals.total_covered_lines += sheet.covered_lines;
  totals.total_uncovered_lines += sheet.uncovered_lines;
  totals.total_bytes += sheet.total_bytes;
- totals.total_used_bytes += sheet.used_bytes;
- totals.total_unused_bytes += sheet.unused_bytes;
+ totals.total_used_bytes += sheet.covered_bytes;
+ totals.total_unused_bytes += sheet.uncovered_bytes;
  return totals;
  }, {
  total_lines: 0,
@@ -310,9 +370,9 @@ function calculate_coverage(coverage, parse_html) {
  total_files_found,
  total_bytes,
  total_lines,
- used_bytes: total_used_bytes,
+ covered_bytes: total_used_bytes,
  covered_lines: total_covered_lines,
- unused_bytes: total_unused_bytes,
+ uncovered_bytes: total_unused_bytes,
  uncovered_lines: total_uncovered_lines,
  byte_coverage_ratio: ratio(total_used_bytes, total_bytes),
  line_coverage_ratio: ratio(total_covered_lines, total_lines),
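
With the per-line Uint8Array walk removed, a stylesheet's coverage report is now carried entirely by its chunks: each chunk brings its formatted css plus start_line, end_line, and total_lines, so a consumer can render a coverage view without re-parsing anything. A small rendering sketch using the exported StylesheetCoverage type; the output format is just an example.

import type { StylesheetCoverage } from '@projectwallace/css-code-coverage'

function render(sheet: StylesheetCoverage): string {
  return sheet.chunks
    .map((chunk) => {
      let marker = chunk.is_covered ? 'covered' : 'uncovered'
      return `/* ${marker}: lines ${chunk.start_line}-${chunk.end_line} (${chunk.total_lines} lines) */\n${chunk.css}`
    })
    .join('\n')
}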
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@projectwallace/css-code-coverage",
- "version": "0.4.1",
- "description": "",
+ "version": "0.6.0",
+ "description": "Generate useful CSS Code Coverage report from browser-reported coverage",
  "author": "Bart Veneman <bart@projectwallace.com>",
  "repository": {
  "type": "git",
@@ -55,7 +55,6 @@
  },
  "dependencies": {
  "@projectwallace/format-css": "^2.1.1",
- "css-tree": "^3.1.0",
  "valibot": "^1.1.0"
  }
  }