@projectwallace/css-code-coverage 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +11748 -0
- package/dist/index.d.ts +78 -2
- package/dist/index.js +325 -0
- package/package.json +12 -8
- package/dist/css-code-coverage.js +0 -224
- package/dist/src/chunkify.d.ts +0 -9
- package/dist/src/chunkify.test.d.ts +0 -1
- package/dist/src/decuplicate.d.ts +0 -9
- package/dist/src/deduplicate.test.d.ts +0 -1
- package/dist/src/ext.d.ts +0 -1
- package/dist/src/ext.test.d.ts +0 -1
- package/dist/src/extend-ranges.d.ts +0 -5
- package/dist/src/extend-ranges.test.d.ts +0 -1
- package/dist/src/filter-entries.d.ts +0 -3
- package/dist/src/filter-entries.test.d.ts +0 -1
- package/dist/src/index.d.ts +0 -38
- package/dist/src/index.test.d.ts +0 -1
- package/dist/src/parse-coverage.d.ts +0 -11
- package/dist/src/parse-coverage.test.d.ts +0 -1
- package/dist/src/prettify.d.ts +0 -12
- package/dist/src/prettify.test.d.ts +0 -1
- package/dist/src/remap-html.d.ts +0 -9
- package/dist/src/remap-html.test.d.ts +0 -1
- package/dist/src/types.d.ts +0 -9
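
For orientation: the `CoverageSchema` introduced in this release ({ url, text, ranges: [{ start, end }] }) matches the entry shape emitted by Chromium-based CSS coverage tooling. A minimal sketch of feeding Puppeteer's CSS coverage into the new API; Puppeteer and the target URL are assumptions for illustration, not part of this package:

  import puppeteer from "puppeteer";
  import { calculate_coverage } from "@projectwallace/css-code-coverage";

  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  await page.coverage.startCSSCoverage();
  await page.goto("https://example.com"); // hypothetical target page
  const entries = await page.coverage.stopCSSCoverage(); // [{ url, text, ranges }]
  await browser.close();

  const result = calculate_coverage(entries);
  console.log(result.byte_coverage_ratio, result.line_coverage_ratio);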
package/dist/index.d.ts
CHANGED
@@ -1,2 +1,78 @@
-
-
+import * as v from "valibot";
+
+//#region src/lib/parse-coverage.d.ts
+declare let RangeSchema: v.ObjectSchema<{
+  readonly start: v.NumberSchema<undefined>;
+  readonly end: v.NumberSchema<undefined>;
+}, undefined>;
+type Range = v.InferInput<typeof RangeSchema>;
+declare let CoverageSchema: v.ObjectSchema<{
+  readonly text: v.StringSchema<undefined>;
+  readonly url: v.StringSchema<undefined>;
+  readonly ranges: v.ArraySchema<v.ObjectSchema<{
+    readonly start: v.NumberSchema<undefined>;
+    readonly end: v.NumberSchema<undefined>;
+  }, undefined>, undefined>;
+}, undefined>;
+type Coverage = v.InferInput<typeof CoverageSchema>;
+declare function parse_coverage(input: string): {
+  text: string;
+  url: string;
+  ranges: {
+    start: number;
+    end: number;
+  }[];
+}[];
+//#endregion
+//#region src/lib/types.d.ts
+type NodeList = Iterable<{
+  textContent: string;
+}> | NodeListOf<HTMLStyleElement>;
+interface Parser {
+  (html: string): {
+    querySelectorAll: (selector: string) => NodeList;
+  };
+}
+//#endregion
+//#region src/lib/index.d.ts
+type CoverageData = {
+  unused_bytes: number;
+  used_bytes: number;
+  total_bytes: number;
+  line_coverage_ratio: number;
+  byte_coverage_ratio: number;
+  total_lines: number;
+  covered_lines: number;
+  uncovered_lines: number;
+};
+type StylesheetCoverage = CoverageData & {
+  url: string;
+  text: string;
+  ranges: Range[];
+  line_coverage: Uint8Array;
+  chunks: {
+    is_covered: boolean;
+    start_line: number;
+    end_line: number;
+    total_lines: number;
+  }[];
+};
+type CoverageResult = CoverageData & {
+  total_files_found: number;
+  total_stylesheets: number;
+  coverage_per_stylesheet: StylesheetCoverage[];
+};
+/**
+ * @description
+ * CSS Code Coverage calculation
+ *
+ * These are the steps performed to calculate coverage:
+ * 1. Filter eligible files / validate input
+ * 2. Prettify the CSS dicovered in each Coverage and update their ranges
+ * 3. De-duplicate Coverages: merge all ranges for CSS sources occurring multiple times
+ * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
+ * 5. Calculate line-coverage, byte-coverage per stylesheet
+ */
+declare function calculate_coverage(coverage: Coverage[], parse_html?: Parser): CoverageResult;
+//#endregion
+export { type Coverage, CoverageData, CoverageResult, type Parser, type Range, StylesheetCoverage, calculate_coverage, parse_coverage };
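
The new optional `Parser` parameter above is what lets `calculate_coverage` extract `<style>` blocks from HTML entries: anything whose `querySelectorAll("style")` returns elements with `textContent` satisfies the interface. A hedged sketch using linkedom (the DOM implementation this package itself lists as a devDependency), with a hypothetical inline-HTML entry:

  import { parseHTML } from "linkedom";
  import { calculate_coverage } from "@projectwallace/css-code-coverage";

  // linkedom's document implements querySelectorAll, which is all Parser requires
  const parse_html = (html) => parseHTML(html).document;

  const entries = [{
    url: "https://example.com/", // hypothetical DevTools-style entry
    text: "<html><style>a { color: red }</style></html>",
    ranges: [] // no covered ranges in this sample
  }];

  const result = calculate_coverage(entries, parse_html);
  for (const sheet of result.coverage_per_stylesheet) {
    console.log(sheet.url, `${sheet.covered_lines}/${sheet.total_lines} lines covered`);
  }

Without a parser argument, HTML entries are silently skipped by `filter_coverage` (see `dist/index.js` below).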
package/dist/index.js
ADDED
@@ -0,0 +1,325 @@
+import * as v from "valibot";
+import { format } from "@projectwallace/format-css";
+import { tokenTypes, tokenize } from "css-tree/tokenizer";
+
+//#region src/lib/parse-coverage.ts
+let RangeSchema = v.object({
+  start: v.number(),
+  end: v.number()
+});
+let CoverageSchema = v.object({
+  text: v.string(),
+  url: v.string(),
+  ranges: v.array(RangeSchema)
+});
+function is_valid_coverage(input) {
+  return v.safeParse(v.array(CoverageSchema), input).success;
+}
+function parse_coverage(input) {
+  try {
+    let parse_result = JSON.parse(input);
+    return is_valid_coverage(parse_result) ? parse_result : [];
+  } catch {
+    return [];
+  }
+}
+
+//#endregion
+//#region src/lib/prettify.ts
+let irrelevant_tokens = new Set([
+  tokenTypes.EOF,
+  tokenTypes.BadString,
+  tokenTypes.BadUrl,
+  tokenTypes.WhiteSpace,
+  tokenTypes.Semicolon,
+  tokenTypes.Comment,
+  tokenTypes.Colon
+]);
+function prettify(coverage) {
+  return coverage.map(({ url, text, ranges }) => {
+    let formatted = format(text);
+    let ext_ranges = ranges.map(({ start, end }) => ({
+      start,
+      end,
+      tokens: []
+    }));
+    function is_in_range(start, end) {
+      let range_index = 0;
+      for (let range of ext_ranges) {
+        if (range.start > end) return -1;
+        if (range.start <= start && range.end >= end) return range_index;
+        range_index++;
+      }
+      return -1;
+    }
+    let index = 0;
+    tokenize(text, (type, start, end) => {
+      if (irrelevant_tokens.has(type)) return;
+      index++;
+      let range_index = is_in_range(start, end);
+      if (range_index !== -1) ext_ranges[range_index].tokens.push(index);
+    });
+    let new_tokens = /* @__PURE__ */ new Map();
+    index = 0;
+    tokenize(formatted, (type, start, end) => {
+      if (irrelevant_tokens.has(type)) return;
+      index++;
+      new_tokens.set(index, {
+        start,
+        end
+      });
+    });
+    let new_ranges = [];
+    for (let range of ext_ranges) {
+      let start_token = new_tokens.get(range.tokens.at(0));
+      let end_token = new_tokens.get(range.tokens.at(-1));
+      if (start_token !== void 0 && end_token !== void 0) new_ranges.push({
+        start: start_token.start,
+        end: end_token.end
+      });
+    }
+    return {
+      url,
+      text: formatted,
+      ranges: new_ranges
+    };
+  });
+}
+
+//#endregion
+//#region src/lib/decuplicate.ts
+/**
+ * @description
+ * prerequisites
+ * - we check each stylesheet content only once (to avoid counting the same content multiple times)
+ * - if a duplicate stylesheet enters the room, we add it's ranges to the existing stylesheet's ranges
+ * - only bytes of deduplicated stylesheets are counted
+ */
+function deduplicate_entries(entries) {
+  let checked_stylesheets = /* @__PURE__ */ new Map();
+  for (let entry of entries) {
+    let text = entry.text;
+    if (checked_stylesheets.has(text)) {
+      let ranges = checked_stylesheets.get(text).ranges;
+      for (let range of entry.ranges) {
+        let found = false;
+        for (let checked_range of ranges) if (checked_range.start === range.start && checked_range.end === range.end) {
+          found = true;
+          break;
+        }
+        if (!found) ranges.push(range);
+      }
+    } else checked_stylesheets.set(text, {
+      url: entry.url,
+      ranges: entry.ranges
+    });
+  }
+  return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({
+    text,
+    url,
+    ranges
+  }));
+}
+
+//#endregion
+//#region src/lib/ext.ts
+function ext(url) {
+  try {
+    let parsed_url = new URL(url);
+    return parsed_url.pathname.slice(parsed_url.pathname.lastIndexOf(".") + 1);
+  } catch {
+    let ext_index = url.lastIndexOf(".");
+    return url.slice(ext_index, url.indexOf("/", ext_index) + 1);
+  }
+}
+
+//#endregion
+//#region src/lib/remap-html.ts
+function remap_html(parse_html, html, old_ranges) {
+  let doc = parse_html(html);
+  let combined_css = "";
+  let new_ranges = [];
+  let current_offset = 0;
+  let style_elements = doc.querySelectorAll("style");
+  for (let style_element of Array.from(style_elements)) {
+    let style_content = style_element.textContent;
+    if (!style_content.trim()) continue;
+    combined_css += style_content;
+    let start_index = html.indexOf(style_content);
+    let end_index = start_index + style_content.length;
+    for (let range of old_ranges) if (range.start >= start_index && range.end <= end_index) new_ranges.push({
+      start: current_offset + (range.start - start_index),
+      end: current_offset + (range.end - start_index)
+    });
+    current_offset += style_content.length;
+  }
+  return {
+    css: combined_css,
+    ranges: new_ranges
+  };
+}
+
+//#endregion
+//#region src/lib/filter-entries.ts
+function is_html(text) {
+  return /<\/?(html|body|head|div|span|script|style)/i.test(text);
+}
+function filter_coverage(coverage, parse_html) {
+  let result = [];
+  for (let entry of coverage) {
+    let extension = ext(entry.url).toLowerCase();
+    if (extension === "js") continue;
+    if (extension === "css") {
+      result.push(entry);
+      continue;
+    }
+    if (is_html(entry.text)) {
+      if (!parse_html) continue;
+      let { css, ranges } = remap_html(parse_html, entry.text, entry.ranges);
+      result.push({
+        url: entry.url,
+        text: css,
+        ranges
+      });
+      continue;
+    }
+    result.push({
+      url: entry.url,
+      text: entry.text,
+      ranges: entry.ranges
+    });
+  }
+  return result;
+}
+
+//#endregion
+//#region src/lib/index.ts
+function ratio(fraction, total) {
+  if (total === 0) return 0;
+  return fraction / total;
+}
+/**
+ * @description
+ * CSS Code Coverage calculation
+ *
+ * These are the steps performed to calculate coverage:
+ * 1. Filter eligible files / validate input
+ * 2. Prettify the CSS dicovered in each Coverage and update their ranges
+ * 3. De-duplicate Coverages: merge all ranges for CSS sources occurring multiple times
+ * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
+ * 5. Calculate line-coverage, byte-coverage per stylesheet
+ */
+function calculate_coverage(coverage, parse_html) {
+  let total_files_found = coverage.length;
+  if (!is_valid_coverage(coverage)) throw new TypeError("No valid coverage data found");
+  let coverage_per_stylesheet = deduplicate_entries(prettify(filter_coverage(coverage, parse_html))).map(({ text, url, ranges }) => {
+    function is_line_covered(line, start_offset) {
+      let end = start_offset + line.length;
+      let next_offset = end + 1;
+      let is_empty = /^\s*$/.test(line);
+      let is_closing_brace = line.endsWith("}");
+      if (!is_empty && !is_closing_brace) for (let range of ranges) {
+        if (range.start > end || range.end < start_offset) continue;
+        if (range.start <= start_offset && range.end >= end) return true;
+        else if (line.startsWith("@") && range.start > start_offset && range.start < next_offset) return true;
+      }
+      return false;
+    }
+    let lines = text.split("\n");
+    let total_file_lines = lines.length;
+    let line_coverage = new Uint8Array(total_file_lines);
+    let file_lines_covered = 0;
+    let file_total_bytes = text.length;
+    let file_bytes_covered = 0;
+    let offset = 0;
+    for (let index = 0; index < lines.length; index++) {
+      let line = lines[index];
+      let start = offset;
+      let next_offset = offset + line.length + 1;
+      let is_empty = /^\s*$/.test(line);
+      let is_closing_brace = line.endsWith("}");
+      let is_in_range = is_line_covered(line, start);
+      let is_covered = false;
+      let prev_is_covered = index > 0 && line_coverage[index - 1] === 1;
+      if (is_in_range && !is_closing_brace && !is_empty) is_covered = true;
+      else if ((is_empty || is_closing_brace) && prev_is_covered) is_covered = true;
+      else if (is_empty && !prev_is_covered && is_line_covered(lines[index + 1], next_offset)) is_covered = true;
+      line_coverage[index] = is_covered ? 1 : 0;
+      if (is_covered) {
+        file_lines_covered++;
+        file_bytes_covered += line.length + 1;
+      }
+      offset = next_offset;
+    }
+    let chunks = [{
+      start_line: 1,
+      is_covered: line_coverage[0] === 1,
+      end_line: 1,
+      total_lines: 1
+    }];
+    for (let index = 1; index < line_coverage.length; index++) {
+      let is_covered = line_coverage.at(index);
+      if (is_covered !== line_coverage.at(index - 1)) {
+        let last_chunk$1 = chunks.at(-1);
+        last_chunk$1.end_line = index;
+        last_chunk$1.total_lines = index - last_chunk$1.start_line + 1;
+        chunks.push({
+          start_line: index + 1,
+          is_covered: is_covered === 1,
+          end_line: index,
+          total_lines: 0
+        });
+      }
+    }
+    let last_chunk = chunks.at(-1);
+    last_chunk.total_lines = line_coverage.length + 1 - last_chunk.start_line;
+    last_chunk.end_line = line_coverage.length;
+    return {
+      url,
+      text,
+      ranges,
+      unused_bytes: file_total_bytes - file_bytes_covered,
+      used_bytes: file_bytes_covered,
+      total_bytes: file_total_bytes,
+      line_coverage_ratio: ratio(file_lines_covered, total_file_lines),
+      byte_coverage_ratio: ratio(file_bytes_covered, file_total_bytes),
+      line_coverage,
+      total_lines: total_file_lines,
+      covered_lines: file_lines_covered,
+      uncovered_lines: total_file_lines - file_lines_covered,
+      chunks
+    };
+  });
+  let { total_lines, total_covered_lines, total_uncovered_lines, total_bytes, total_used_bytes, total_unused_bytes } = coverage_per_stylesheet.reduce((totals, sheet) => {
+    totals.total_lines += sheet.total_lines;
+    totals.total_covered_lines += sheet.covered_lines;
+    totals.total_uncovered_lines += sheet.uncovered_lines;
+    totals.total_bytes += sheet.total_bytes;
+    totals.total_used_bytes += sheet.used_bytes;
+    totals.total_unused_bytes += sheet.unused_bytes;
+    return totals;
+  }, {
+    total_lines: 0,
+    total_covered_lines: 0,
+    total_uncovered_lines: 0,
+    total_bytes: 0,
+    total_used_bytes: 0,
+    total_unused_bytes: 0
+  });
+  return {
+    total_files_found,
+    total_bytes,
+    total_lines,
+    used_bytes: total_used_bytes,
+    covered_lines: total_covered_lines,
+    unused_bytes: total_unused_bytes,
+    uncovered_lines: total_uncovered_lines,
+    byte_coverage_ratio: ratio(total_used_bytes, total_bytes),
+    line_coverage_ratio: ratio(total_covered_lines, total_lines),
+    coverage_per_stylesheet,
+    total_stylesheets: coverage_per_stylesheet.length
+  };
+}
+
+//#endregion
+export { calculate_coverage, parse_coverage };
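
Two behaviors worth noting from the source above: `parse_coverage` returns an empty array for malformed JSON or schema-invalid input rather than throwing, while `calculate_coverage` throws a `TypeError` on invalid entries; and each stylesheet's `chunks` array groups consecutive lines that share the same coverage state. A sketch combining both, with a hypothetical coverage.json exported from DevTools:

  import { readFileSync } from "node:fs";
  import { parse_coverage, calculate_coverage } from "@projectwallace/css-code-coverage";

  const entries = parse_coverage(readFileSync("coverage.json", "utf8")); // hypothetical file
  if (entries.length === 0) throw new Error("no usable coverage entries");

  const result = calculate_coverage(entries);
  for (const sheet of result.coverage_per_stylesheet) {
    for (const chunk of sheet.chunks) {
      if (!chunk.is_covered) {
        console.log(`${sheet.url}: lines ${chunk.start_line}-${chunk.end_line} unused`);
      }
    }
  }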
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@projectwallace/css-code-coverage",
-  "version": "0.3.0",
+  "version": "0.4.0",
   "description": "",
   "author": "Bart Veneman <bart@projectwallace.com>",
   "repository": {
@@ -21,37 +21,41 @@
   ],
   "license": "EUPL-1.2",
   "engines": {
-    "node": ">=18.0
+    "node": ">=22.18.0"
   },
   "type": "module",
   "files": [
    "dist"
   ],
-  "
+  "bin": {
+    "css-coverage": "dist/cli.js"
+  },
+  "main": "dist/index.js",
   "exports": {
    "types": "./dist/index.d.ts",
-    "default": "./dist/
+    "default": "./dist/index.js"
   },
   "types": "dist/index.d.ts",
   "scripts": {
    "test": "c8 --reporter=text playwright test",
-    "build": "
+    "build": "tsdown",
    "check": "tsc --noEmit",
    "lint": "oxlint --config .oxlintrc.json",
    "lint-package": "publint"
   },
   "devDependencies": {
    "@playwright/test": "^1.56.0",
+    "@types/node": "^24.8.1",
    "c8": "^10.1.3",
    "linkedom": "^0.18.12",
    "oxlint": "^1.22.0",
    "publint": "^0.3.14",
-    "
-    "
-    "vite-plugin-dts": "^4.5.4"
+    "tsdown": "^0.15.8",
+    "typescript": "^5.9.3"
   },
   "dependencies": {
    "@projectwallace/format-css": "^2.1.1",
+    "css-tree": "^3.1.0",
    "valibot": "^1.1.0"
   }
 }
package/dist/css-code-coverage.js
DELETED
@@ -1,224 +0,0 @@
-import * as g from "valibot";
-import { format as x } from "@projectwallace/format-css";
-let m = g.array(
-  g.object({
-    text: g.string(),
-    url: g.string(),
-    ranges: g.array(
-      g.object({
-        start: g.number(),
-        end: g.number()
-      })
-    )
-  })
-);
-function p(e) {
-  return g.safeParse(m, e).success;
-}
-function M(e) {
-  try {
-    let t = JSON.parse(e);
-    return p(t) ? t : [];
-  } catch {
-    return [];
-  }
-}
-function k(e) {
-  let t = /* @__PURE__ */ new Map();
-  for (let n of e) {
-    let r = n.text;
-    if (t.has(r)) {
-      let l = t.get(r).ranges;
-      for (let a of n.ranges) {
-        let s = !1;
-        for (let f of l)
-          if (f.start === a.start && f.end === a.end) {
-            s = !0;
-            break;
-          }
-        s || l.push(a);
-      }
-    } else
-      t.set(r, {
-        url: n.url,
-        ranges: n.ranges
-      });
-  }
-  return Array.from(t, ([n, { url: r, ranges: o }]) => ({ text: n, url: r, ranges: o }));
-}
-function A(e) {
-  try {
-    let t = new URL(e);
-    return t.pathname.slice(t.pathname.lastIndexOf(".") + 1);
-  } catch {
-    let t = e.lastIndexOf(".");
-    return e.slice(t, e.indexOf("/", t) + 1);
-  }
-}
-function w(e, t, n) {
-  let r = e(t), o = "", l = [], a = 0, s = r.querySelectorAll("style");
-  for (let f of Array.from(s)) {
-    let c = f.textContent;
-    if (!c.trim()) continue;
-    o += c;
-    let i = t.indexOf(c), d = i + c.length;
-    for (let _ of n)
-      _.start >= i && _.end <= d && l.push({
-        start: a + (_.start - i),
-        end: a + (_.end - i)
-      });
-    a += c.length;
-  }
-  return {
-    css: o,
-    ranges: l
-  };
-}
-function E(e) {
-  return /<\/?(html|body|head|div|span|script|style)/i.test(e);
-}
-function O(e, t) {
-  let n = [];
-  for (let r of e) {
-    let o = A(r.url).toLowerCase();
-    if (o !== "js") {
-      if (o === "css") {
-        n.push(r);
-        continue;
-      }
-      if (E(r.text)) {
-        if (!t)
-          continue;
-        let { css: l, ranges: a } = w(t, r.text, r.ranges);
-        n.push({
-          url: r.url,
-          text: l,
-          ranges: a
-        });
-        continue;
-      }
-      n.push({
-        url: r.url,
-        text: r.text,
-        ranges: r.ranges
-      });
-    }
-  }
-  return n;
-}
-function L(e) {
-  let t = [], n = 0;
-  for (let r of e.ranges)
-    n !== r.start && (t.push({
-      start_offset: n,
-      end_offset: r.start,
-      is_covered: !1
-    }), n = r.start), t.push({
-      start_offset: r.start,
-      end_offset: r.end,
-      is_covered: !0
-    }), n = r.end;
-  return n !== e.text.length && t.push({
-    start_offset: n,
-    end_offset: e.text.length,
-    is_covered: !1
-  }), {
-    ...e,
-    chunks: t
-  };
-}
-function N(e) {
-  for (let n of e)
-    for (let r of n.ranges)
-      for (let o = 1; o >= -28; o--) {
-        let l = r.start + o;
-        if (n.text.charAt(l) === "@") {
-          r.start = l;
-          break;
-        }
-      }
-  return e;
-}
-function S(e) {
-  let t = 1, n = 0, r = e.chunks.map((l, a) => {
-    let s = x(e.text.slice(l.start_offset, l.end_offset));
-    l.is_covered && (a === 0 ? s = s + `
-` : a === e.chunks.length - 1 ? s = `
-` + s : s = `
-` + s + `
-`);
-    let f = s.split(`
-`).length, c = n, i = Math.max(n + s.length - 1, 0), d = t, _ = t + f;
-    return t = _, n = i, {
-      ...l,
-      start_offset: c,
-      start_line: d,
-      end_line: _ - 1,
-      end_offset: i,
-      css: s,
-      total_lines: _ - d
-    };
-  });
-  return {
-    ...e,
-    // TODO: update ranges as well?? Or remove them because we have chunks now
-    chunks: r,
-    text: r.map(({ css: l }) => l).join("")
-  };
-}
-function y(e, t) {
-  return t === 0 ? 0 : e / t;
-}
-function T(e) {
-  let { text: t, url: n, chunks: r } = e, o = 0, l = 0, a = 0, s = 0, f = 0, c = 0;
-  for (let i of r) {
-    let d = i.total_lines, _ = i.end_offset - i.start_offset;
-    s += d, a += _, i.is_covered ? (f += d, l += _) : (c += d, o += _);
-  }
-  return {
-    url: n,
-    text: t,
-    uncovered_bytes: o,
-    covered_bytes: l,
-    total_bytes: a,
-    line_coverage_ratio: y(f, s),
-    byte_coverage_ratio: y(l, a),
-    total_lines: s,
-    covered_lines: f,
-    uncovered_lines: c,
-    chunks: r
-  };
-}
-function C(e, t) {
-  let n = e.length;
-  if (!p(e))
-    throw new TypeError("No valid coverage data found");
-  let r = O(e, t), o = k(r), f = N(o).map((u) => L(u)).map((u) => S(u)).map((u) => T(u)), { total_lines: c, total_covered_lines: i, total_uncovered_lines: d, total_bytes: _, total_covered_bytes: h, total_uncovered_bytes: b } = f.reduce(
-    (u, v) => (u.total_lines += v.total_lines, u.total_covered_lines += v.covered_lines, u.total_uncovered_lines += v.uncovered_lines, u.total_bytes += v.total_bytes, u.total_covered_bytes += v.covered_bytes, u.total_uncovered_bytes += v.uncovered_bytes, u),
-    {
-      total_lines: 0,
-      total_covered_lines: 0,
-      total_uncovered_lines: 0,
-      total_bytes: 0,
-      total_covered_bytes: 0,
-      total_uncovered_bytes: 0
-    }
-  );
-  return {
-    total_files_found: n,
-    total_bytes: _,
-    total_lines: c,
-    covered_bytes: h,
-    covered_lines: i,
-    uncovered_bytes: b,
-    uncovered_lines: d,
-    byte_coverage_ratio: y(h, _),
-    line_coverage_ratio: y(i, c),
-    coverage_per_stylesheet: f,
-    total_stylesheets: f.length
-  };
-}
-export {
-  C as calculate_coverage,
-  M as parse_coverage
-};
package/dist/src/chunkify.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { Coverage } from './parse-coverage';
-export type ChunkedCoverage = Coverage & {
-  chunks: {
-    start_offset: number;
-    end_offset: number;
-    is_covered: boolean;
-  }[];
-};
-export declare function chunkify_stylesheet(stylesheet: Coverage): ChunkedCoverage;
package/dist/src/chunkify.test.d.ts
DELETED
@@ -1 +0,0 @@
-export {};
package/dist/src/decuplicate.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { Coverage } from './parse-coverage.ts';
-/**
- * @description
- * prerequisites
- * - we check each stylesheet content only once (to avoid counting the same content multiple times)
- * - if a duplicate stylesheet enters the room, we add it's ranges to the existing stylesheet's ranges
- * - only bytes of deduplicated stylesheets are counted
- */
-export declare function deduplicate_entries(entries: Coverage[]): Coverage[];
package/dist/src/deduplicate.test.d.ts
DELETED
@@ -1 +0,0 @@
-export {};