playwright-slack-report-burak 2.0.1 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/ResultsParser.js +9 -165
- package/package.json +1 -1
|
@@ -6,118 +6,38 @@
|
|
|
6
6
|
/* eslint-disable class-methods-use-this */
|
|
7
7
|
/* eslint-disable no-param-reassign */
|
|
8
8
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
9
|
-
const fs = require('fs');
|
|
10
|
-
const path = require('path');
|
|
11
|
-
const axios = require('axios');
|
|
12
|
-
const { exec } = require('child_process');
|
|
13
|
-
|
|
14
9
|
class ResultsParser {
|
|
15
10
|
result;
|
|
16
11
|
separateFlakyTests;
|
|
17
|
-
totalShardCount = process.env.CIRCLE_NODE_TOTAL || 1;
|
|
18
|
-
shardIndex = process.env.CIRCLE_NODE_INDEX || 0;
|
|
19
12
|
constructor(options = { separateFlakyTests: false }) {
|
|
20
13
|
this.result = [];
|
|
21
14
|
this.separateFlakyTests = options.separateFlakyTests;
|
|
22
15
|
}
|
|
23
16
|
async getParsedResults() {
|
|
24
|
-
const summary = {
|
|
25
|
-
total: 0,
|
|
26
|
-
passed: 0,
|
|
27
|
-
failed: 0,
|
|
28
|
-
flaky: 0,
|
|
29
|
-
skipped: 0,
|
|
30
|
-
failures: [],
|
|
31
|
-
tests: [],
|
|
32
|
-
};
|
|
33
17
|
const failures = await this.getFailures();
|
|
34
18
|
const flakes = await this.getFlakes();
|
|
35
19
|
let passes = await this.getPasses();
|
|
36
20
|
/*if (this.separateFlakyTests) {
|
|
37
21
|
passes = this.doSeparateFlakyTests(passes, flakes);
|
|
38
22
|
}*/
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
fs.mkdirSync(summariesDir, { recursive: true });
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
// Determine the current node index
|
|
48
|
-
const currentNodeIndex = this.shardIndex;
|
|
49
|
-
|
|
50
|
-
// Define the file for the current node's summary
|
|
51
|
-
const nodeSummaryFile = path.join(summariesDir, `node_summary_${currentNodeIndex}.json`);
|
|
52
|
-
|
|
53
|
-
const totalTestCasesForNode = this.result.reduce((acc, suite) => acc + suite.testSuite.tests.length, 0);
|
|
54
|
-
|
|
55
|
-
// Initialize or read existing summary from file
|
|
56
|
-
let nodeSummary = {
|
|
57
|
-
total: totalTestCasesForNode,
|
|
58
|
-
passed: 0,
|
|
59
|
-
failed: 0,
|
|
60
|
-
flaky: 0,
|
|
23
|
+
const summary = {
|
|
24
|
+
passed: passes.length,
|
|
25
|
+
failed: failures.length,
|
|
26
|
+
flaky: (this.separateFlakyTests && flakes.length > 0) ?
|
|
27
|
+
flakes.length : 0,
|
|
61
28
|
skipped: 0,
|
|
62
|
-
failures
|
|
29
|
+
failures,
|
|
63
30
|
tests: [],
|
|
64
31
|
};
|
|
65
|
-
|
|
66
|
-
// Create the file if it doesn't exist
|
|
67
|
-
if (!fs.existsSync(nodeSummaryFile)) {
|
|
68
|
-
fs.writeFileSync(nodeSummaryFile, JSON.stringify(nodeSummary, null, 2));
|
|
69
|
-
} else {
|
|
70
|
-
nodeSummary = JSON.parse(fs.readFileSync(nodeSummaryFile, 'utf-8'));
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
// Update the node summary with new test results
|
|
74
32
|
for (const suite of this.result) {
|
|
75
|
-
|
|
33
|
+
summary.tests = summary.tests.concat(suite.testSuite.tests);
|
|
76
34
|
for (const test of suite.testSuite.tests) {
|
|
77
35
|
if (test.status === 'skipped') {
|
|
78
|
-
|
|
79
|
-
}
|
|
80
|
-
if (test.status === 'failed' || test.status === 'timedOut') {
|
|
81
|
-
nodeSummary.failed += 1;
|
|
82
|
-
}
|
|
83
|
-
if (this.separateFlakyTests && test.status === 'passed' && test.retry > 0) {
|
|
84
|
-
nodeSummary.flaky += 1;
|
|
85
|
-
}
|
|
86
|
-
if (test.status === 'passed' && (!this.separateFlakyTests || test.retry === 0)) {
|
|
87
|
-
nodeSummary.passed += 1;
|
|
88
|
-
}
|
|
89
|
-
}
|
|
90
|
-
}
|
|
91
|
-
|
|
92
|
-
// Write the updated summary back to the file for the current node
|
|
93
|
-
fs.writeFileSync(nodeSummaryFile, JSON.stringify(nodeSummary, null, 2));
|
|
94
|
-
|
|
95
|
-
if (this.shardIndex === 0 && this.totalShardCount > 1) {
|
|
96
|
-
await this.fetchAllArtifacts();
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
if (this.allNodeSummaryFilesExist() && this.allBlobZipsExist()) {
|
|
100
|
-
// Merge all node summaries into the final summary
|
|
101
|
-
for (let i = 0; i < this.totalShardCount; i++) {
|
|
102
|
-
const nodeSummaryFile = path.join(summariesDir, `node_summary_${i}.json`);
|
|
103
|
-
const fileToRead = nodeSummaryFile;
|
|
104
|
-
|
|
105
|
-
if (fs.existsSync(fileToRead)) {
|
|
106
|
-
const nodeSummary = JSON.parse(fs.readFileSync(fileToRead, 'utf-8'));
|
|
107
|
-
summary.total += nodeSummary.total;
|
|
108
|
-
summary.passed += nodeSummary.passed;
|
|
109
|
-
summary.failed += nodeSummary.failed;
|
|
110
|
-
summary.flaky += nodeSummary.flaky;
|
|
111
|
-
summary.skipped += nodeSummary.skipped;
|
|
112
|
-
summary.failures = summary.failures.concat(nodeSummary.failures);
|
|
113
|
-
summary.tests = summary.tests.concat(nodeSummary.tests);
|
|
36
|
+
summary.skipped += 1;
|
|
114
37
|
}
|
|
115
38
|
}
|
|
116
|
-
this.mergeReports();
|
|
117
|
-
return summary;
|
|
118
|
-
} else {
|
|
119
|
-
return { summary, sendResults: 'off' };
|
|
120
39
|
}
|
|
40
|
+
return summary;
|
|
121
41
|
}
|
|
122
42
|
async getFailures() {
|
|
123
43
|
const failures = [];
|
|
@@ -259,81 +179,5 @@ class ResultsParser {
|
|
|
259
179
|
}
|
|
260
180
|
return [..._passes.values()];
|
|
261
181
|
}*/
|
|
262
|
-
allNodeSummaryFilesExist() {
|
|
263
|
-
const summariesDir = path.join(process.env.CIRCLE_WORKING_DIRECTORY || './', 'playwright-report');
|
|
264
|
-
|
|
265
|
-
for (let i = 0; i < this.totalShardCount; i++) {
|
|
266
|
-
const nodeSummaryFile = path.join(summariesDir, `node_summary_${i}.json`);
|
|
267
|
-
if (!fs.existsSync(nodeSummaryFile)) {
|
|
268
|
-
return false;
|
|
269
|
-
}
|
|
270
|
-
}
|
|
271
|
-
return true;
|
|
272
|
-
}
|
|
273
|
-
|
|
274
|
-
allBlobZipsExist() {
|
|
275
|
-
const summariesDir = path.join(process.env.CIRCLE_WORKING_DIRECTORY || './', 'playwright-report');
|
|
276
|
-
|
|
277
|
-
for (let i = 0; i < this.totalShardCount; i++) {
|
|
278
|
-
const blobZipFile = path.join(summariesDir, `blob-report-node-${i}.zip`);
|
|
279
|
-
if (!fs.existsSync(blobZipFile)) {
|
|
280
|
-
return false;
|
|
281
|
-
}
|
|
282
|
-
}
|
|
283
|
-
return true;
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
async fetchAllArtifacts() {
|
|
287
|
-
const summariesDir = path.join(process.env.CIRCLE_WORKING_DIRECTORY || './', 'playwright-report');
|
|
288
|
-
while (!this.allNodeSummaryFilesExist() || !this.allBlobZipsExist()) {
|
|
289
|
-
console.log('Waiting for all blob zips to exist...');
|
|
290
|
-
if (!fs.existsSync(summariesDir)) {
|
|
291
|
-
fs.mkdirSync(summariesDir, { recursive: true });
|
|
292
|
-
}
|
|
293
|
-
for (let i = 1; i < this.totalShardCount; i++) {
|
|
294
|
-
await this.fetchArtifact(i, `node_summary_${i}.json`, summariesDir);
|
|
295
|
-
await this.fetchArtifact(i, `blob-report-node-${i}.zip`, summariesDir);
|
|
296
|
-
}
|
|
297
|
-
}
|
|
298
|
-
}
|
|
299
|
-
|
|
300
|
-
async fetchArtifact(i, file, summariesDir) {
|
|
301
|
-
const circleciToken = process.env.safetywingtest_CIRCLECI_API_TOKEN;
|
|
302
|
-
const circleciJobId = process.env.CIRCLE_WORKFLOW_JOB_ID || 'cb4e5f50-16cc-4dd1-a494-8c3094d91ceb';
|
|
303
|
-
const circleciApiUrl = `https://output.circle-artifacts.com/output/job/${circleciJobId}/artifacts/${i}/html-report/${file}`;
|
|
304
|
-
const filePath = path.join(summariesDir, file);
|
|
305
|
-
|
|
306
|
-
while (true) {
|
|
307
|
-
try {
|
|
308
|
-
const response = await axios.get(circleciApiUrl, {
|
|
309
|
-
headers: {
|
|
310
|
-
'Circle-Token': circleciToken,
|
|
311
|
-
}
|
|
312
|
-
});
|
|
313
|
-
|
|
314
|
-
fs.writeFileSync(filePath, response.data);
|
|
315
|
-
console.log(`Successfully fetched file ${file} from shard ${i}`);
|
|
316
|
-
break; // Exit the loop if the file is fetched successfully
|
|
317
|
-
} catch (error) {
|
|
318
|
-
if (error.response && error.response.status === 404) {
|
|
319
|
-
console.warn(`File ${file} not found at ${circleciApiUrl}. Retrying in 10 seconds...`);
|
|
320
|
-
await new Promise(resolve => setTimeout(resolve, 10000)); // Wait for 10 seconds
|
|
321
|
-
} else {
|
|
322
|
-
console.error(`Failed to fetch file ${file} from shard ${i}!`, error);
|
|
323
|
-
break; // Exit the loop on other errors
|
|
324
|
-
}
|
|
325
|
-
}
|
|
326
|
-
}
|
|
327
|
-
}
|
|
328
|
-
mergeReports() {
|
|
329
|
-
try {
|
|
330
|
-
// Execute the command to merge reports
|
|
331
|
-
exec('npx playwright merge-reports --reporter html playwright-report', { stdio: 'inherit' });
|
|
332
|
-
console.log('Reports merged successfully.');
|
|
333
|
-
} catch (error) {
|
|
334
|
-
// Log a warning instead of throwing an error
|
|
335
|
-
console.warn('Warning: Failed to merge reports. This may not affect the overall process.', error.message);
|
|
336
|
-
}
|
|
337
|
-
}
|
|
338
182
|
}
|
|
339
183
|
exports.default = ResultsParser;
|
package/package.json
CHANGED
|
@@ -30,7 +30,7 @@
|
|
|
30
30
|
"lint-fix": "npx eslint . --ext .ts --fix"
|
|
31
31
|
},
|
|
32
32
|
"name": "playwright-slack-report-burak",
|
|
33
|
-
"version": "2.0.1",
|
|
33
|
+
"version": "2.1.0",
|
|
34
34
|
"main": "index.js",
|
|
35
35
|
"types": "dist/index.d.ts",
|
|
36
36
|
"repository": "git@github.com:ryanrosello-og/playwright-slack-report.git",
|