@arghajit/dummy 0.3.41 → 0.3.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -601,7 +601,8 @@ class PlaywrightPulseReporter {
|
|
|
601
601
|
else {
|
|
602
602
|
// Logic for appending/merging reports
|
|
603
603
|
const pulseResultsDir = path.join(this.outputDir, this.individualReportsSubDir);
|
|
604
|
-
const
|
|
604
|
+
const shardPrefix = this.baseOutputFile.replace(".json", "-");
|
|
605
|
+
const individualReportPath = path.join(pulseResultsDir, `${shardPrefix}${Date.now()}.json`);
|
|
605
606
|
try {
|
|
606
607
|
await this._ensureDirExists(pulseResultsDir);
|
|
607
608
|
await fs.writeFile(individualReportPath, JSON.stringify(finalReport, jsonReplacer, 2));
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@arghajit/dummy",
|
|
3
3
|
"author": "Arghajit Singha",
|
|
4
|
-
"version": "0.3.
|
|
4
|
+
"version": "0.3.43",
|
|
5
5
|
"description": "A Playwright reporter and dashboard for visualizing test results.",
|
|
6
6
|
"homepage": "https://arghajit47.github.io/playwright-pulse/",
|
|
7
7
|
"repository": {
|
|
@@ -35,7 +35,7 @@
|
|
|
35
35
|
],
|
|
36
36
|
"license": "MIT",
|
|
37
37
|
"bin": {
|
|
38
|
-
"logo": "
|
|
38
|
+
"logo": "scripts/terminal-logo.mjs",
|
|
39
39
|
"generate-pulse-report": "scripts/generate-static-report.mjs",
|
|
40
40
|
"generate-report": "scripts/generate-report.mjs",
|
|
41
41
|
"merge-pulse-report": "scripts/merge-pulse-report.mjs",
|
|
@@ -36,7 +36,7 @@ async function extractReporterOptionsFromConfig(configPath) {
|
|
|
36
36
|
try {
|
|
37
37
|
// Find the Pulse/Dummy reporter block
|
|
38
38
|
// It usually looks like ["@arghajit/playwright-pulse-report", { ... }] or ["playwright-pulse-report", { ... }]
|
|
39
|
-
const reporterBlockRegex = /\[\s*["'](?:@arghajit\/)?(?:playwright-pulse-report|dummy)["']\s*,\s*(\{[\s\S]*?\})\s*\]/g;
|
|
39
|
+
const reporterBlockRegex = /\[\s*["'](?:@arghajit\/)?(?:playwright-pulse-report|dummy)["']\s*,\s*(\{[\s\S]*?\})\s*,?\s*\]/g;
|
|
40
40
|
let match;
|
|
41
41
|
while ((match = reporterBlockRegex.exec(fileContent)) !== null) {
|
|
42
42
|
const block = match[1];
|
|
@@ -38,16 +38,27 @@ async function getFullConfig() {
|
|
|
38
38
|
/**
|
|
39
39
|
* Scans the report directory for subdirectories (shards).
|
|
40
40
|
* Returns an array of absolute paths to these subdirectories.
|
|
41
|
-
* Excludes the 'attachments' folder
|
|
41
|
+
* Excludes the 'attachments' folder and non-shard directories.
|
|
42
42
|
*/
|
|
43
|
-
function getShardDirectories(dir) {
|
|
43
|
+
function getShardDirectories(dir, outputFile, individualReportsSubDir) {
|
|
44
44
|
if (!fs.existsSync(dir)) {
|
|
45
45
|
return [];
|
|
46
46
|
}
|
|
47
47
|
|
|
48
48
|
return fs
|
|
49
49
|
.readdirSync(dir, { withFileTypes: true })
|
|
50
|
-
.filter((dirent) =>
|
|
50
|
+
.filter((dirent) => {
|
|
51
|
+
if (!dirent.isDirectory() || dirent.name === "attachments" || dirent.name === individualReportsSubDir) {
|
|
52
|
+
return false;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
const shardPath = path.join(dir, dirent.name);
|
|
56
|
+
const hasDirectReport = fs.existsSync(path.join(shardPath, outputFile));
|
|
57
|
+
const hasSequentialResults = fs.existsSync(path.join(shardPath, individualReportsSubDir));
|
|
58
|
+
|
|
59
|
+
// Scenario 3: Only consider directories that have either a report or sequential results
|
|
60
|
+
return hasDirectReport || hasSequentialResults;
|
|
61
|
+
})
|
|
51
62
|
.map((dirent) => path.join(dir, dirent.name));
|
|
52
63
|
}
|
|
53
64
|
|
|
@@ -61,6 +72,7 @@ function mergeReports(shardDirs, outputFile) {
|
|
|
61
72
|
failed: 0,
|
|
62
73
|
skipped: 0,
|
|
63
74
|
duration: 0,
|
|
75
|
+
flaky: 0
|
|
64
76
|
};
|
|
65
77
|
|
|
66
78
|
let combinedResults = [];
|
|
@@ -72,7 +84,7 @@ function mergeReports(shardDirs, outputFile) {
|
|
|
72
84
|
const jsonPath = path.join(shardDir, outputFile);
|
|
73
85
|
|
|
74
86
|
if (!fs.existsSync(jsonPath)) {
|
|
75
|
-
console.warn(` Warning: No ${outputFile} found in ${path.basename(shardDir)}
|
|
87
|
+
console.warn(` Warning: No ${outputFile} found in ${path.basename(shardDir)} after pre-merge attempt.`);
|
|
76
88
|
continue;
|
|
77
89
|
}
|
|
78
90
|
|
|
@@ -85,7 +97,7 @@ function mergeReports(shardDirs, outputFile) {
|
|
|
85
97
|
combinedRun.passed += run.passed || 0;
|
|
86
98
|
combinedRun.failed += run.failed || 0;
|
|
87
99
|
combinedRun.skipped += run.skipped || 0;
|
|
88
|
-
combinedRun.flaky
|
|
100
|
+
combinedRun.flaky += run.flaky || 0;
|
|
89
101
|
combinedRun.duration += run.duration || 0;
|
|
90
102
|
|
|
91
103
|
if (run.environment) {
|
|
@@ -179,8 +191,7 @@ function cleanupShardDirectories(shardDirs) {
|
|
|
179
191
|
const config = await getFullConfig();
|
|
180
192
|
const REPORT_DIR = config.outputDir;
|
|
181
193
|
const OUTPUT_FILE = config.outputFile;
|
|
182
|
-
|
|
183
|
-
await mergeSequentialReportsIfNeeded(REPORT_DIR);
|
|
194
|
+
const INDIVIDUAL_SUBDIR = config.individualReportsSubDir;
|
|
184
195
|
|
|
185
196
|
console.log(`\nš Playwright Pulse - Merge Reports (Sharding Mode)\n`);
|
|
186
197
|
console.log(` Report directory: ${REPORT_DIR}`);
|
|
@@ -192,13 +203,13 @@ function cleanupShardDirectories(shardDirs) {
|
|
|
192
203
|
}
|
|
193
204
|
console.log();
|
|
194
205
|
|
|
195
|
-
// 1. Get Shard Directories
|
|
196
|
-
const shardDirs = getShardDirectories(REPORT_DIR);
|
|
206
|
+
// 1. Get initial Shard Directories (Scenario 3: filtering non-relevant folders)
|
|
207
|
+
const shardDirs = getShardDirectories(REPORT_DIR, OUTPUT_FILE, INDIVIDUAL_SUBDIR);
|
|
197
208
|
|
|
198
209
|
if (shardDirs.length === 0) {
|
|
199
210
|
console.log("ā No shard directories found.");
|
|
200
211
|
console.log(
|
|
201
|
-
` Expected structure: <report-dir>/<shard-folder>/${OUTPUT_FILE}
|
|
212
|
+
` Expected structure: <report-dir>/<shard-folder>/${OUTPUT_FILE} or <report-dir>/<shard-folder>/${INDIVIDUAL_SUBDIR}/`,
|
|
202
213
|
);
|
|
203
214
|
process.exit(0);
|
|
204
215
|
}
|
|
@@ -209,18 +220,30 @@ function cleanupShardDirectories(shardDirs) {
|
|
|
209
220
|
});
|
|
210
221
|
console.log();
|
|
211
222
|
|
|
212
|
-
// 2.
|
|
213
|
-
console.log(
|
|
223
|
+
// 2. Scenario 1: Pre-merge sequential results for EACH shard if needed
|
|
224
|
+
console.log(`āļø Checking for sequential results in shards...`);
|
|
225
|
+
for (const shardDir of shardDirs) {
|
|
226
|
+
const hasSequential = fs.existsSync(path.join(shardDir, INDIVIDUAL_SUBDIR));
|
|
227
|
+
if (hasSequential) {
|
|
228
|
+
console.log(` - ${path.basename(shardDir)}: Merging sequential results...`);
|
|
229
|
+
// Force merge because individual shard dirs might not have playwright.config.ts resolving to resetOnEachRun=false
|
|
230
|
+
await mergeSequentialReportsIfNeeded(shardDir, true);
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
console.log();
|
|
234
|
+
|
|
235
|
+
// 3. Merge JSON Reports
|
|
236
|
+
console.log(`š Merging reports across shards...`);
|
|
214
237
|
const merged = mergeReports(shardDirs, OUTPUT_FILE);
|
|
215
238
|
console.log(` ā Merged ${shardDirs.length} report(s)`);
|
|
216
239
|
console.log();
|
|
217
240
|
|
|
218
|
-
//
|
|
241
|
+
// 4. Copy Attachments
|
|
219
242
|
console.log(`š Merging attachments...`);
|
|
220
243
|
mergeAttachments(shardDirs, REPORT_DIR);
|
|
221
244
|
console.log(` ā Attachments merged`);
|
|
222
245
|
|
|
223
|
-
//
|
|
246
|
+
// 5. Write Final Merged JSON
|
|
224
247
|
const finalReportPath = path.join(REPORT_DIR, OUTPUT_FILE);
|
|
225
248
|
fs.writeFileSync(finalReportPath, JSON.stringify(merged, null, 2));
|
|
226
249
|
|
|
@@ -228,7 +251,7 @@ function cleanupShardDirectories(shardDirs) {
|
|
|
228
251
|
console.log(` Total tests: ${merged.run.totalTests}`);
|
|
229
252
|
console.log(` Passed: ${merged.run.passed} | Failed: ${merged.run.failed} | Skipped: ${merged.run.skipped} | Flaky: ${merged.run.flaky}`);
|
|
230
253
|
|
|
231
|
-
//
|
|
254
|
+
// 6. Cleanup Shard Directories
|
|
232
255
|
cleanupShardDirectories(shardDirs);
|
|
233
256
|
|
|
234
257
|
console.log();
|
|
@@ -4,33 +4,43 @@ import * as path from "path";
|
|
|
4
4
|
import { getReporterConfig } from "./config-reader.mjs";
|
|
5
5
|
|
|
6
6
|
/**
|
|
7
|
-
* Reads all
|
|
8
|
-
* and merges them into a single
|
|
7
|
+
* Reads all `<outputFile>-*.json` files in the `pulse-results` directory
|
|
8
|
+
* and merges them into a single `<outputFile>.json`.
|
|
9
9
|
* It resolves duplicate tests using exactly the same logic as the reporter.
|
|
10
10
|
*
|
|
11
11
|
* @param {string} customOutputDir The base report directory override (from CLI).
|
|
12
|
+
* @param {boolean} forceMerge Try to merge regardless of config.resetOnEachRun (used by sharded merge).
|
|
12
13
|
*/
|
|
13
|
-
export async function mergeSequentialReportsIfNeeded(customOutputDir) {
|
|
14
|
+
export async function mergeSequentialReportsIfNeeded(customOutputDir, forceMerge = false) {
|
|
14
15
|
const config = await getReporterConfig(customOutputDir);
|
|
15
16
|
|
|
16
|
-
// This logic should ONLY run if resetOnEachRun is disabled
|
|
17
|
-
|
|
17
|
+
// This logic should ONLY run if resetOnEachRun is disabled, UNLESS we are forcing it
|
|
18
|
+
// (e.g. recovering orphaned shards in merge-pulse-report.mjs).
|
|
19
|
+
if (config.resetOnEachRun && !forceMerge) {
|
|
18
20
|
return;
|
|
19
21
|
}
|
|
20
22
|
|
|
21
23
|
const individualReportsSubDir = config.individualReportsSubDir;
|
|
22
24
|
const baseOutputFile = config.outputFile;
|
|
23
|
-
|
|
25
|
+
|
|
26
|
+
// If customOutputDir is provided, it might be an absolute path to a shard. Use it directly if it is absolute.
|
|
27
|
+
// Otherwise, fall back to the config's outputDir (which is resolved relative to CWD).
|
|
28
|
+
const outputDir = customOutputDir && path.isAbsolute(customOutputDir)
|
|
29
|
+
? customOutputDir
|
|
30
|
+
: config.outputDir;
|
|
24
31
|
|
|
25
32
|
const pulseResultsDir = path.join(outputDir, individualReportsSubDir);
|
|
26
33
|
const finalOutputPath = path.join(outputDir, baseOutputFile);
|
|
27
34
|
|
|
35
|
+
// Use the actual outputFile name as seed for shard files (e.g. "results.json" -> "results-")
|
|
36
|
+
const shardPrefix = baseOutputFile.replace(".json", "-");
|
|
37
|
+
|
|
28
38
|
let reportFiles;
|
|
29
39
|
try {
|
|
30
40
|
const allFiles = await fs.readdir(pulseResultsDir);
|
|
31
41
|
reportFiles = allFiles.filter(
|
|
32
42
|
(file) =>
|
|
33
|
-
file.startsWith(
|
|
43
|
+
file.startsWith(shardPrefix) && file.endsWith(".json"),
|
|
34
44
|
);
|
|
35
45
|
} catch (error) {
|
|
36
46
|
if (error.code === "ENOENT") {
|
|
@@ -64,7 +74,7 @@ export async function mergeSequentialReportsIfNeeded(customOutputDir) {
|
|
|
64
74
|
const content = await fs.readFile(filePath, "utf-8");
|
|
65
75
|
const json = JSON.parse(content);
|
|
66
76
|
|
|
67
|
-
let currentRunId = `run-${Date.now()}
|
|
77
|
+
let currentRunId = `run-${Date.now()}`;
|
|
68
78
|
if (json.run) {
|
|
69
79
|
if (json.run.id) currentRunId = json.run.id;
|
|
70
80
|
|
|
@@ -99,7 +109,7 @@ export async function mergeSequentialReportsIfNeeded(customOutputDir) {
|
|
|
99
109
|
);
|
|
100
110
|
|
|
101
111
|
const combinedRun = {
|
|
102
|
-
id: `run-${Date.now()}
|
|
112
|
+
id: `run-${Date.now()}`,
|
|
103
113
|
timestamp: latestTimestamp.toISOString(),
|
|
104
114
|
environment: lastRunEnvironment,
|
|
105
115
|
totalTests: finalMergedResults.length,
|