@arghajit/playwright-pulse-report 0.3.4 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,10 @@
1
1
  #!/usr/bin/env node
2
2
 
3
- const fs = require("fs");
4
- const path = require("path");
3
+ import * as fs from "fs";
4
+ import path from "path";
5
+ import { getReporterConfig } from "./config-reader.mjs";
6
+ import { animate } from "./terminal-logo.mjs";
7
+ import { mergeSequentialReportsIfNeeded } from "./merge-sequential-reports.mjs";
5
8
 
6
9
  const args = process.argv.slice(2);
7
10
  let customOutputDir = null;
@@ -13,61 +16,63 @@ for (let i = 0; i < args.length; i++) {
13
16
  }
14
17
  }
15
18
 
16
- const OUTPUT_FILE = "playwright-pulse-report.json";
17
-
18
19
  /**
19
- * Securely resolves the report directory.
20
- * Prevents Path Traversal by ensuring the output directory
21
- * is contained within the current working directory.
20
+ * Securely resolves the report directory and config.
22
21
  */
23
/**
 * Securely resolves the report directory and config.
 *
 * Loads the reporter config (honouring any `--output-dir` CLI override held in
 * the module-level `customOutputDir`) and validates that the override stays
 * inside the current project root. Exits the process with code 1 on a
 * path-traversal attempt.
 *
 * @returns {Promise<object>} the resolved reporter config.
 */
async function getFullConfig() {
  const config = await getReporterConfig(customOutputDir);

  if (customOutputDir) {
    const resolvedPath = path.resolve(process.cwd(), customOutputDir);
    // A plain startsWith() containment check is bypassable: "/proj-evil"
    // startsWith "/proj". path.relative() is the robust test — an escaping
    // path yields a ".."-prefixed (or absolute, on Windows drives) relative.
    const relative = path.relative(process.cwd(), resolvedPath);
    if (relative.startsWith("..") || path.isAbsolute(relative)) {
      console.error(
        "⛔ Security Error: Custom output directory must be within the current project root.",
      );
      process.exit(1);
    }
  }

  return config;
}
44
37
 
45
38
  /**
46
39
  * Scans the report directory for subdirectories (shards).
47
40
  * Returns an array of absolute paths to these subdirectories.
48
- * Excludes the 'attachments' folder itself.
41
+ * Excludes the 'attachments' folder and non-shard directories.
49
42
  */
50
/**
 * Scans the report directory for subdirectories that look like shard outputs.
 *
 * A subdirectory qualifies when it contains either a finished report file
 * (`outputFile`) or a sequential-results folder (`individualReportsSubDir`).
 * The `attachments` folder and the sequential-results folder at the top level
 * are never treated as shards.
 *
 * @param {string} dir - report directory to scan.
 * @param {string} outputFile - name of the per-shard report JSON file.
 * @param {string} individualReportsSubDir - name of the sequential-results subfolder.
 * @returns {string[]} absolute paths of the qualifying shard directories.
 */
function getShardDirectories(dir, outputFile, individualReportsSubDir) {
  if (!fs.existsSync(dir)) {
    return [];
  }

  const entries = fs.readdirSync(dir, { withFileTypes: true });
  const shardPaths = [];

  for (const entry of entries) {
    // Skip files and the well-known non-shard folders.
    if (!entry.isDirectory()) continue;
    if (entry.name === "attachments" || entry.name === individualReportsSubDir) continue;

    const candidate = path.join(dir, entry.name);
    const hasDirectReport = fs.existsSync(path.join(candidate, outputFile));
    const hasSequentialResults = fs.existsSync(path.join(candidate, individualReportsSubDir));

    // Scenario 3: only directories holding a report or sequential results count.
    if (hasDirectReport || hasSequentialResults) {
      shardPaths.push(candidate);
    }
  }

  return shardPaths;
}
60
64
 
61
65
  /**
62
66
  * Merges JSON reports from all shard directories.
63
67
  */
64
- function mergeReports(shardDirs) {
68
+ function mergeReports(shardDirs, outputFile) {
65
69
  let combinedRun = {
66
70
  totalTests: 0,
67
71
  passed: 0,
68
72
  failed: 0,
69
73
  skipped: 0,
70
74
  duration: 0,
75
+ flaky: 0
71
76
  };
72
77
 
73
78
  let combinedResults = [];
@@ -76,10 +81,10 @@ function mergeReports(shardDirs) {
76
81
  let allEnvironments = [];
77
82
 
78
83
  for (const shardDir of shardDirs) {
79
- const jsonPath = path.join(shardDir, OUTPUT_FILE);
84
+ const jsonPath = path.join(shardDir, outputFile);
80
85
 
81
86
  if (!fs.existsSync(jsonPath)) {
82
- console.warn(` Warning: No ${OUTPUT_FILE} found in ${path.basename(shardDir)}`);
87
+ console.warn(` Warning: No ${outputFile} found in ${path.basename(shardDir)} after pre-merge attempt.`);
83
88
  continue;
84
89
  }
85
90
 
@@ -92,7 +97,7 @@ function mergeReports(shardDirs) {
92
97
  combinedRun.passed += run.passed || 0;
93
98
  combinedRun.failed += run.failed || 0;
94
99
  combinedRun.skipped += run.skipped || 0;
95
- combinedRun.flaky = (combinedRun.flaky || 0) + (run.flaky || 0);
100
+ combinedRun.flaky += run.flaky || 0;
96
101
  combinedRun.duration += run.duration || 0;
97
102
 
98
103
  if (run.environment) {
@@ -181,13 +186,16 @@ function cleanupShardDirectories(shardDirs) {
181
186
 
182
187
  // Main execution
183
188
  (async () => {
184
- const { animate } = await import("./terminal-logo.mjs");
185
189
  await animate();
186
190
 
187
- const REPORT_DIR = await getReportDir();
191
+ const config = await getFullConfig();
192
+ const REPORT_DIR = config.outputDir;
193
+ const OUTPUT_FILE = config.outputFile;
194
+ const INDIVIDUAL_SUBDIR = config.individualReportsSubDir;
188
195
 
189
196
  console.log(`\n🔄 Playwright Pulse - Merge Reports (Sharding Mode)\n`);
190
197
  console.log(` Report directory: ${REPORT_DIR}`);
198
+ console.log(` Output file: ${OUTPUT_FILE}`);
191
199
  if (customOutputDir) {
192
200
  console.log(` (from CLI argument)`);
193
201
  } else {
@@ -195,13 +203,13 @@ function cleanupShardDirectories(shardDirs) {
195
203
  }
196
204
  console.log();
197
205
 
198
- // 1. Get Shard Directories
199
- const shardDirs = getShardDirectories(REPORT_DIR);
206
+ // 1. Get initial Shard Directories (Scenario 3: filtering non-relevant folders)
207
+ const shardDirs = getShardDirectories(REPORT_DIR, OUTPUT_FILE, INDIVIDUAL_SUBDIR);
200
208
 
201
209
  if (shardDirs.length === 0) {
202
210
  console.log("❌ No shard directories found.");
203
211
  console.log(
204
- " Expected structure: <report-dir>/<shard-folder>/playwright-pulse-report.json",
212
+ ` Expected structure: <report-dir>/<shard-folder>/${OUTPUT_FILE} or <report-dir>/<shard-folder>/${INDIVIDUAL_SUBDIR}/`,
205
213
  );
206
214
  process.exit(0);
207
215
  }
@@ -212,18 +220,30 @@ function cleanupShardDirectories(shardDirs) {
212
220
  });
213
221
  console.log();
214
222
 
215
- // 2. Merge JSON Reports
216
- console.log(`🔀 Merging reports...`);
217
- const merged = mergeReports(shardDirs);
223
+ // 2. Scenario 1: Pre-merge sequential results for EACH shard if needed
224
+ console.log(`⚙️ Checking for sequential results in shards...`);
225
+ for (const shardDir of shardDirs) {
226
+ const hasSequential = fs.existsSync(path.join(shardDir, INDIVIDUAL_SUBDIR));
227
+ if (hasSequential) {
228
+ console.log(` - ${path.basename(shardDir)}: Merging sequential results...`);
229
+ // Force merge because individual shard dirs might not have playwright.config.ts resolving to resetOnEachRun=false
230
+ await mergeSequentialReportsIfNeeded(shardDir, true);
231
+ }
232
+ }
233
+ console.log();
234
+
235
+ // 3. Merge JSON Reports
236
+ console.log(`🔀 Merging reports across shards...`);
237
+ const merged = mergeReports(shardDirs, OUTPUT_FILE);
218
238
  console.log(` ✓ Merged ${shardDirs.length} report(s)`);
219
239
  console.log();
220
240
 
221
- // 3. Copy Attachments
241
+ // 4. Copy Attachments
222
242
  console.log(`📎 Merging attachments...`);
223
243
  mergeAttachments(shardDirs, REPORT_DIR);
224
244
  console.log(` ✓ Attachments merged`);
225
245
 
226
- // 4. Write Final Merged JSON
246
+ // 5. Write Final Merged JSON
227
247
  const finalReportPath = path.join(REPORT_DIR, OUTPUT_FILE);
228
248
  fs.writeFileSync(finalReportPath, JSON.stringify(merged, null, 2));
229
249
 
@@ -231,7 +251,7 @@ function cleanupShardDirectories(shardDirs) {
231
251
  console.log(` Total tests: ${merged.run.totalTests}`);
232
252
  console.log(` Passed: ${merged.run.passed} | Failed: ${merged.run.failed} | Skipped: ${merged.run.skipped} | Flaky: ${merged.run.flaky}`);
233
253
 
234
- // 5. Cleanup Shard Directories
254
+ // 6. Cleanup Shard Directories
235
255
  cleanupShardDirectories(shardDirs);
236
256
 
237
257
  console.log();
@@ -0,0 +1,172 @@
1
+ import * as fs from "fs/promises";
2
+ import * as path from "path";
3
+
4
+ import { getReporterConfig } from "./config-reader.mjs";
5
+
6
/**
 * Reads all `<outputFile>-*.json` files in the `pulse-results` directory
 * and merges them into a single `<outputFile>.json`.
 * It resolves duplicate tests using exactly the same logic as the reporter.
 *
 * On success the individual-reports directory is deleted; every failure mode
 * (missing directory, unparsable shard file, write error) is logged and the
 * function returns without throwing.
 *
 * @param {string} customOutputDir The base report directory override (from CLI).
 * @param {boolean} forceMerge Try to merge regardless of config.resetOnEachRun (used by sharded merge).
 */
export async function mergeSequentialReportsIfNeeded(customOutputDir, forceMerge = false) {
  const config = await getReporterConfig(customOutputDir);

  // This logic should ONLY run if resetOnEachRun is disabled, UNLESS we are forcing it
  // (e.g. recovering orphaned shards in merge-pulse-report.mjs).
  if (config.resetOnEachRun && !forceMerge) {
    return;
  }

  const individualReportsSubDir = config.individualReportsSubDir;
  const baseOutputFile = config.outputFile;

  // If customOutputDir is provided, it might be an absolute path to a shard. Use it directly if it is absolute.
  // Otherwise, fall back to the config's outputDir (which is resolved relative to CWD).
  const outputDir = customOutputDir && path.isAbsolute(customOutputDir)
    ? customOutputDir
    : config.outputDir;

  const pulseResultsDir = path.join(outputDir, individualReportsSubDir);
  const finalOutputPath = path.join(outputDir, baseOutputFile);

  // Use the actual outputFile name as seed for shard files (e.g. "results.json" -> "results-").
  // NOTE(review): String.replace swaps only the FIRST ".json" occurrence — fine
  // for conventional names, worth confirming for exotic ones.
  const shardPrefix = baseOutputFile.replace(".json", "-");

  let reportFiles;
  try {
    const allFiles = await fs.readdir(pulseResultsDir);
    reportFiles = allFiles.filter(
      (file) =>
        file.startsWith(shardPrefix) && file.endsWith(".json"),
    );
  } catch (error) {
    if (error.code === "ENOENT") {
      // No individual reports directory found, which is completely fine/normal
      return;
    }
    // Any other read failure (permissions, etc.) is reported but non-fatal.
    console.error(
      `Pulse Reporter: Error reading directory ${pulseResultsDir}:`,
      error,
    );
    return;
  }

  if (reportFiles.length === 0) {
    // No matching JSON report files found to merge
    return;
  }

  console.log(
    `\n🔄 Merging ${reportFiles.length} sequential test run(s) from '${individualReportsSubDir}'...`,
  );

  // Accumulators across every per-run JSON file: all results (tagged with
  // their runId), the newest run timestamp/environment seen, and total duration.
  const allResultsFromAllFiles = [];
  let latestTimestamp = new Date(0);
  let lastRunEnvironment = undefined;
  let totalDuration = 0;

  for (const file of reportFiles) {
    const filePath = path.join(pulseResultsDir, file);
    try {
      const content = await fs.readFile(filePath, "utf-8");
      const json = JSON.parse(content);

      // Fallback id if the run block carries none; real id wins when present.
      let currentRunId = `run-${Date.now()}`;
      if (json.run) {
        if (json.run.id) currentRunId = json.run.id;

        // Track the most recent run so the merged report inherits its
        // timestamp and environment. (An unparsable timestamp yields an
        // Invalid Date, which never compares greater — it is simply skipped.)
        const runTimestamp = new Date(json.run.timestamp);
        if (runTimestamp > latestTimestamp) {
          latestTimestamp = runTimestamp;
          lastRunEnvironment = json.run.environment || undefined;
        }
      }

      if (json.results) {
        // Tag each result with its runId to ensure we can sum them up if they have same IDs but different runs
        const resultsWithRunId = json.results.map((r) => ({
          ...r,
          runId: currentRunId,
        }));
        allResultsFromAllFiles.push(...resultsWithRunId);
      }
    } catch (err) {
      // A corrupt shard file is skipped rather than aborting the whole merge.
      console.warn(
        `Pulse Reporter: Could not parse report file ${filePath}. Skipping. Error: ${err.message}`,
      );
    }
  }

  // The results from individual run JSONs are already finalized and deduplicated by their own run's reporter.
  // We simply concatenate them. The runId tag ensures tests across runs remain distinguishable.
  const finalMergedResults = allResultsFromAllFiles;

  totalDuration = finalMergedResults.reduce(
    (acc, r) => acc + (r.duration || 0),
    0,
  );

  // Rebuild the run summary from the merged result set; counts prefer
  // final_status (post-retry verdict) over the raw status when present.
  const combinedRun = {
    id: `run-${Date.now()}`,
    timestamp: latestTimestamp.toISOString(),
    environment: lastRunEnvironment,
    totalTests: finalMergedResults.length,
    passed: finalMergedResults.filter(
      (r) => (r.final_status || r.status) === "passed",
    ).length,
    failed: finalMergedResults.filter(
      (r) => (r.final_status || r.status) === "failed",
    ).length,
    skipped: finalMergedResults.filter(
      (r) => (r.final_status || r.status) === "skipped",
    ).length,
    flaky: finalMergedResults.filter(
      (r) => (r.final_status || r.status) === "flaky",
    ).length,
    duration: totalDuration,
  };

  const finalReport = {
    run: combinedRun,
    results: finalMergedResults,
    metadata: {
      generatedAt: new Date().toISOString(),
    },
  };

  try {
    await fs.writeFile(
      finalOutputPath,
      JSON.stringify(
        finalReport,
        // Serialize any Date values as ISO strings for a stable JSON shape.
        (key, value) => {
          if (value instanceof Date) return value.toISOString();
          return value;
        },
        2,
      ),
    );
    console.log(
      `✅ Merged report with ${finalMergedResults.length} total results saved to ${finalOutputPath}`,
    );

    // Clean up the pulse-results directory after a successful merge
    try {
      await fs.rm(pulseResultsDir, { recursive: true, force: true });
      console.log(
        `🧹 Cleaned up temporary reports directory at ${pulseResultsDir}`,
      );
    } catch (cleanupErr) {
      // Cleanup failure is cosmetic — the merged report was already written.
      console.warn(
        `Pulse Reporter: Could not clean up individual reports directory. Error: ${cleanupErr.message}`,
      );
    }
  } catch (err) {
    console.error(
      `Pulse Reporter: Failed to write final merged report to ${finalOutputPath}. Error: ${err.message}`,
    );
  }
}