@arghajit/dummy 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +259 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +26 -0
- package/dist/lib/report-types.d.ts +8 -0
- package/dist/lib/report-types.js +2 -0
- package/dist/playwright-pulse-reporter.d.ts +26 -0
- package/dist/playwright-pulse-reporter.js +304 -0
- package/dist/reporter/attachment-utils.d.ts +10 -0
- package/dist/reporter/attachment-utils.js +192 -0
- package/dist/reporter/index.d.ts +5 -0
- package/dist/reporter/index.js +9 -0
- package/dist/reporter/lib/report-types.d.ts +8 -0
- package/dist/reporter/lib/report-types.js +2 -0
- package/dist/reporter/playwright-pulse-reporter.d.ts +27 -0
- package/dist/reporter/playwright-pulse-reporter.js +454 -0
- package/dist/reporter/reporter/playwright-pulse-reporter.d.ts +1 -0
- package/dist/reporter/reporter/playwright-pulse-reporter.js +398 -0
- package/dist/reporter/types/index.d.ts +52 -0
- package/dist/reporter/types/index.js +2 -0
- package/dist/types/index.d.ts +65 -0
- package/dist/types/index.js +2 -0
- package/package.json +73 -0
- package/screenshots/127-0-0-1-5500-pulse-report-output-playwright-pulse-static-report-html-i-Phone-14-Pro-Max-1.png +0 -0
- package/screenshots/127-0-0-1-5500-pulse-report-output-playwright-pulse-static-report-html-i-Phone-14-Pro-Max.png +0 -0
- package/screenshots/Email-report.jpg +0 -0
- package/screenshots/Users-arghajitsingha-Downloads-pulse-report-1-playwright-pulse-static-report-html-1.png +0 -0
- package/screenshots/Users-arghajitsingha-Downloads-pulse-report-1-playwright-pulse-static-report-html-2.png +0 -0
- package/screenshots/Users-arghajitsingha-Downloads-pulse-report-1-playwright-pulse-static-report-html.png +0 -0
- package/screenshots/image.png +0 -0
- package/scripts/generate-static-report.mjs +2279 -0
- package/scripts/generate-trend.mjs +165 -0
- package/scripts/merge-pulse-report.js +81 -0
- package/scripts/sendReport.js +335 -0
package/scripts/generate-trend.mjs
@@ -0,0 +1,165 @@
```js
import * as fs from "fs/promises";
import path from "path";
// XLSX is NO LONGER NEEDED here

// Use dynamic import for chalk as it's ESM only for prettier console logs
let chalk;
try {
  chalk = (await import("chalk")).default;
} catch (e) {
  chalk = {
    green: (t) => t,
    red: (t) => t,
    yellow: (t) => t,
    blue: (t) => t,
    bold: (t) => t,
  };
}

const DEFAULT_OUTPUT_DIR = "pulse-report";
const CURRENT_RUN_JSON_FILE = "playwright-pulse-report.json"; // Source of the current run data
const HISTORY_SUBDIR = "history"; // Subdirectory for historical JSON files
const HISTORY_FILE_PREFIX = "trend-";
const MAX_HISTORY_FILES = 15; // Store last 15 runs

async function archiveCurrentRunData() {
  const outputDir = path.resolve(process.cwd(), DEFAULT_OUTPUT_DIR);
  const currentRunJsonPath = path.join(outputDir, CURRENT_RUN_JSON_FILE);
  const historyDir = path.join(outputDir, HISTORY_SUBDIR);

  try {
    // 1. Ensure history directory exists
    await fs.mkdir(historyDir, { recursive: true });
    // console.log(chalk.blue(`History directory ensured at: ${historyDir}`));

    // 2. Read the current run's JSON data
    // console.log(chalk.blue(`Reading current run data from: ${currentRunJsonPath}`));
    let currentReportData;
    try {
      const jsonData = await fs.readFile(currentRunJsonPath, "utf-8");
      currentReportData = JSON.parse(jsonData);
      if (
        !currentReportData ||
        !currentReportData.run ||
        !currentReportData.run.timestamp
      ) {
        throw new Error(
          "Invalid current run JSON report structure. Missing 'run' or 'run.timestamp' data."
        );
      }
    } catch (error) {
      console.error(
        chalk.red(
          `Error reading or parsing current run JSON report at ${currentRunJsonPath}: ${error.message}`
        )
      );
      process.exit(1); // Exit if we can't read the source file
    }

    // 3. Determine the filename for the new history file
    // Ensure timestamp is a valid number before using getTime()
    let runTimestampMs;
    try {
      runTimestampMs = new Date(currentReportData.run.timestamp).getTime();
      if (isNaN(runTimestampMs)) {
        throw new Error(
          `Invalid timestamp value: ${currentReportData.run.timestamp}`
        );
      }
    } catch (dateError) {
      console.error(
        chalk.red(
          `Failed to parse timestamp '${currentReportData.run.timestamp}': ${dateError.message}`
        )
      );
      process.exit(1);
    }

    const newHistoryFileName = `${HISTORY_FILE_PREFIX}${runTimestampMs}.json`;
    const newHistoryFilePath = path.join(historyDir, newHistoryFileName);

    // 4. Write the current run's data to the new history file
    // console.log(chalk.blue(`Saving current run data to: ${newHistoryFilePath}`));
    await fs.writeFile(
      newHistoryFilePath,
      JSON.stringify(currentReportData, null, 2),
      "utf-8"
    );
    console.log(chalk.green(`Archived current run to: ${newHistoryFilePath}`));

    // 5. Prune old history files
    await pruneOldHistoryFiles(historyDir);
  } catch (error) {
    console.error(
      chalk.red(`Error in archiveCurrentRunData: ${error.message}`)
    );
    // console.error(error.stack); // Uncomment for more detailed stack trace
    process.exit(1);
  }
}

async function pruneOldHistoryFiles(historyDir) {
  // console.log(chalk.blue(`Pruning old history files in ${historyDir} (keeping last ${MAX_HISTORY_FILES})...`));
  try {
    const files = await fs.readdir(historyDir);
    const historyJsonFiles = files
      .filter(
        (file) => file.startsWith(HISTORY_FILE_PREFIX) && file.endsWith(".json")
      )
      .map((file) => {
        const timestampPart = file
          .replace(HISTORY_FILE_PREFIX, "")
          .replace(".json", "");
        return { name: file, timestamp: parseInt(timestampPart, 10) };
      })
      .filter((file) => !isNaN(file.timestamp))
      .sort((a, b) => a.timestamp - b.timestamp); // Sort ascending (oldest first)

    if (historyJsonFiles.length > MAX_HISTORY_FILES) {
      const filesToDelete = historyJsonFiles.slice(
        0,
        historyJsonFiles.length - MAX_HISTORY_FILES
      );
      console.log(
        chalk.yellow(
          `Found ${historyJsonFiles.length} history files. Pruning ${filesToDelete.length} oldest file(s)...`
        )
      );
      for (const fileMeta of filesToDelete) {
        const filePathToDelete = path.join(historyDir, fileMeta.name);
        try {
          await fs.unlink(filePathToDelete);
          // console.log(chalk.gray(`Deleted old history file: ${fileMeta.name}`));
        } catch (deleteError) {
          console.warn(
            chalk.yellow(
              `Could not delete old history file ${fileMeta.name}: ${deleteError.message}`
            )
          );
        }
      }
    } else {
      // console.log(chalk.green(`Found ${historyJsonFiles.length} history files. No pruning needed.`));
    }
  } catch (error) {
    console.warn(
      chalk.yellow(
        `Warning during history pruning in ${historyDir}: ${error.message}`
      )
    );
    // Don't exit for pruning errors, as saving the current run is more critical
  }
}

// Main execution
archiveCurrentRunData().catch((error) => {
  // Fallback catch, though critical errors in archiveCurrentRunData should exit
  if (process.exitCode === undefined || process.exitCode === 0) {
    // check if not already exited
    console.error(
      chalk.red.bold("An unexpected error occurred in history archiving:"),
      error
    );
    process.exit(1);
  }
});
```
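For context, a minimal sketch of what this archiving script relies on: the only field it strictly requires in `pulse-report/playwright-pulse-report.json` is `run.timestamp`, and the history file name is the parsed epoch-millisecond value. The timestamp below is an illustrative example, not data from a real report.

```js
// Hedged sketch of how generate-trend.mjs derives a history file name.
// The report object below is a made-up example; only run.timestamp matters here.
const report = { run: { timestamp: "2024-05-01T10:15:30.000Z" } };
const runTimestampMs = new Date(report.run.timestamp).getTime(); // 1714558530000
console.log(`trend-${runTimestampMs}.json`);
// -> "trend-1714558530000.json", written under pulse-report/history/,
//    with only the newest 15 (MAX_HISTORY_FILES) trend-*.json files retained.
```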
package/scripts/merge-pulse-report.js
@@ -0,0 +1,81 @@
```js
#!/usr/bin/env node

const fs = require("fs");
const path = require("path");

const REPORT_DIR = "./pulse-report"; // Or change this to your reports directory
const OUTPUT_FILE = "playwright-pulse-report.json";

function getReportFiles(dir) {
  return fs
    .readdirSync(dir)
    .filter(
      (file) =>
        file.startsWith("playwright-pulse-report-") && file.endsWith(".json")
    );
}

function mergeReports(files) {
  let combinedRun = {
    totalTests: 0,
    passed: 0,
    failed: 0,
    skipped: 0,
    duration: 0,
  };

  let combinedResults = [];

  let latestTimestamp = "";
  let latestGeneratedAt = "";

  for (const file of files) {
    const filePath = path.join(REPORT_DIR, file);
    const json = JSON.parse(fs.readFileSync(filePath, "utf-8"));

    const run = json.run || {};
    combinedRun.totalTests += run.totalTests || 0;
    combinedRun.passed += run.passed || 0;
    combinedRun.failed += run.failed || 0;
    combinedRun.skipped += run.skipped || 0;
    combinedRun.duration += run.duration || 0;

    if (json.results) {
      combinedResults.push(...json.results);
    }

    if (run.timestamp > latestTimestamp) latestTimestamp = run.timestamp;
    if (json.metadata?.generatedAt > latestGeneratedAt)
      latestGeneratedAt = json.metadata.generatedAt;
  }

  const finalJson = {
    run: {
      id: `merged-${Date.now()}`,
      timestamp: latestTimestamp,
      ...combinedRun,
    },
    results: combinedResults,
    metadata: {
      generatedAt: latestGeneratedAt,
    },
  };

  return finalJson;
}

// Main execution
const reportFiles = getReportFiles(REPORT_DIR);

if (reportFiles.length === 0) {
  console.log("No matching JSON report files found.");
  process.exit(1);
}

const merged = mergeReports(reportFiles);

fs.writeFileSync(
  path.join(REPORT_DIR, OUTPUT_FILE),
  JSON.stringify(merged, null, 2)
);
console.log(`✅ Merged report saved as ${OUTPUT_FILE}`);
```
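A small sketch of the file-selection rule the merge script applies before summing the run counters and concatenating `results`; the shard file names below are hypothetical examples, any suffix after the dash would match.

```js
// Sketch of merge-pulse-report.js file selection; shard names are hypothetical.
const candidates = [
  "playwright-pulse-report-shard1.json",
  "playwright-pulse-report-shard2.json",
  "playwright-pulse-report.json", // the merged output itself does not match the prefix filter
  "trend-1714558530000.json",
];
const picked = candidates.filter(
  (file) =>
    file.startsWith("playwright-pulse-report-") && file.endsWith(".json")
);
console.log(picked);
// -> ["playwright-pulse-report-shard1.json", "playwright-pulse-report-shard2.json"]
```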
package/scripts/sendReport.js
@@ -0,0 +1,335 @@
```js
#!/usr/bin/env node
const nodemailer = require("nodemailer");
const path = require("path");
const archiver = require("archiver");
const fileSystem = require("fs");
const reportDir = "./pulse-report";

require("dotenv").config();

let fetch;
import("node-fetch")
  .then((module) => {
    fetch = module.default;
  })
  .catch((err) => {
    console.error("Failed to import node-fetch:", err);
    process.exit(1);
  });

let projectName;

function getUUID() {
  const reportPath = path.join(
    process.cwd(),
    `${reportDir}/playwright-pulse-report.json`
  );
  console.log("Report path:", reportPath);

  if (!fileSystem.existsSync(reportPath)) {
    throw new Error("Pulse report file not found.");
  }

  const content = JSON.parse(fileSystem.readFileSync(reportPath, "utf-8"));
  const idString = content.run.id;
  const parts = idString.split("-");
  const uuid = parts.slice(-5).join("-");
  return uuid;
}

function formatDuration(ms) {
  const seconds = (ms / 1000).toFixed(2);
  if (ms < 1000) return `${ms}ms`;
  if (ms < 60000) return `${seconds}s`;
  if (ms < 3600000) return `${(ms / 60000).toFixed(1)}min`;
  return `${(ms / 3600000).toFixed(1)}h`;
}
const formatStartTime = (isoString) => {
  const date = new Date(isoString);
  return date.toLocaleString(); // Default locale
};

// Generate test-data from allure report
const getPulseReportSummary = () => {
  const reportPath = path.join(
    process.cwd(),
    `${reportDir}/playwright-pulse-report.json`
  );

  if (!fileSystem.existsSync(reportPath)) {
    throw new Error("Pulse report file not found.");
  }

  const content = JSON.parse(fileSystem.readFileSync(reportPath, "utf-8"));
  const run = content.run;

  const total = run.totalTests || 0;
  const passed = run.passed || 0;
  const failed = run.failed || 0;
  const skipped = run.skipped || 0;
  const duration = (run.duration || 0) / 1000; // Convert ms to seconds

  const readableStartTime = new Date(run.timestamp).toLocaleString();

  return {
    total,
    passed,
    failed,
    skipped,
    passedPercentage: total ? ((passed / total) * 100).toFixed(2) : "0.00",
    failedPercentage: total ? ((failed / total) * 100).toFixed(2) : "0.00",
    skippedPercentage: total ? ((skipped / total) * 100).toFixed(2) : "0.00",
    startTime: readableStartTime,
    duration: formatDuration(duration),
  };
};

// sleep function for javascript file
const delay = (time) => new Promise((resolve) => setTimeout(resolve, time));
// Function to zip the folder asynchronously using async/await
const zipFolder = async (folderPath, zipPath) => {
  return new Promise((resolve, reject) => {
    const output = fileSystem.createWriteStream(zipPath); // Must use require("fs") directly here
    const archive = archiver("zip", { zlib: { level: 9 } });

    output.on("close", () => {
      console.log(`${archive.pointer()} total bytes`);
      console.log("Folder has been zipped successfully.");
      resolve(); // Resolve the promise after zipping is complete
    });

    archive.on("error", (err) => {
      reject(err); // Reject the promise in case of an error
    });

    archive.pipe(output);
    archive.directory(folderPath, false); // Zip the folder without the parent folder
    archive.finalize(); // Finalize the archive
  });
};

// Function to convert JSON data to HTML table format
const generateHtmlTable = (data) => {
  projectName = "Pulse Emailable Report";
  const stats = data;
  const total = stats.passed + stats.failed + stats.skipped;
  const passedTests = stats.passed;
  const passedPercentage = stats.passedPercentage;
  const failedTests = stats.failed;
  const failedPercentage = stats.failedPercentage;
  const skippedTests = stats.skipped;
  const skippedPercentage = stats.skippedPercentage;
  const startTime = stats.startTime;
  const durationSeconds = stats.duration;

  return `
  <!DOCTYPE html>
  <html lang="en">
  <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Test Stats Report</title>
    <style>
      table {
        width: 100%;
        border-collapse: collapse;
      }
      table, th, td {
        border: 1px solid black;
      }
      th, td {
        padding: 8px;
        text-align: left;
      }
      th {
        background-color: #f2f2f2;
      }
    </style>
  </head>
  <body>
    <h1>${projectName} Statistics</h1>
    <table>
      <thead>
        <tr>
          <th>Metric</th>
          <th>Value</th>
        </tr>
      </thead>
      <tbody>
        <tr>
          <td>Test Start Time</td>
          <td>${startTime}</td>
        </tr>
        <tr>
          <td>Test Run Duration (Seconds)</td>
          <td>${durationSeconds}</td>
        </tr>
        <tr>
          <td>Total Tests Count</td>
          <td>${total}</td>
        </tr>
        <tr>
          <td>Tests Passed</td>
          <td>${passedTests} (${passedPercentage}%)</td>
        </tr>
        <tr>
          <td>Skipped Tests</td>
          <td>${skippedTests} (${skippedPercentage}%)</td>
        </tr>
        <tr>
          <td>Test Failed</td>
          <td>${failedTests} (${failedPercentage}%)</td>
        </tr>
      </tbody>
    </table>
    <p>With regards,</p>
    <p>Networks QA Team</p>
  </body>
  </html>
  `;
};

// Async function to send an email
const sendEmail = async (credentials) => {
  try {
    console.log("Starting the sendEmail function...");

    // Configure nodemailer transporter
    const secureTransporter = nodemailer.createTransport({
      host: "smtp.gmail.com",
      port: 465,
      secure: true, // Use SSL/TLS
      auth: {
        user: credentials.username,
        pass: credentials.password, // Ensure you use app password or secured token
      },
    });
    // Generate HTML content for email
    const reportData = getPulseReportSummary();
    const htmlContent = generateHtmlTable(reportData);

    // Configure mail options
    const mailOptions = {
      from: credentials.username,
      to: [
        process.env.SENDER_EMAIL_1 || "",
        process.env.SENDER_EMAIL_2 || "",
        process.env.SENDER_EMAIL_3 || "",
        process.env.SENDER_EMAIL_4 || "",
        process.env.SENDER_EMAIL_5 || "",
      ],
      subject: "Pulse Report " + new Date().toLocaleString(),
      html: htmlContent,
      attachments: [
        {
          filename: `report.html`,
          path: `${reportDir}/playwright-pulse-static-report.html`, // Attach the zipped folder
        },
      ],
    };

    // Send email
    const info = await secureTransporter.sendMail(mailOptions);
    console.log("Email sent: ", info.response);
  } catch (error) {
    console.error("Error sending email: ", error);
  }
};

async function fetchCredentials(retries = 6) {
  const timeout = 10000; // 10 seconds timeout
  const key = getUUID();
  // Validate API key exists before making any requests
  if (!key) {
    console.error(
      "🔴 Critical: API key not provided - please set EMAIL_KEY in your environment variables"
    );
    console.warn("🟠Falling back to default credentials (if any)");
    return null; // Return null instead of throwing
  }

  for (let attempt = 1; attempt <= retries; attempt++) {
    try {
      console.log(`🟡 Attempt ${attempt} of ${retries}`);

      // Create a timeout promise
      const timeoutPromise = new Promise((_, reject) => {
        setTimeout(() => {
          reject(new Error(`Request timed out after ${timeout}ms`));
        }, timeout);
      });

      // Create the fetch promise
      const fetchPromise = fetch(
        "https://test-dashboard-66zd.onrender.com/api/getcredentials",
        {
          method: "GET",
          headers: {
            "x-api-key": `${key}`,
          },
        }
      );

      // Race between fetch and timeout
      const response = await Promise.race([fetchPromise, timeoutPromise]);

      if (!response.ok) {
        // Handle specific HTTP errors with console messages only
        if (response.status === 401) {
          console.error("🔴 Invalid API key - authentication failed");
        } else if (response.status === 404) {
          console.error("🔴 Endpoint not found - check the API URL");
        } else {
          console.error(`🔴 Fetch failed with status: ${response.status}`);
        }
        continue; // Skip to next attempt instead of throwing
      }

      const data = await response.json();

      // Validate the response structure
      if (!data.username || !data.password) {
        console.error("🔴 Invalid credentials format received from API");
        continue;
      }

      console.log("🟢 Fetched credentials successfully");
      return data;
    } catch (err) {
      console.error(`🔴 Attempt ${attempt} failed: ${err.message}`);

      if (attempt === retries) {
        console.error(
          `🔴 All ${retries} attempts failed. Last error: ${err.message}`
        );
        console.warn(
          "🟠Proceeding without credentials - email sending will be skipped"
        );
        return null;
      }

      await new Promise((resolve) => setTimeout(resolve, 1000));
    }
  }
}

// Main function to zip the folder and send the email
const main = async () => {
  await import("node-fetch").then((module) => {
    fetch = module.default;
  });
  const credentials = await fetchCredentials();
  if (!credentials) {
    console.warn("Skipping email sending due to missing credentials");
    // Continue with pipeline without failing
    return;
  }
  await delay(10000);
  try {
    await sendEmail(credentials);
  } catch (error) {
    console.error("Error in main function: ", error);
  }
};

main();
```
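A short sketch of how this email script derives the `x-api-key` it sends to the credentials endpoint: `getUUID()` takes the last five dash-separated segments of `run.id` from the pulse report. The run id below is a made-up example of that shape; the recipient addresses themselves come from the `SENDER_EMAIL_1` through `SENDER_EMAIL_5` variables loaded via dotenv.

```js
// Hedged sketch of the getUUID() logic in sendReport.js; the run id is a made-up example.
const idString = "pulse-run-123e4567-e89b-12d3-a456-426614174000";
const uuid = idString.split("-").slice(-5).join("-");
console.log(uuid); // "123e4567-e89b-12d3-a456-426614174000"
```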