@empiricalrun/test-run 0.11.1 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/dist/bin/merge-reports.d.ts +3 -0
- package/dist/bin/merge-reports.d.ts.map +1 -0
- package/dist/bin/merge-reports.js +26 -0
- package/dist/index.d.ts +6 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +14 -6
- package/dist/lib/cancellation-watcher.d.ts +5 -0
- package/dist/lib/cancellation-watcher.d.ts.map +1 -0
- package/dist/lib/cancellation-watcher.js +49 -0
- package/dist/lib/cmd.d.ts +9 -1
- package/dist/lib/cmd.d.ts.map +1 -1
- package/dist/lib/cmd.js +48 -6
- package/dist/lib/merge-reports.d.ts +26 -0
- package/dist/lib/merge-reports.d.ts.map +1 -0
- package/dist/lib/merge-reports.js +248 -0
- package/dist/lib/run-specific-test.d.ts.map +1 -1
- package/dist/lib/run-specific-test.js +1 -1
- package/dist/stdout-parser/index.d.ts.map +1 -0
- package/dist/utils/config.d.ts +1 -1
- package/dist/utils/config.d.ts.map +1 -1
- package/package.json +8 -6
- package/test-data/blob-report/report-1.zip +0 -0
- package/test-data/blob-report/report-2.zip +0 -0
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/parser/index.d.ts.map +0 -1
- package/eslint.config.mjs +0 -16
- /package/dist/{parser → stdout-parser}/index.d.ts +0 -0
- /package/dist/{parser → stdout-parser}/index.js +0 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,28 @@
|
|
|
1
1
|
# @empiricalrun/test-run
|
|
2
2
|
|
|
3
|
+
## 0.13.0
|
|
4
|
+
|
|
5
|
+
### Minor Changes
|
|
6
|
+
|
|
7
|
+
- d270c6d: feat: add cancellation watcher to self destruct
|
|
8
|
+
- 2d9919d: feat: consolidate zip utils and move to streaming
|
|
9
|
+
|
|
10
|
+
### Patch Changes
|
|
11
|
+
|
|
12
|
+
- Updated dependencies [2d9919d]
|
|
13
|
+
- @empiricalrun/r2-uploader@0.7.0
|
|
14
|
+
|
|
15
|
+
## 0.12.0
|
|
16
|
+
|
|
17
|
+
### Minor Changes
|
|
18
|
+
|
|
19
|
+
- 79a4e0f: feat: add blob reporters for sharding
|
|
20
|
+
|
|
21
|
+
### Patch Changes
|
|
22
|
+
|
|
23
|
+
- Updated dependencies [79a4e0f]
|
|
24
|
+
- @empiricalrun/r2-uploader@0.6.0
|
|
25
|
+
|
|
3
26
|
## 0.11.1
|
|
4
27
|
|
|
5
28
|
### Patch Changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"merge-reports.d.ts","sourceRoot":"","sources":["../../src/bin/merge-reports.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
4
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
5
|
+
};
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
const commander_1 = require("commander");
|
|
8
|
+
const dotenv_1 = __importDefault(require("dotenv"));
|
|
9
|
+
const merge_reports_1 = require("../lib/merge-reports");
|
|
10
|
+
dotenv_1.default.config({
|
|
11
|
+
path: [".env.local", ".env"],
|
|
12
|
+
});
|
|
13
|
+
(async function main() {
|
|
14
|
+
commander_1.program
|
|
15
|
+
.option("-b, --blob-dir <blob-dir>", "Path to the blob-report directory")
|
|
16
|
+
.option("-c, --cwd <cwd>", "Working directory")
|
|
17
|
+
.parse(process.argv);
|
|
18
|
+
const options = commander_1.program.opts();
|
|
19
|
+
const { success } = await (0, merge_reports_1.mergeReports)({
|
|
20
|
+
blobDir: options.blobDir,
|
|
21
|
+
cwd: options.cwd,
|
|
22
|
+
});
|
|
23
|
+
if (!success) {
|
|
24
|
+
process.exit(1);
|
|
25
|
+
}
|
|
26
|
+
})();
|
package/dist/index.d.ts
CHANGED
|
@@ -1,18 +1,22 @@
|
|
|
1
1
|
import { JSONReport as PlaywrightJSONReport } from "@playwright/test/reporter";
|
|
2
2
|
import { spawnCmd } from "./lib/cmd";
|
|
3
3
|
import { runSpecificTestsCmd } from "./lib/run-specific-test";
|
|
4
|
+
import { parseTestListOutput } from "./stdout-parser";
|
|
4
5
|
import { Platform, TestCase } from "./types";
|
|
5
6
|
import { getProjectsFromPlaywrightConfig } from "./utils/config";
|
|
6
|
-
export { getProjectsFromPlaywrightConfig, Platform, runSpecificTestsCmd, spawnCmd, };
|
|
7
|
+
export { getProjectsFromPlaywrightConfig, parseTestListOutput, Platform, runSpecificTestsCmd, spawnCmd, };
|
|
7
8
|
export * from "./glob-matcher";
|
|
9
|
+
export { type CancellationWatcher, startCancellationWatcher, } from "./lib/cancellation-watcher";
|
|
8
10
|
export { filterArrayByGlobMatchersSet, generateProjectFilters } from "./utils";
|
|
9
|
-
export declare function runSingleTest({ testName, suites, filePath, projects, envOverrides, repoDir, }: {
|
|
11
|
+
export declare function runSingleTest({ testName, suites, filePath, projects, envOverrides, repoDir, stdout, stderr, }: {
|
|
10
12
|
testName: string;
|
|
11
13
|
suites: string[];
|
|
12
14
|
filePath: string;
|
|
13
15
|
projects: string[];
|
|
14
16
|
envOverrides?: Record<string, string>;
|
|
15
17
|
repoDir: string;
|
|
18
|
+
stdout?: NodeJS.WritableStream;
|
|
19
|
+
stderr?: NodeJS.WritableStream;
|
|
16
20
|
}): Promise<{
|
|
17
21
|
hasTestPassed: boolean;
|
|
18
22
|
summaryJson: PlaywrightJSONReport;
|
package/dist/index.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,IAAI,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AAI/E,OAAO,EAAkB,QAAQ,EAAE,MAAM,WAAW,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,IAAI,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AAI/E,OAAO,EAAkB,QAAQ,EAAE,MAAM,WAAW,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAC9D,OAAO,EAAE,mBAAmB,EAAE,MAAM,iBAAiB,CAAC;AACtD,OAAO,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAC;AAE7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,gBAAgB,CAAC;AAMjE,OAAO,EACL,+BAA+B,EAC/B,mBAAmB,EACnB,QAAQ,EACR,mBAAmB,EACnB,QAAQ,GACT,CAAC;AACF,cAAc,gBAAgB,CAAC;AAC/B,OAAO,EACL,KAAK,mBAAmB,EACxB,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,4BAA4B,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AAc/E,wBAAsB,aAAa,CAAC,EAClC,QAAQ,EACR,MAAM,EACN,QAAQ,EACR,QAAQ,EACR,YAAY,EACZ,OAAO,EACP,MAAM,EACN,MAAM,GACP,EAAE;IACD,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAC/B,MAAM,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;CAChC,GAAG,OAAO,CAAC;IACV,aAAa,EAAE,OAAO,CAAC;IACvB,WAAW,EAAE,oBAAoB,CAAC;CACnC,CAAC,CAoBD;AAED,wBAAsB,oBAAoB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC;IACnE,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,mBAAmB,EAAE,MAAM,CAAC,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAC;CACjD,CAAC,CAeD"}
|
package/dist/index.js
CHANGED
|
@@ -17,7 +17,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
17
17
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
18
18
|
};
|
|
19
19
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
20
|
-
exports.generateProjectFilters = exports.filterArrayByGlobMatchersSet = exports.spawnCmd = exports.runSpecificTestsCmd = exports.Platform = exports.getProjectsFromPlaywrightConfig = void 0;
|
|
20
|
+
exports.generateProjectFilters = exports.filterArrayByGlobMatchersSet = exports.startCancellationWatcher = exports.spawnCmd = exports.runSpecificTestsCmd = exports.Platform = exports.parseTestListOutput = exports.getProjectsFromPlaywrightConfig = void 0;
|
|
21
21
|
exports.runSingleTest = runSingleTest;
|
|
22
22
|
exports.listProjectsAndTests = listProjectsAndTests;
|
|
23
23
|
const fs_1 = __importDefault(require("fs"));
|
|
@@ -26,7 +26,8 @@ const cmd_1 = require("./lib/cmd");
|
|
|
26
26
|
Object.defineProperty(exports, "spawnCmd", { enumerable: true, get: function () { return cmd_1.spawnCmd; } });
|
|
27
27
|
const run_specific_test_1 = require("./lib/run-specific-test");
|
|
28
28
|
Object.defineProperty(exports, "runSpecificTestsCmd", { enumerable: true, get: function () { return run_specific_test_1.runSpecificTestsCmd; } });
|
|
29
|
-
const
|
|
29
|
+
const stdout_parser_1 = require("./stdout-parser");
|
|
30
|
+
Object.defineProperty(exports, "parseTestListOutput", { enumerable: true, get: function () { return stdout_parser_1.parseTestListOutput; } });
|
|
30
31
|
const types_1 = require("./types");
|
|
31
32
|
Object.defineProperty(exports, "Platform", { enumerable: true, get: function () { return types_1.Platform; } });
|
|
32
33
|
const utils_1 = require("./utils");
|
|
@@ -36,6 +37,8 @@ Object.defineProperty(exports, "getProjectsFromPlaywrightConfig", { enumerable:
|
|
|
36
37
|
// The bin entrypoint has support for mobile also
|
|
37
38
|
const supportedPlatform = types_1.Platform.WEB;
|
|
38
39
|
__exportStar(require("./glob-matcher"), exports);
|
|
40
|
+
var cancellation_watcher_1 = require("./lib/cancellation-watcher");
|
|
41
|
+
Object.defineProperty(exports, "startCancellationWatcher", { enumerable: true, get: function () { return cancellation_watcher_1.startCancellationWatcher; } });
|
|
39
42
|
var utils_2 = require("./utils");
|
|
40
43
|
Object.defineProperty(exports, "filterArrayByGlobMatchersSet", { enumerable: true, get: function () { return utils_2.filterArrayByGlobMatchersSet; } });
|
|
41
44
|
Object.defineProperty(exports, "generateProjectFilters", { enumerable: true, get: function () { return utils_2.generateProjectFilters; } });
|
|
@@ -46,7 +49,7 @@ function getSummaryJsonPath(repoDir) {
|
|
|
46
49
|
? pathForPlaywright147
|
|
47
50
|
: pathForOtherPlaywrightVersions;
|
|
48
51
|
}
|
|
49
|
-
async function runSingleTest({ testName, suites, filePath, projects, envOverrides, repoDir, }) {
|
|
52
|
+
async function runSingleTest({ testName, suites, filePath, projects, envOverrides, repoDir, stdout, stderr, }) {
|
|
50
53
|
const testDir = "tests";
|
|
51
54
|
const commandToRun = await (0, run_specific_test_1.runSpecificTestsCmd)({
|
|
52
55
|
tests: [{ name: testName, dir: testDir, filePath, suites }],
|
|
@@ -55,7 +58,10 @@ async function runSingleTest({ testName, suites, filePath, projects, envOverride
|
|
|
55
58
|
platform: supportedPlatform,
|
|
56
59
|
repoDir,
|
|
57
60
|
});
|
|
58
|
-
const { hasTestPassed } = await (0, cmd_1.runTestsForCmd)(commandToRun, repoDir
|
|
61
|
+
const { hasTestPassed } = await (0, cmd_1.runTestsForCmd)(commandToRun, repoDir, {
|
|
62
|
+
stdout,
|
|
63
|
+
stderr,
|
|
64
|
+
});
|
|
59
65
|
const jsonFilePath = getSummaryJsonPath(repoDir);
|
|
60
66
|
const jsonFileContents = fs_1.default.readFileSync(jsonFilePath, "utf8");
|
|
61
67
|
const summaryJson = JSON.parse(jsonFileContents);
|
|
@@ -69,12 +75,14 @@ async function listProjectsAndTests(repoDir) {
|
|
|
69
75
|
const args = [testRunner, "test", "--list"];
|
|
70
76
|
const { output, code } = await (0, cmd_1.spawnCmd)("npx", args, {
|
|
71
77
|
cwd: repoDir,
|
|
72
|
-
envOverrides: {
|
|
78
|
+
envOverrides: {
|
|
79
|
+
NODE_PATH: path_1.default.join(repoDir, "node_modules"),
|
|
80
|
+
},
|
|
73
81
|
captureOutput: true,
|
|
74
82
|
throwOnError: true,
|
|
75
83
|
});
|
|
76
84
|
if (!output) {
|
|
77
85
|
throw new Error(`Failed to run list command; exit code: ${code}`);
|
|
78
86
|
}
|
|
79
|
-
return (0,
|
|
87
|
+
return (0, stdout_parser_1.parseTestListOutput)(output);
|
|
80
88
|
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cancellation-watcher.d.ts","sourceRoot":"","sources":["../../src/lib/cancellation-watcher.ts"],"names":[],"mappings":"AAqCA,MAAM,MAAM,mBAAmB,GAAG;IAChC,IAAI,EAAE,MAAM,IAAI,CAAC;CAClB,CAAC;AAEF,wBAAgB,wBAAwB,CACtC,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,QAAQ,EAAE,MAAM,IAAI,EACpB,cAAc,SAA2B,GACxC,mBAAmB,CAgCrB"}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.startCancellationWatcher = startCancellationWatcher;
|
|
4
|
+
const DOMAIN = process.env.DASHBOARD_DOMAIN || "https://dash.empirical.run";
|
|
5
|
+
const DEFAULT_POLL_INTERVAL_MS = 5000;
|
|
6
|
+
async function checkTestRunStatus(testRunId, apiKey) {
|
|
7
|
+
const url = `${DOMAIN}/api/test-runs/${testRunId}/status`;
|
|
8
|
+
try {
|
|
9
|
+
const response = await fetch(url, {
|
|
10
|
+
headers: { Authorization: `Bearer ${apiKey}` },
|
|
11
|
+
});
|
|
12
|
+
if (!response.ok) {
|
|
13
|
+
console.log(`[CancellationWatcher] Failed to check status: HTTP ${response.status} from ${url}`);
|
|
14
|
+
return { isTerminal: false, status: null };
|
|
15
|
+
}
|
|
16
|
+
const result = (await response.json());
|
|
17
|
+
const status = result.data || { isTerminal: false, status: null };
|
|
18
|
+
return status;
|
|
19
|
+
}
|
|
20
|
+
catch (error) {
|
|
21
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
22
|
+
console.log(`[CancellationWatcher] Failed to check status: ${message}`);
|
|
23
|
+
return { isTerminal: false, status: null };
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
function startCancellationWatcher(testRunId, apiKey, onCancel, pollIntervalMs = DEFAULT_POLL_INTERVAL_MS) {
|
|
27
|
+
let stopped = false;
|
|
28
|
+
console.log(`[CancellationWatcher] Starting watcher for test run ${testRunId} (polling every ${pollIntervalMs}ms)`);
|
|
29
|
+
console.log(`[CancellationWatcher] Dashboard domain: ${DOMAIN}`);
|
|
30
|
+
const poll = async () => {
|
|
31
|
+
while (!stopped) {
|
|
32
|
+
const { status } = await checkTestRunStatus(testRunId, apiKey);
|
|
33
|
+
if (status === "cancelling" || status === "cancelled") {
|
|
34
|
+
console.log(`[CancellationWatcher] Test run ${testRunId} is ${status}, triggering cancellation`);
|
|
35
|
+
onCancel();
|
|
36
|
+
stopped = true;
|
|
37
|
+
return;
|
|
38
|
+
}
|
|
39
|
+
await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
|
|
40
|
+
}
|
|
41
|
+
};
|
|
42
|
+
poll();
|
|
43
|
+
return {
|
|
44
|
+
stop: () => {
|
|
45
|
+
console.log(`[CancellationWatcher] Stopping watcher for ${testRunId}`);
|
|
46
|
+
stopped = true;
|
|
47
|
+
},
|
|
48
|
+
};
|
|
49
|
+
}
|
package/dist/lib/cmd.d.ts
CHANGED
|
@@ -1,16 +1,24 @@
|
|
|
1
|
+
import { type ChildProcess } from "child_process";
|
|
1
2
|
import { CommandToRun } from "../types";
|
|
2
3
|
export declare function getCommandFromString(command: string): {
|
|
3
4
|
command: string;
|
|
4
5
|
args: string[];
|
|
5
6
|
};
|
|
6
|
-
export declare function runTestsForCmd({ command, args, env }: CommandToRun, cwd: string
|
|
7
|
+
export declare function runTestsForCmd({ command, args, env }: CommandToRun, cwd: string, options?: {
|
|
8
|
+
stdout?: NodeJS.WritableStream;
|
|
9
|
+
stderr?: NodeJS.WritableStream;
|
|
10
|
+
}): Promise<{
|
|
7
11
|
hasTestPassed: boolean;
|
|
12
|
+
wasCancelled: boolean;
|
|
8
13
|
}>;
|
|
9
14
|
export declare function spawnCmd(command: string, args: string[], options: {
|
|
10
15
|
cwd: string;
|
|
11
16
|
envOverrides: Record<string, string>;
|
|
12
17
|
captureOutput: boolean;
|
|
13
18
|
throwOnError: boolean;
|
|
19
|
+
stdout?: NodeJS.WritableStream;
|
|
20
|
+
stderr?: NodeJS.WritableStream;
|
|
21
|
+
onSpawn?: (proc: ChildProcess) => void;
|
|
14
22
|
}): Promise<{
|
|
15
23
|
code: number;
|
|
16
24
|
output?: string;
|
package/dist/lib/cmd.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"cmd.d.ts","sourceRoot":"","sources":["../../src/lib/cmd.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"cmd.d.ts","sourceRoot":"","sources":["../../src/lib/cmd.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,YAAY,EAAS,MAAM,eAAe,CAAC;AAGzD,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAMxC,wBAAgB,oBAAoB,CAAC,OAAO,EAAE,MAAM,GAAG;IACrD,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB,CAeA;AAED,wBAAsB,cAAc,CAClC,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,YAAY,EACpC,GAAG,EAAE,MAAM,EACX,OAAO,CAAC,EAAE;IACR,MAAM,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAC/B,MAAM,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;CAChC;;;GAqEF;AAED,wBAAsB,QAAQ,CAC5B,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,MAAM,EAAE,EACd,OAAO,EAAE;IACP,GAAG,EAAE,MAAM,CAAC;IACZ,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACrC,aAAa,EAAE,OAAO,CAAC;IACvB,YAAY,EAAE,OAAO,CAAC;IACtB,MAAM,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAC/B,MAAM,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAC/B,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,YAAY,KAAK,IAAI,CAAC;CACxC,GACA,OAAO,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,MAAM,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC,CAiD5C"}
|
package/dist/lib/cmd.js
CHANGED
|
@@ -5,6 +5,7 @@ exports.runTestsForCmd = runTestsForCmd;
|
|
|
5
5
|
exports.spawnCmd = spawnCmd;
|
|
6
6
|
const child_process_1 = require("child_process");
|
|
7
7
|
const logger_1 = require("../logger");
|
|
8
|
+
const cancellation_watcher_1 = require("./cancellation-watcher");
|
|
8
9
|
function getCommandFromString(command) {
|
|
9
10
|
const regex = /[^\s"']+|"([^"]*)"|'([^']*)'/g;
|
|
10
11
|
const matches = command.match(regex) || [];
|
|
@@ -19,21 +20,59 @@ function getCommandFromString(command) {
|
|
|
19
20
|
}),
|
|
20
21
|
};
|
|
21
22
|
}
|
|
22
|
-
async function runTestsForCmd({ command, args, env }, cwd) {
|
|
23
|
+
async function runTestsForCmd({ command, args, env }, cwd, options) {
|
|
23
24
|
logger_1.logger.debug(`Running cmd: ${command} with args: ${args}`);
|
|
25
|
+
const testRunId = process.env.TEST_RUN_GITHUB_ACTION_ID;
|
|
26
|
+
const apiKey = process.env.EMPIRICALRUN_API_KEY;
|
|
27
|
+
console.log(`[CancellationWatcher] Environment: TEST_RUN_GITHUB_ACTION_ID=${testRunId}, EMPIRICALRUN_API_KEY=${apiKey ? "***" : "undefined"}`);
|
|
24
28
|
let hasTestPassed = true;
|
|
29
|
+
let wasCancelled = false;
|
|
30
|
+
let cancellationWatcher;
|
|
31
|
+
let childProcess;
|
|
32
|
+
const cancelHandler = () => {
|
|
33
|
+
wasCancelled = true;
|
|
34
|
+
console.log(`[CancellationWatcher] Cancel handler invoked, killing child process (pid: ${childProcess?.pid})`);
|
|
35
|
+
if (childProcess && !childProcess.killed) {
|
|
36
|
+
// Use SIGTERM for more forceful termination that propagates to child processes
|
|
37
|
+
childProcess.kill("SIGTERM");
|
|
38
|
+
// Also try to kill the process group to ensure Playwright workers are stopped
|
|
39
|
+
if (childProcess.pid) {
|
|
40
|
+
try {
|
|
41
|
+
process.kill(-childProcess.pid, "SIGTERM");
|
|
42
|
+
console.log(`[CancellationWatcher] Sent SIGTERM to process group ${childProcess.pid}`);
|
|
43
|
+
}
|
|
44
|
+
catch {
|
|
45
|
+
// Process group kill may fail if not a group leader
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
};
|
|
50
|
+
if (testRunId && apiKey) {
|
|
51
|
+
cancellationWatcher = (0, cancellation_watcher_1.startCancellationWatcher)(testRunId, apiKey, cancelHandler);
|
|
52
|
+
}
|
|
53
|
+
else {
|
|
54
|
+
console.log("[CancellationWatcher] Not starting watcher - missing testRunId or apiKey");
|
|
55
|
+
}
|
|
25
56
|
try {
|
|
26
57
|
await spawnCmd(command, args, {
|
|
27
58
|
cwd,
|
|
28
59
|
envOverrides: env,
|
|
29
60
|
captureOutput: false,
|
|
30
61
|
throwOnError: true,
|
|
62
|
+
stdout: options?.stdout,
|
|
63
|
+
stderr: options?.stderr,
|
|
64
|
+
onSpawn: (proc) => {
|
|
65
|
+
childProcess = proc;
|
|
66
|
+
},
|
|
31
67
|
});
|
|
32
68
|
}
|
|
33
69
|
catch {
|
|
34
70
|
hasTestPassed = false;
|
|
35
71
|
}
|
|
36
|
-
|
|
72
|
+
finally {
|
|
73
|
+
cancellationWatcher?.stop();
|
|
74
|
+
}
|
|
75
|
+
return { hasTestPassed, wasCancelled };
|
|
37
76
|
}
|
|
38
77
|
async function spawnCmd(command, args, options) {
|
|
39
78
|
let output = options.captureOutput ? "" : undefined;
|
|
@@ -42,9 +81,10 @@ async function spawnCmd(command, args, options) {
|
|
|
42
81
|
const p = (0, child_process_1.spawn)(command, args, {
|
|
43
82
|
env: { ...process.env, ...options.envOverrides },
|
|
44
83
|
cwd: options.cwd,
|
|
45
|
-
//
|
|
46
|
-
detached:
|
|
84
|
+
// Create new process group so we can kill all child processes together
|
|
85
|
+
detached: true,
|
|
47
86
|
});
|
|
87
|
+
options.onSpawn?.(p);
|
|
48
88
|
// Setup signal handlers and get cleanup function
|
|
49
89
|
const cleanupSignalHandlers = setupProcessSignalHandlers(p);
|
|
50
90
|
p.stdout.on("data", (x) => {
|
|
@@ -53,7 +93,8 @@ async function spawnCmd(command, args, options) {
|
|
|
53
93
|
output += log;
|
|
54
94
|
}
|
|
55
95
|
else {
|
|
56
|
-
process.stdout
|
|
96
|
+
const stdout = options.stdout || process.stdout;
|
|
97
|
+
stdout.write(log);
|
|
57
98
|
}
|
|
58
99
|
if (log.includes("Error")) {
|
|
59
100
|
errorLogs.push(log);
|
|
@@ -61,7 +102,8 @@ async function spawnCmd(command, args, options) {
|
|
|
61
102
|
});
|
|
62
103
|
p.stderr.on("data", (x) => {
|
|
63
104
|
const log = x.toString();
|
|
64
|
-
process.stderr
|
|
105
|
+
const stderr = options.stderr || process.stderr;
|
|
106
|
+
stderr.write(log);
|
|
65
107
|
errorLogs.push(log);
|
|
66
108
|
});
|
|
67
109
|
p.on("exit", (code) => {
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
interface MergeReportsOptions {
|
|
2
|
+
blobDir: string;
|
|
3
|
+
outputDir: string;
|
|
4
|
+
cwd: string;
|
|
5
|
+
}
|
|
6
|
+
interface UploadOptions {
|
|
7
|
+
projectName: string;
|
|
8
|
+
runId: string;
|
|
9
|
+
baseUrl: string;
|
|
10
|
+
uploadBucket: string;
|
|
11
|
+
}
|
|
12
|
+
export declare function runPlaywrightMergeReports(options: MergeReportsOptions): Promise<{
|
|
13
|
+
success: boolean;
|
|
14
|
+
}>;
|
|
15
|
+
export declare function extractUrlMappingsFromBlobs(blobDir: string): Promise<Record<string, string>>;
|
|
16
|
+
export declare function patchMergedHtmlReport(htmlFilePath: string, urlMappings: Record<string, string>): Promise<void>;
|
|
17
|
+
export declare function patchSummaryJson(jsonFilePath: string, urlMappings: Record<string, string>): Promise<void>;
|
|
18
|
+
export declare function uploadMergedReports(cwd: string, outputDir: string, uploadOptions: UploadOptions): Promise<void>;
|
|
19
|
+
export declare function mergeReports(options: {
|
|
20
|
+
blobDir?: string;
|
|
21
|
+
cwd?: string;
|
|
22
|
+
}): Promise<{
|
|
23
|
+
success: boolean;
|
|
24
|
+
}>;
|
|
25
|
+
export {};
|
|
26
|
+
//# sourceMappingURL=merge-reports.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"merge-reports.d.ts","sourceRoot":"","sources":["../../src/lib/merge-reports.ts"],"names":[],"mappings":"AAgBA,UAAU,mBAAmB;IAC3B,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,GAAG,EAAE,MAAM,CAAC;CACb;AAED,UAAU,aAAa;IACrB,WAAW,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;CACtB;AA+BD,wBAAsB,yBAAyB,CAC7C,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC;IAAE,OAAO,EAAE,OAAO,CAAA;CAAE,CAAC,CA2B/B;AAED,wBAAsB,2BAA2B,CAC/C,OAAO,EAAE,MAAM,GACd,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CA2BjC;AAED,wBAAsB,qBAAqB,CACzC,YAAY,EAAE,MAAM,EACpB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAClC,OAAO,CAAC,IAAI,CAAC,CA8Ff;AAED,wBAAsB,gBAAgB,CACpC,YAAY,EAAE,MAAM,EACpB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAClC,OAAO,CAAC,IAAI,CAAC,CAgBf;AAED,wBAAsB,mBAAmB,CACvC,GAAG,EAAE,MAAM,EACX,SAAS,EAAE,MAAM,EACjB,aAAa,EAAE,aAAa,GAC3B,OAAO,CAAC,IAAI,CAAC,CAkDf;AAED,wBAAsB,YAAY,CAAC,OAAO,EAAE;IAC1C,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,GAAG,OAAO,CAAC;IAAE,OAAO,EAAE,OAAO,CAAA;CAAE,CAAC,CAuDhC"}
|
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.runPlaywrightMergeReports = runPlaywrightMergeReports;
|
|
7
|
+
exports.extractUrlMappingsFromBlobs = extractUrlMappingsFromBlobs;
|
|
8
|
+
exports.patchMergedHtmlReport = patchMergedHtmlReport;
|
|
9
|
+
exports.patchSummaryJson = patchSummaryJson;
|
|
10
|
+
exports.uploadMergedReports = uploadMergedReports;
|
|
11
|
+
exports.mergeReports = mergeReports;
|
|
12
|
+
const r2_uploader_1 = require("@empiricalrun/r2-uploader");
|
|
13
|
+
const zip_1 = require("@empiricalrun/r2-uploader/zip");
|
|
14
|
+
const fs_1 = __importDefault(require("fs"));
|
|
15
|
+
const path_1 = __importDefault(require("path"));
|
|
16
|
+
const logger_1 = require("../logger");
|
|
17
|
+
const cmd_1 = require("./cmd");
|
|
18
|
+
function buildMappingPatterns(urlMappings) {
|
|
19
|
+
return Object.entries(urlMappings).map(([resourcePath, url]) => {
|
|
20
|
+
const resourceFileName = resourcePath.replace(/^resources\//, "");
|
|
21
|
+
const escaped = resourceFileName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
22
|
+
const regex = new RegExp(`[^"]*${escaped}`, "g");
|
|
23
|
+
return { regex, resourceFileName, url };
|
|
24
|
+
});
|
|
25
|
+
}
|
|
26
|
+
function applyMappingPatterns(content, patterns) {
|
|
27
|
+
let modified = content;
|
|
28
|
+
for (const { regex, resourceFileName, url } of patterns) {
|
|
29
|
+
if (!modified.includes(resourceFileName))
|
|
30
|
+
continue;
|
|
31
|
+
modified = modified.replace(regex, url);
|
|
32
|
+
}
|
|
33
|
+
return modified;
|
|
34
|
+
}
|
|
35
|
+
async function runPlaywrightMergeReports(options) {
|
|
36
|
+
const { blobDir, outputDir, cwd } = options;
|
|
37
|
+
logger_1.logger.debug(`[Merge Reports] Running playwright merge-reports`);
|
|
38
|
+
logger_1.logger.debug(`[Merge Reports] Blob dir: ${blobDir}`);
|
|
39
|
+
logger_1.logger.debug(`[Merge Reports] Output dir: ${outputDir}`);
|
|
40
|
+
try {
|
|
41
|
+
await (0, cmd_1.spawnCmd)("npx", ["playwright", "merge-reports", blobDir, "--reporter", "html,json"], {
|
|
42
|
+
cwd,
|
|
43
|
+
envOverrides: {
|
|
44
|
+
PLAYWRIGHT_HTML_OPEN: "never",
|
|
45
|
+
PLAYWRIGHT_HTML_OUTPUT_DIR: outputDir,
|
|
46
|
+
PLAYWRIGHT_JSON_OUTPUT_NAME: path_1.default.join(cwd, "summary.json"),
|
|
47
|
+
},
|
|
48
|
+
captureOutput: false,
|
|
49
|
+
throwOnError: true,
|
|
50
|
+
});
|
|
51
|
+
return { success: true };
|
|
52
|
+
}
|
|
53
|
+
catch (error) {
|
|
54
|
+
logger_1.logger.error(`[Merge Reports] Failed to merge reports:`, error);
|
|
55
|
+
return { success: false };
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
async function extractUrlMappingsFromBlobs(blobDir) {
|
|
59
|
+
const combinedMap = {};
|
|
60
|
+
const files = fs_1.default.readdirSync(blobDir);
|
|
61
|
+
for (const fileName of files.filter((f) => f.endsWith(".zip"))) {
|
|
62
|
+
const zipPath = path_1.default.join(blobDir, fileName);
|
|
63
|
+
try {
|
|
64
|
+
const buffer = await (0, zip_1.readZipEntry)(zipPath, "_empirical_urls.json");
|
|
65
|
+
if (buffer) {
|
|
66
|
+
const content = JSON.parse(buffer.toString("utf8"));
|
|
67
|
+
Object.assign(combinedMap, content);
|
|
68
|
+
logger_1.logger.debug(`[Merge Reports] Extracted ${Object.keys(content).length} URL mappings from ${fileName}`);
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
catch (error) {
|
|
72
|
+
logger_1.logger.error(`[Merge Reports] Failed to extract URL mappings from ${fileName}:`, error);
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
logger_1.logger.info(`[Merge Reports] Total URL mappings: ${Object.keys(combinedMap).length}`);
|
|
76
|
+
return combinedMap;
|
|
77
|
+
}
|
|
78
|
+
async function patchMergedHtmlReport(htmlFilePath, urlMappings) {
|
|
79
|
+
if (Object.keys(urlMappings).length === 0) {
|
|
80
|
+
logger_1.logger.debug(`[Merge Reports] No URL mappings to apply`);
|
|
81
|
+
return;
|
|
82
|
+
}
|
|
83
|
+
let htmlContent;
|
|
84
|
+
const startTime = Date.now();
|
|
85
|
+
logger_1.logger.info(`[Merge Reports] Starting HTML patch...`);
|
|
86
|
+
try {
|
|
87
|
+
htmlContent = await fs_1.default.promises.readFile(htmlFilePath, "utf8");
|
|
88
|
+
logger_1.logger.info(`[Merge Reports] HTML file read: ${(htmlContent.length / 1024 / 1024).toFixed(2)} MB in ${Date.now() - startTime}ms`);
|
|
89
|
+
}
|
|
90
|
+
catch (error) {
|
|
91
|
+
logger_1.logger.error(`[Merge Reports] Failed to read HTML file:`, error);
|
|
92
|
+
return;
|
|
93
|
+
}
|
|
94
|
+
const oldFormatMatch = htmlContent.match(/window\.playwrightReportBase64\s*=\s*"(?:data:application\/zip;base64,)?([^"]+)"/);
|
|
95
|
+
const newFormatMatch = htmlContent.match(/<script\s+id="playwrightReportBase64"[^>]*>(?:data:application\/zip;base64,)?([^<]+)<\/script>/);
|
|
96
|
+
const base64 = oldFormatMatch?.[1] || newFormatMatch?.[1];
|
|
97
|
+
if (!base64) {
|
|
98
|
+
logger_1.logger.error(`[Merge Reports] Base64 zip data not found in HTML`);
|
|
99
|
+
return;
|
|
100
|
+
}
|
|
101
|
+
const htmlDir = path_1.default.dirname(path_1.default.resolve(htmlFilePath));
|
|
102
|
+
const tempDir = fs_1.default.mkdtempSync(path_1.default.join(htmlDir, "merge-patch-"));
|
|
103
|
+
const zipPath = path_1.default.join(tempDir, "archive.zip");
|
|
104
|
+
try {
|
|
105
|
+
let stepTime = Date.now();
|
|
106
|
+
await fs_1.default.promises.writeFile(zipPath, Buffer.from(base64, "base64"));
|
|
107
|
+
await (0, zip_1.extractZipToDirectory)(zipPath, tempDir);
|
|
108
|
+
await fs_1.default.promises.unlink(zipPath);
|
|
109
|
+
logger_1.logger.info(`[Merge Reports] Zip extracted in ${Date.now() - stepTime}ms`);
|
|
110
|
+
const jsonFiles = (await fs_1.default.promises.readdir(tempDir)).filter((f) => f.endsWith(".json"));
|
|
111
|
+
logger_1.logger.info(`[Merge Reports] Patching ${jsonFiles.length} JSON files with ${Object.keys(urlMappings).length} mappings`);
|
|
112
|
+
const mappingPatterns = buildMappingPatterns(urlMappings);
|
|
113
|
+
stepTime = Date.now();
|
|
114
|
+
for (const file of jsonFiles) {
|
|
115
|
+
const filePath = path_1.default.join(tempDir, file);
|
|
116
|
+
const content = await fs_1.default.promises.readFile(filePath, "utf8");
|
|
117
|
+
const modified = applyMappingPatterns(content, mappingPatterns);
|
|
118
|
+
if (modified !== content) {
|
|
119
|
+
await fs_1.default.promises.writeFile(filePath, modified, "utf8");
|
|
120
|
+
logger_1.logger.debug(`[Merge Reports] Patched ${file}`);
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
logger_1.logger.info(`[Merge Reports] JSON patching completed in ${Date.now() - stepTime}ms`);
|
|
124
|
+
stepTime = Date.now();
|
|
125
|
+
const newBuffer = await (0, zip_1.createZipFromDirectory)(tempDir);
|
|
126
|
+
const newBase64 = newBuffer.toString("base64");
|
|
127
|
+
logger_1.logger.info(`[Merge Reports] New zip created in ${Date.now() - stepTime}ms`);
|
|
128
|
+
let updatedHtml;
|
|
129
|
+
if (oldFormatMatch) {
|
|
130
|
+
updatedHtml = htmlContent.replace(/(window\.playwrightReportBase64\s*=\s*")(?:data:application\/zip;base64,)?[^"]*(")/, `$1data:application/zip;base64,${newBase64}$2`);
|
|
131
|
+
}
|
|
132
|
+
else {
|
|
133
|
+
updatedHtml = htmlContent.replace(/(<script\s+id="playwrightReportBase64"[^>]*>)(?:data:application\/zip;base64,)?[^<]*(<\/script>)/, `$1data:application/zip;base64,${newBase64}$2`);
|
|
134
|
+
}
|
|
135
|
+
await fs_1.default.promises.writeFile(htmlFilePath, updatedHtml, "utf8");
|
|
136
|
+
logger_1.logger.info(`[Merge Reports] HTML file patched successfully`);
|
|
137
|
+
}
|
|
138
|
+
catch (error) {
|
|
139
|
+
logger_1.logger.error(`[Merge Reports] Failed to patch HTML:`, error);
|
|
140
|
+
}
|
|
141
|
+
finally {
|
|
142
|
+
await fs_1.default.promises.rm(tempDir, { recursive: true, force: true });
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
async function patchSummaryJson(jsonFilePath, urlMappings) {
|
|
146
|
+
if (Object.keys(urlMappings).length === 0) {
|
|
147
|
+
logger_1.logger.debug(`[Merge Reports] No URL mappings to apply to summary.json`);
|
|
148
|
+
return;
|
|
149
|
+
}
|
|
150
|
+
try {
|
|
151
|
+
const content = await fs_1.default.promises.readFile(jsonFilePath, "utf8");
|
|
152
|
+
const mappingPatterns = buildMappingPatterns(urlMappings);
|
|
153
|
+
const modified = applyMappingPatterns(content, mappingPatterns);
|
|
154
|
+
await fs_1.default.promises.writeFile(jsonFilePath, modified, "utf8");
|
|
155
|
+
logger_1.logger.info(`[Merge Reports] summary.json patched successfully`);
|
|
156
|
+
}
|
|
157
|
+
catch (error) {
|
|
158
|
+
logger_1.logger.error(`[Merge Reports] Failed to patch summary.json:`, error);
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
/**
 * Uploads merge artifacts to R2: the merged HTML report, summary.json, and
 * the trace folder (each only if present on disk). All uploads are queued
 * fire-and-forget, then the task queue is drained before logging the final
 * public URLs.
 *
 * @param cwd           working directory containing summary.json
 * @param outputDir     playwright-report directory (index.html, trace/)
 * @param uploadOptions { projectName, runId, baseUrl, uploadBucket }
 */
async function uploadMergedReports(cwd, outputDir, uploadOptions) {
    const { projectName, runId, baseUrl, uploadBucket } = uploadOptions;
    const destinationDir = path_1.default.join(projectName, runId);
    const htmlFilePath = path_1.default.join(outputDir, "index.html");
    const jsonFilePath = path_1.default.join(cwd, "summary.json");
    const traceDir = path_1.default.join(outputDir, "trace");
    // Shared enqueue helper: every task carries the same bucket/baseUrl.
    const enqueue = (taskOptions) => {
        const task = (0, r2_uploader_1.createUploadTask)({
            ...taskOptions,
            uploadBucket,
            baseUrl,
        });
        void (0, r2_uploader_1.sendTaskToQueue)(task);
    };
    if (fs_1.default.existsSync(htmlFilePath)) {
        logger_1.logger.debug(`[Merge Reports] Uploading HTML report`);
        enqueue({
            sourceDir: outputDir,
            fileList: [htmlFilePath],
            destinationDir,
        });
    }
    if (fs_1.default.existsSync(jsonFilePath)) {
        logger_1.logger.debug(`[Merge Reports] Uploading summary.json`);
        enqueue({
            sourceDir: cwd,
            fileList: [jsonFilePath],
            destinationDir,
        });
    }
    if (fs_1.default.existsSync(traceDir)) {
        logger_1.logger.debug(`[Merge Reports] Uploading trace folder`);
        // Whole-folder upload: no fileList, nested destination under trace/.
        enqueue({
            sourceDir: traceDir,
            destinationDir: path_1.default.join(destinationDir, "trace"),
        });
    }
    await (0, r2_uploader_1.waitForTaskQueueToFinish)();
    const reportUrl = `${baseUrl}/${destinationDir}/index.html`;
    const jsonUrl = `${baseUrl}/${destinationDir}/summary.json`;
    logger_1.logger.info(`[Merge Reports] All uploads completed`);
    logger_1.logger.info(`[Merge Reports] HTML Report: ${reportUrl}`);
    logger_1.logger.info(`[Merge Reports] Summary JSON: ${jsonUrl}`);
}
|
|
206
|
+
/**
 * Entry point for merging Playwright blob reports into a single HTML report.
 * Pipeline: validate env + blob dir -> extract URL mappings from the blobs ->
 * run `playwright merge-reports` -> patch URLs into the merged HTML and
 * summary.json -> optionally upload everything to R2.
 *
 * Requires PROJECT_NAME and TEST_RUN_GITHUB_ACTION_ID env vars; the upload
 * step additionally requires the R2_* credential env vars.
 *
 * @returns { success: boolean }
 */
async function mergeReports(options) {
    const cwd = options.cwd || process.cwd();
    const blobDir = options.blobDir || path_1.default.join(cwd, "blob-report");
    const outputDir = path_1.default.join(cwd, "playwright-report");
    const projectName = process.env.PROJECT_NAME;
    const runId = process.env.TEST_RUN_GITHUB_ACTION_ID;
    // Guard: both identifiers are needed to build the upload destination.
    if (!projectName || !runId) {
        logger_1.logger.error(`[Merge Reports] PROJECT_NAME and TEST_RUN_GITHUB_ACTION_ID must be set`);
        return { success: false };
    }
    if (!fs_1.default.existsSync(blobDir)) {
        logger_1.logger.error(`[Merge Reports] Blob directory does not exist: ${blobDir}`);
        return { success: false };
    }
    // Capture mappings before merging, since the merge rewrites the blobs' data.
    const urlMappings = await extractUrlMappingsFromBlobs(blobDir);
    const mergeResult = await runPlaywrightMergeReports({ blobDir, outputDir, cwd });
    if (!mergeResult.success) {
        return { success: false };
    }
    const htmlFilePath = path_1.default.join(outputDir, "index.html");
    const jsonFilePath = path_1.default.join(cwd, "summary.json");
    await patchMergedHtmlReport(htmlFilePath, urlMappings);
    await patchSummaryJson(jsonFilePath, urlMappings);
    const hasR2Creds = Boolean(process.env.R2_ACCOUNT_ID &&
        process.env.R2_ACCESS_KEY_ID &&
        process.env.R2_SECRET_ACCESS_KEY);
    if (!hasR2Creds) {
        logger_1.logger.info(`[Merge Reports] R2 credentials not found, skipping upload`);
        return { success: true };
    }
    await uploadMergedReports(cwd, outputDir, {
        projectName,
        runId,
        baseUrl: "https://reports.empirical.run",
        uploadBucket: "test-report",
    });
    return { success: true };
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"run-specific-test.d.ts","sourceRoot":"","sources":["../../src/lib/run-specific-test.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAY5D,wBAAsB,mBAAmB,CAAC,EACxC,KAAU,EACV,QAAQ,EACR,eAAe,EACf,QAAQ,EACR,YAAY,EACZ,OAAO,GACR,EAAE;IACD,KAAK,CAAC,EAAE,QAAQ,EAAE,CAAC;IACnB,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,QAAQ,EAAE,QAAQ,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CACjB,GAAG,OAAO,CAAC,YAAY,CAAC,
|
|
1
|
+
{"version":3,"file":"run-specific-test.d.ts","sourceRoot":"","sources":["../../src/lib/run-specific-test.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAY5D,wBAAsB,mBAAmB,CAAC,EACxC,KAAU,EACV,QAAQ,EACR,eAAe,EACf,QAAQ,EACR,YAAY,EACZ,OAAO,GACR,EAAE;IACD,KAAK,CAAC,EAAE,QAAQ,EAAE,CAAC;IACnB,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,QAAQ,EAAE,QAAQ,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CACjB,GAAG,OAAO,CAAC,YAAY,CAAC,CA2FxB"}
|
|
@@ -33,7 +33,7 @@ async function runSpecificTestsCmd({ tests = [], projects, passthroughArgs, plat
|
|
|
33
33
|
}
|
|
34
34
|
}
|
|
35
35
|
if (!matchingFilePath) {
|
|
36
|
-
const suitesPrefix = testCase.suites
|
|
36
|
+
const suitesPrefix = testCase.suites && testCase.suites.length > 0
|
|
37
37
|
? `${testCase.suites.join(" > ")} > `
|
|
38
38
|
: "";
|
|
39
39
|
const fullTestName = `${suitesPrefix}${testCase.name}`;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/stdout-parser/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAEpC,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,MAAM,GAAG;IACnD,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,mBAAmB,EAAE,MAAM,CAAC,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAC;CACjD,CA0CA"}
|
package/dist/utils/config.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { PlaywrightProject } from "@empiricalrun/shared-types";
|
|
1
|
+
import { PlaywrightProject } from "@empiricalrun/shared-types/tool-results";
|
|
2
2
|
import { Platform } from "../types";
|
|
3
3
|
export declare function getProjectsFromPlaywrightConfig(platform: Platform, repoDir: string): Promise<PlaywrightProject[]>;
|
|
4
4
|
//# sourceMappingURL=config.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/utils/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/utils/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,yCAAyC,CAAC;AAK5E,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAEpC,wBAAsB,+BAA+B,CACnD,QAAQ,EAAE,QAAQ,EAClB,OAAO,EAAE,MAAM,GACd,OAAO,CAAC,iBAAiB,EAAE,CAAC,CAyE9B"}
|
package/package.json
CHANGED
|
@@ -1,12 +1,13 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@empiricalrun/test-run",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.13.0",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"registry": "https://registry.npmjs.org/",
|
|
6
6
|
"access": "public"
|
|
7
7
|
},
|
|
8
8
|
"bin": {
|
|
9
|
-
"@empiricalrun/test-run": "dist/bin/index.js"
|
|
9
|
+
"@empiricalrun/test-run": "dist/bin/index.js",
|
|
10
|
+
"@empiricalrun/merge-reports": "dist/bin/merge-reports.js"
|
|
10
11
|
},
|
|
11
12
|
"main": "dist/index.js",
|
|
12
13
|
"exports": {
|
|
@@ -30,21 +31,22 @@
|
|
|
30
31
|
"console-log-level": "^1.4.1",
|
|
31
32
|
"dotenv": "^16.4.5",
|
|
32
33
|
"minimatch": "^10.0.1",
|
|
33
|
-
"ts-morph": "^23.0.0"
|
|
34
|
+
"ts-morph": "^23.0.0",
|
|
35
|
+
"@empiricalrun/r2-uploader": "^0.7.0"
|
|
34
36
|
},
|
|
35
37
|
"devDependencies": {
|
|
38
|
+
"@playwright/test": "1.53.2",
|
|
36
39
|
"@types/async-retry": "^1.4.8",
|
|
37
40
|
"@types/console-log-level": "^1.4.5",
|
|
38
41
|
"@types/node": "^22.5.5",
|
|
39
|
-
"@playwright/test": "1.53.2",
|
|
40
42
|
"memfs": "^4.17.1",
|
|
41
|
-
"@empiricalrun/shared-types": "0.
|
|
43
|
+
"@empiricalrun/shared-types": "0.12.0"
|
|
42
44
|
},
|
|
43
45
|
"scripts": {
|
|
44
46
|
"dev": "tsc --build --watch",
|
|
45
47
|
"build": "tsc --build",
|
|
46
48
|
"clean": "tsc --build --clean",
|
|
47
|
-
"lint": "
|
|
49
|
+
"lint": "biome check --unsafe",
|
|
48
50
|
"test": "vitest run",
|
|
49
51
|
"test:watch": "vitest"
|
|
50
52
|
}
|
|
Binary file
|
|
Binary file
|
package/tsconfig.tsbuildinfo
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"root":["./src/dashboard.ts","./src/glob-matcher.ts","./src/index.ts","./src/logger.ts","./src/bin/index.ts","./src/lib/cmd.ts","./src/lib/run-all-tests.ts","./src/lib/run-specific-test.ts","./src/lib/memfs/read-hello-world.ts","./src/parser/index.ts","./src/types/index.ts","./src/utils/config-parser.ts","./src/utils/config.ts","./src/utils/index.ts"],"version":"5.8.3"}
|
|
1
|
+
{"root":["./src/dashboard.ts","./src/glob-matcher.ts","./src/index.ts","./src/logger.ts","./src/bin/index.ts","./src/bin/merge-reports.ts","./src/lib/cancellation-watcher.ts","./src/lib/cmd.ts","./src/lib/merge-reports.ts","./src/lib/run-all-tests.ts","./src/lib/run-specific-test.ts","./src/lib/memfs/read-hello-world.ts","./src/stdout-parser/index.ts","./src/types/index.ts","./src/utils/config-parser.ts","./src/utils/config.ts","./src/utils/index.ts"],"version":"5.8.3"}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/parser/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAEpC,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,MAAM,GAAG;IACnD,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,mBAAmB,EAAE,MAAM,CAAC,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAC;CACjD,CA0CA"}
|
package/eslint.config.mjs
DELETED
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
import libraryConfig from "../eslint-config/library.mjs";
|
|
2
|
-
import tsParser from "@typescript-eslint/parser";
|
|
3
|
-
|
|
4
|
-
export default [
|
|
5
|
-
...libraryConfig,
|
|
6
|
-
{
|
|
7
|
-
files: ["src/**/*.ts", "src/**/*.tsx"],
|
|
8
|
-
languageOptions: {
|
|
9
|
-
parser: tsParser,
|
|
10
|
-
parserOptions: {
|
|
11
|
-
project: "./tsconfig.lint.json",
|
|
12
|
-
tsconfigRootDir: import.meta.dirname,
|
|
13
|
-
},
|
|
14
|
-
},
|
|
15
|
-
},
|
|
16
|
-
];
|
|
File without changes
|
|
File without changes
|