@flakiness/sdk 0.147.0 → 0.149.0

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
package/README.md CHANGED
@@ -1,5 +1,98 @@
  # Flakiness SDK

- The package provides a set of tools to create Flakiness Reports in Node.js.
+ The Flakiness SDK provides a comprehensive set of tools for creating and managing Flakiness Reports in Node.js.
+
+
+ ## Quick Start
+
+ Here's a minimal example of creating a Flakiness report:
+
+ ```typescript
+ import {
+ FlakinessReport,
+ GitWorktree,
+ ReportUtils,
+ writeReport,
+ uploadReport,
+ CIUtils
+ } from '@flakiness/sdk';
+
+ // Initialize git worktree and environment
+ const worktree = GitWorktree.create(process.cwd());
+ const env = ReportUtils.createEnvironment({ name: 'CI' });
+
+ // Create a simple test report
+ const report: FlakinessReport.Report = {
+ category: 'testreport',
+ commitId: worktree.headCommitId(),
+ url: CIUtils.runUrl(),
+ environments: [env],
+ suites: [{
+ title: 'My Test Suite',
+ type: 'describe',
+ tests: [{
+ title: 'My Test',
+ location: { file: 'test.spec.ts', line: 10, column: 1 },
+ attempts: [{
+ environmentIdx: 0,
+ expectedStatus: 'passed',
+ actualStatus: 'passed',
+ duration: 100 as FlakinessReport.DurationMS,
+ }],
+ }],
+ }],
+ startTimestamp: Date.now() as FlakinessReport.UnixTimestampMS,
+ duration: 100 as FlakinessReport.DurationMS,
+ };
+
+ // Write report to disk or upload to Flakiness.io
+ await writeReport(report, [], './flakiness-report');
+ // Or: await uploadReport(report, [], { flakinessAccessToken: 'your-token' });
+ ```
+
+ ## Entry Points
+
+ The SDK provides two entry points:
+
+ ### `@flakiness/sdk`
+
+ The main entry point for Node.js environments. Provides full access to all SDK functionality including:
+ - Git repository utilities
+ - File system operations
+ - System resource monitoring
+ - Report upload/download
+ - Local report viewing
+
+ ### `@flakiness/sdk/browser`
+
+ A browser-compatible entry point with a subset of utilities that work in browser environments. Exports:
+ - `FlakinessReport` - Type definitions for the report format
+ - `ReportUtils` - Browser-safe utilities (normalizeReport, stripAnsi, visitTests)
+
+ Use this entry point when you need to process or manipulate reports in browser-based tools or web applications.
+
+ ## Top-Level Exports
+
+ ### Report Type & Validation
+ - **`FlakinessReport`** - Type definitions and validation for the Flakiness JSON Report format
+
+ ### Building Reports
+ - **`CIUtils`** - Utilities to extract CI/CD information (run URLs, environment detection)
+ - **`GitWorktree`** - Git repository utilities for path conversion and commit information
+ - **`ReportUtils`** - Namespace with utilities for report creation and manipulation:
+ - `createEnvironment()` - Create environment objects with system information
+ - `normalizeReport()` - Deduplicate environments, suites, and tests
+ - `createTestStepSnippetsInplace()` - Generate code snippets for test steps
+ - `stripAnsi()` - Remove ANSI escape codes from strings
+ - `visitTests()` - Recursively visit all tests in a report
+ - `createFileAttachment()` / `createDataAttachment()` - Create report attachments
+ - **`SystemUtilizationSampler`** - Monitor and record CPU/memory utilization during test runs
+
+ ### Working with Reports
+ - **`showReport()`** - Start a local server and open the report in your browser
+ - **`uploadReport()`** - Upload reports and attachments to Flakiness.io
+ - **`writeReport()`** - Write reports to disk in the standard Flakiness report format
+
+ ### Project Configuration
+ - **`FlakinessProjectConfig`** - Manage project configuration stored in `.flakiness/config.json`

- Read docs at https://flakiness.io/docs/integrations/custom/
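
The new README documents the `@flakiness/sdk/browser` entry point but stops short of showing it in use. Below is a minimal sketch of how the browser-safe `ReportUtils` helpers could be applied to a report shaped like the Quick Start example; the report contents and the helper's use case are assumptions, not taken from this diff.

```typescript
import { FlakinessReport, ReportUtils } from '@flakiness/sdk/browser';

// Collect "Suite > Test" labels for every test in a report, with ANSI codes stripped.
function listTestLabels(report: FlakinessReport.Report): string[] {
  const labels: string[] = [];
  // visitTests() calls the visitor for suite-less tests first, then recurses through
  // suites, passing the chain of parent suites as the second argument.
  ReportUtils.visitTests(report, (test, parentSuites) => {
    const label = [...parentSuites.map((suite) => suite.title), test.title].join(' > ');
    labels.push(ReportUtils.stripAnsi(label));
  });
  return labels;
}
```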
@@ -1,8 +1,21 @@
- // src/httpUtils.ts
+ // src/_internalUtils.ts
+ import { spawnSync } from "child_process";
+ import crypto from "crypto";
+ import fs from "fs";
  import http from "http";
  import https from "https";
-
- // src/utils.ts
+ import util from "util";
+ import zlib from "zlib";
+ var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
+ async function compressTextAsync(text) {
+ return asyncBrotliCompress(text, {
+ chunkSize: 32 * 1024,
+ params: {
+ [zlib.constants.BROTLI_PARAM_QUALITY]: 6,
+ [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT
+ }
+ });
+ }
  var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
  function errorText(error) {
  return FLAKINESS_DBG ? error.stack : error.message;
@@ -23,13 +36,6 @@ async function retryWithBackoff(job, backoff = []) {
  }
  return await job();
  }
- var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
-
- // src/httpUtils.ts
- var FLAKINESS_DBG2 = !!process.env.FLAKINESS_DBG;
- function errorText2(error) {
- return FLAKINESS_DBG2 ? error.stack : error.message;
- }
  var httpUtils;
  ((httpUtils2) => {
  function createRequest({ url, method = "get", headers = {} }) {
@@ -92,8 +98,59 @@ var httpUtils;
  }
  httpUtils2.postJSON = postJSON;
  })(httpUtils || (httpUtils = {}));
+ function shell(command, args, options) {
+ try {
+ const result = spawnSync(command, args, { encoding: "utf-8", ...options });
+ if (result.status !== 0) {
+ return void 0;
+ }
+ return result.stdout.trim();
+ } catch (e) {
+ console.error(e);
+ return void 0;
+ }
+ }
+ function sha1Text(data) {
+ const hash = crypto.createHash("sha1");
+ hash.update(data);
+ return hash.digest("hex");
+ }
+ function sha1File(filePath) {
+ return new Promise((resolve, reject) => {
+ const hash = crypto.createHash("sha1");
+ const stream = fs.createReadStream(filePath);
+ stream.on("data", (chunk) => {
+ hash.update(chunk);
+ });
+ stream.on("end", () => {
+ resolve(hash.digest("hex"));
+ });
+ stream.on("error", (err) => {
+ reject(err);
+ });
+ });
+ }
+ function randomUUIDBase62() {
+ const BASE62_CHARSET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
+ let num = BigInt("0x" + crypto.randomUUID().replace(/-/g, ""));
+ if (num === 0n)
+ return BASE62_CHARSET[0];
+ const chars = [];
+ while (num > 0n) {
+ const remainder = Number(num % 62n);
+ num /= 62n;
+ chars.push(BASE62_CHARSET[remainder]);
+ }
+ return chars.reverse().join("");
+ }
  export {
- errorText2 as errorText,
- httpUtils
+ compressTextAsync,
+ errorText,
+ httpUtils,
+ randomUUIDBase62,
+ retryWithBackoff,
+ sha1File,
+ sha1Text,
+ shell
  };
- //# sourceMappingURL=httpUtils.js.map
+ //# sourceMappingURL=_internalUtils.js.map
@@ -0,0 +1 @@
+ //# sourceMappingURL=_stable-hash.d.js.map
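
The consolidated `_internalUtils.js` bundle (replacing the former `httpUtils.js`/`utils.js` split) adds a Brotli compressor alongside the hashing and shell helpers. These functions are internal rather than documented exports, but the compression settings map directly onto Node's `zlib` API; here is a standalone sketch of the same configuration for reference.

```typescript
import { promisify } from 'node:util';
import zlib from 'node:zlib';

// Mirrors the bundled compressTextAsync() above: quality 6 trades compression ratio
// for speed, and BROTLI_MODE_TEXT hints to the encoder that the input is UTF-8 text.
const brotliCompress = promisify(zlib.brotliCompress);

async function compressText(text: string): Promise<Buffer> {
  return brotliCompress(text, {
    chunkSize: 32 * 1024,
    params: {
      [zlib.constants.BROTLI_PARAM_QUALITY]: 6,
      [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT,
    },
  });
}
```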
package/lib/browser.js ADDED
@@ -0,0 +1,154 @@
+ var __defProp = Object.defineProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+
+ // src/browser.ts
+ import { FlakinessReport } from "@flakiness/flakiness-report";
+
+ // src/reportUtilsBrowser.ts
+ var reportUtilsBrowser_exports = {};
+ __export(reportUtilsBrowser_exports, {
+ normalizeReport: () => normalizeReport,
+ stripAnsi: () => stripAnsi,
+ visitTests: () => visitTests
+ });
+
+ // src/normalizeReport.ts
+ import stableObjectHash from "stable-hash";
+ var Multimap = class {
+ _map = /* @__PURE__ */ new Map();
+ set(key, value) {
+ const set = this._map.get(key) ?? /* @__PURE__ */ new Set();
+ this._map.set(key, set);
+ set.add(value);
+ }
+ getAll(key) {
+ return Array.from(this._map.get(key) ?? []);
+ }
+ };
+ function normalizeReport(report) {
+ const gEnvs = /* @__PURE__ */ new Map();
+ const gSuites = /* @__PURE__ */ new Map();
+ const gTests = new Multimap();
+ const gSuiteIds = /* @__PURE__ */ new Map();
+ const gTestIds = /* @__PURE__ */ new Map();
+ const gEnvIds = /* @__PURE__ */ new Map();
+ const gSuiteChildren = new Multimap();
+ const gSuiteTests = new Multimap();
+ for (const env of report.environments) {
+ const envId = computeEnvId(env);
+ gEnvs.set(envId, env);
+ gEnvIds.set(env, envId);
+ }
+ const usedEnvIds = /* @__PURE__ */ new Set();
+ function visitTests2(tests, suiteId) {
+ for (const test of tests ?? []) {
+ const testId = computeTestId(test, suiteId);
+ gTests.set(testId, test);
+ gTestIds.set(test, testId);
+ gSuiteTests.set(suiteId, test);
+ for (const attempt of test.attempts) {
+ const env = report.environments[attempt.environmentIdx];
+ const envId = gEnvIds.get(env);
+ usedEnvIds.add(envId);
+ }
+ }
+ }
+ function visitSuite(suite, parentSuiteId) {
+ const suiteId = computeSuiteId(suite, parentSuiteId);
+ gSuites.set(suiteId, suite);
+ gSuiteIds.set(suite, suiteId);
+ for (const childSuite of suite.suites ?? []) {
+ visitSuite(childSuite, suiteId);
+ gSuiteChildren.set(suiteId, childSuite);
+ }
+ visitTests2(suite.tests ?? [], suiteId);
+ }
+ function transformTests(tests) {
+ const testIds = new Set(tests.map((test) => gTestIds.get(test)));
+ return [...testIds].map((testId) => {
+ const tests2 = gTests.getAll(testId);
+ const tags = tests2.map((test) => test.tags ?? []).flat();
+ return {
+ location: tests2[0].location,
+ title: tests2[0].title,
+ tags: tags.length ? tags : void 0,
+ attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
+ ...attempt,
+ environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
+ }))
+ };
+ });
+ }
+ function transformSuites(suites) {
+ const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
+ return [...suiteIds].map((suiteId) => {
+ const suite = gSuites.get(suiteId);
+ return {
+ location: suite.location,
+ title: suite.title,
+ type: suite.type,
+ suites: transformSuites(gSuiteChildren.getAll(suiteId)),
+ tests: transformTests(gSuiteTests.getAll(suiteId))
+ };
+ });
+ }
+ visitTests2(report.tests ?? [], "suiteless");
+ for (const suite of report.suites)
+ visitSuite(suite);
+ const newEnvironments = [...usedEnvIds];
+ const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
+ return {
+ ...report,
+ environments: newEnvironments.map((envId) => gEnvs.get(envId)),
+ suites: transformSuites(report.suites),
+ tests: transformTests(report.tests ?? [])
+ };
+ }
+ function computeEnvId(env) {
+ return stableObjectHash(env);
+ }
+ function computeSuiteId(suite, parentSuiteId) {
+ return stableObjectHash({
+ parentSuiteId: parentSuiteId ?? "",
+ type: suite.type,
+ file: suite.location?.file ?? "",
+ title: suite.title
+ });
+ }
+ function computeTestId(test, suiteId) {
+ return stableObjectHash({
+ suiteId,
+ file: test.location?.file ?? "",
+ title: test.title
+ });
+ }
+
+ // src/stripAnsi.ts
+ var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
+ function stripAnsi(str) {
+ return str.replace(ansiRegex, "");
+ }
+
+ // src/visitTests.ts
+ function visitTests(report, testVisitor) {
+ function visitSuite(suite, parents) {
+ parents.push(suite);
+ for (const test of suite.tests ?? [])
+ testVisitor(test, parents);
+ for (const childSuite of suite.suites ?? [])
+ visitSuite(childSuite, parents);
+ parents.pop();
+ }
+ for (const test of report.tests ?? [])
+ testVisitor(test, []);
+ for (const suite of report.suites)
+ visitSuite(suite, []);
+ }
+ export {
+ FlakinessReport,
+ reportUtilsBrowser_exports as ReportUtils
+ };
+ //# sourceMappingURL=browser.js.map
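
In this new browser bundle, `normalizeReport()` keys suites and tests by a `stable-hash` of their parent id, type, file, and title, merges entries that collide, drops environments no attempt references, and rewrites each attempt's `environmentIdx` against the deduplicated environment list. A hedged sketch of the effect; the field values below are placeholders cast to the report type, not values from this diff.

```typescript
import { FlakinessReport, ReportUtils } from '@flakiness/sdk/browser';

// Hypothetical report with a duplicated environment entry (placeholder values).
const report = {
  category: 'testreport',
  commitId: 'deadbeef',
  environments: [{ name: 'CI' }, { name: 'CI' }], // identical entries, same stable-hash
  suites: [{
    title: 'suite',
    type: 'describe',
    tests: [{
      title: 'test',
      attempts: [
        { environmentIdx: 0, expectedStatus: 'passed', actualStatus: 'passed', duration: 5 },
        { environmentIdx: 1, expectedStatus: 'passed', actualStatus: 'passed', duration: 7 },
      ],
    }],
  }],
  startTimestamp: Date.now(),
  duration: 12,
} as unknown as FlakinessReport.Report;

const normalized = ReportUtils.normalizeReport(report);
// normalized.environments contains a single { name: 'CI' } entry, and both attempts
// now carry environmentIdx 0; suites/tests with the same title/type/file are merged
// the same way, with their attempts concatenated.
```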
package/lib/ciUtils.js ADDED
@@ -0,0 +1,42 @@
+ // src/ciUtils.ts
+ var CIUtils;
+ ((CIUtils2) => {
+ function runUrl() {
+ return githubActions() ?? azure() ?? process.env.CI_JOB_URL ?? process.env.BUILD_URL;
+ }
+ CIUtils2.runUrl = runUrl;
+ })(CIUtils || (CIUtils = {}));
+ function githubActions() {
+ const serverUrl = process.env.GITHUB_SERVER_URL || "https://github.com";
+ const repo = process.env.GITHUB_REPOSITORY;
+ const runId = process.env.GITHUB_RUN_ID;
+ if (!repo || !runId) return void 0;
+ try {
+ const url = new URL(`${serverUrl}/${repo}/actions/runs/${runId}`);
+ const attempt = process.env.GITHUB_RUN_ATTEMPT;
+ if (attempt) url.searchParams.set("attempt", attempt);
+ url.searchParams.set("check_suite_focus", "true");
+ return url.toString();
+ } catch (error) {
+ return void 0;
+ }
+ }
+ function azure() {
+ const collectionUri = process.env.SYSTEM_TEAMFOUNDATIONCOLLECTIONURI;
+ const project = process.env.SYSTEM_TEAMPROJECT;
+ const buildId = process.env.BUILD_BUILDID;
+ if (!collectionUri || !project || !buildId)
+ return void 0;
+ try {
+ const baseUrl = collectionUri.endsWith("/") ? collectionUri : `${collectionUri}/`;
+ const url = new URL(`${baseUrl}${project}/_build/results`);
+ url.searchParams.set("buildId", buildId);
+ return url.toString();
+ } catch (error) {
+ return void 0;
+ }
+ }
+ export {
+ CIUtils
+ };
+ //# sourceMappingURL=ciUtils.js.map
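
`CIUtils.runUrl()` is assembled entirely from CI environment variables: GitHub Actions first, then Azure Pipelines, then the generic `CI_JOB_URL` / `BUILD_URL` fallbacks. A sketch of what it yields on GitHub Actions, using hypothetical variable values based on the variables the code above reads:

```typescript
import { CIUtils } from '@flakiness/sdk';

// Hypothetical GitHub Actions environment (illustrative values, not from this diff):
//   GITHUB_SERVER_URL  = https://github.com
//   GITHUB_REPOSITORY  = acme/widgets
//   GITHUB_RUN_ID      = 1234567890
//   GITHUB_RUN_ATTEMPT = 2
//
// With those variables set, runUrl() returns:
//   https://github.com/acme/widgets/actions/runs/1234567890?attempt=2&check_suite_focus=true
// On Azure Pipelines it builds <collection>/<project>/_build/results?buildId=<id> instead,
// and it returns undefined when no supported CI variables are present.
const runUrl = CIUtils.runUrl();
```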
@@ -1,7 +1,96 @@
  // src/createEnvironment.ts
- import { spawnSync } from "child_process";
  import fs from "fs";
  import os from "os";
+
+ // src/_internalUtils.ts
+ import { spawnSync } from "child_process";
+ import http from "http";
+ import https from "https";
+ import util from "util";
+ import zlib from "zlib";
+ var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
+ var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
+ function errorText(error) {
+ return FLAKINESS_DBG ? error.stack : error.message;
+ }
+ async function retryWithBackoff(job, backoff = []) {
+ for (const timeout of backoff) {
+ try {
+ return await job();
+ } catch (e) {
+ if (e instanceof AggregateError)
+ console.error(`[flakiness.io err]`, errorText(e.errors[0]));
+ else if (e instanceof Error)
+ console.error(`[flakiness.io err]`, errorText(e));
+ else
+ console.error(`[flakiness.io err]`, e);
+ await new Promise((x) => setTimeout(x, timeout));
+ }
+ }
+ return await job();
+ }
+ var httpUtils;
+ ((httpUtils2) => {
+ function createRequest({ url, method = "get", headers = {} }) {
+ let resolve;
+ let reject;
+ const responseDataPromise = new Promise((a, b) => {
+ resolve = a;
+ reject = b;
+ });
+ const protocol = url.startsWith("https") ? https : http;
+ headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
+ const request = protocol.request(url, { method, headers }, (res) => {
+ const chunks = [];
+ res.on("data", (chunk) => chunks.push(chunk));
+ res.on("end", () => {
+ if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
+ resolve(Buffer.concat(chunks));
+ else
+ reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
+ });
+ res.on("error", (error) => reject(error));
+ });
+ request.on("error", reject);
+ return { request, responseDataPromise };
+ }
+ httpUtils2.createRequest = createRequest;
+ async function getBuffer(url, backoff) {
+ return await retryWithBackoff(async () => {
+ const { request, responseDataPromise } = createRequest({ url });
+ request.end();
+ return await responseDataPromise;
+ }, backoff);
+ }
+ httpUtils2.getBuffer = getBuffer;
+ async function getText(url, backoff) {
+ const buffer = await getBuffer(url, backoff);
+ return buffer.toString("utf-8");
+ }
+ httpUtils2.getText = getText;
+ async function getJSON(url) {
+ return JSON.parse(await getText(url));
+ }
+ httpUtils2.getJSON = getJSON;
+ async function postText(url, text, backoff) {
+ const headers = {
+ "Content-Type": "application/json",
+ "Content-Length": Buffer.byteLength(text) + ""
+ };
+ return await retryWithBackoff(async () => {
+ const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
+ request.write(text);
+ request.end();
+ return await responseDataPromise;
+ }, backoff);
+ }
+ httpUtils2.postText = postText;
+ async function postJSON(url, json, backoff) {
+ const buffer = await postText(url, JSON.stringify(json), backoff);
+ return JSON.parse(buffer.toString("utf-8"));
+ }
+ httpUtils2.postJSON = postJSON;
+ })(httpUtils || (httpUtils = {}));
  function shell(command, args, options) {
  try {
  const result = spawnSync(command, args, { encoding: "utf-8", ...options });
@@ -14,6 +103,8 @@ function shell(command, args, options) {
  return void 0;
  }
  }
+
+ // src/createEnvironment.ts
  function readLinuxOSRelease() {
  const osReleaseText = fs.readFileSync("/etc/os-release", "utf-8");
  return new Map(osReleaseText.toLowerCase().split("\n").filter((line) => line.includes("=")).map((line) => {
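
This hunk inlines the `_internalUtils` helpers, including `retryWithBackoff` and the `httpUtils` namespace, into the `createEnvironment` bundle. The retry semantics are easy to misread: the `backoff` array lists the delays slept after each failed attempt, so a job runs at most `backoff.length + 1` times and only the final failure propagates. A standalone sketch of the same behaviour follows; it mirrors the bundled helper and is not a documented public export.

```typescript
// Mirrors the bundled retryWithBackoff() above; illustrative, not a public SDK export.
async function retryWithBackoff<T>(job: () => Promise<T>, backoff: number[] = []): Promise<T> {
  for (const timeoutMs of backoff) {
    try {
      return await job();
    } catch (error) {
      // Failures before the last attempt are logged, then we wait and retry.
      console.error('[flakiness.io err]', error);
      await new Promise((resolve) => setTimeout(resolve, timeoutMs));
    }
  }
  // Final attempt: if this one throws, the error reaches the caller.
  return await job();
}

// Example: at most three attempts, waiting 500 ms and then 1000 ms between failures.
// await retryWithBackoff(() => fetchSomething(), [500, 1000]);
```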
@@ -1,132 +1,27 @@
  // src/createTestStepSnippets.ts
  import { codeFrameColumns } from "@babel/code-frame";
  import fs from "fs";
- import { posix as posixPath } from "path";

- // src/reportUtils.ts
- import { Multimap } from "@flakiness/shared/common/multimap.js";
- import { xxHash, xxHashObject } from "@flakiness/shared/common/utils.js";
- var ReportUtils;
- ((ReportUtils2) => {
- function visitTests(report, testVisitor) {
- function visitSuite(suite, parents) {
- parents.push(suite);
- for (const test of suite.tests ?? [])
- testVisitor(test, parents);
- for (const childSuite of suite.suites ?? [])
- visitSuite(childSuite, parents);
- parents.pop();
- }
- for (const test of report.tests ?? [])
- testVisitor(test, []);
- for (const suite of report.suites)
- visitSuite(suite, []);
- }
- ReportUtils2.visitTests = visitTests;
- function normalizeReport(report) {
- const gEnvs = /* @__PURE__ */ new Map();
- const gSuites = /* @__PURE__ */ new Map();
- const gTests = new Multimap();
- const gSuiteIds = /* @__PURE__ */ new Map();
- const gTestIds = /* @__PURE__ */ new Map();
- const gEnvIds = /* @__PURE__ */ new Map();
- const gSuiteChildren = new Multimap();
- const gSuiteTests = new Multimap();
- for (const env of report.environments) {
- const envId = computeEnvId(env);
- gEnvs.set(envId, env);
- gEnvIds.set(env, envId);
- }
- const usedEnvIds = /* @__PURE__ */ new Set();
- function visitTests2(tests, suiteId) {
- for (const test of tests ?? []) {
- const testId = computeTestId(test, suiteId);
- gTests.set(testId, test);
- gTestIds.set(test, testId);
- gSuiteTests.set(suiteId, test);
- for (const attempt of test.attempts) {
- const env = report.environments[attempt.environmentIdx];
- const envId = gEnvIds.get(env);
- usedEnvIds.add(envId);
- }
- }
- }
- function visitSuite(suite, parentSuiteId) {
- const suiteId = computeSuiteId(suite, parentSuiteId);
- gSuites.set(suiteId, suite);
- gSuiteIds.set(suite, suiteId);
- for (const childSuite of suite.suites ?? []) {
- visitSuite(childSuite, suiteId);
- gSuiteChildren.set(suiteId, childSuite);
- }
- visitTests2(suite.tests ?? [], suiteId);
- }
- function transformTests(tests) {
- const testIds = new Set(tests.map((test) => gTestIds.get(test)));
- return [...testIds].map((testId) => {
- const tests2 = gTests.getAll(testId);
- const tags = tests2.map((test) => test.tags ?? []).flat();
- return {
- location: tests2[0].location,
- title: tests2[0].title,
- tags: tags.length ? tags : void 0,
- attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
- ...attempt,
- environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
- }))
- };
- });
- }
- function transformSuites(suites) {
- const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
- return [...suiteIds].map((suiteId) => {
- const suite = gSuites.get(suiteId);
- return {
- location: suite.location,
- title: suite.title,
- type: suite.type,
- suites: transformSuites(gSuiteChildren.getAll(suiteId)),
- tests: transformTests(gSuiteTests.getAll(suiteId))
- };
- });
- }
- visitTests2(report.tests ?? [], "suiteless");
- for (const suite of report.suites)
- visitSuite(suite);
- const newEnvironments = [...usedEnvIds];
- const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
- return {
- ...report,
- environments: newEnvironments.map((envId) => gEnvs.get(envId)),
- suites: transformSuites(report.suites),
- tests: transformTests(report.tests ?? [])
- };
- }
- ReportUtils2.normalizeReport = normalizeReport;
- function computeEnvId(env) {
- return xxHashObject(env);
+ // src/visitTests.ts
+ function visitTests(report, testVisitor) {
+ function visitSuite(suite, parents) {
+ parents.push(suite);
+ for (const test of suite.tests ?? [])
+ testVisitor(test, parents);
+ for (const childSuite of suite.suites ?? [])
+ visitSuite(childSuite, parents);
+ parents.pop();
  }
- function computeSuiteId(suite, parentSuiteId) {
- return xxHash([
- parentSuiteId ?? "",
- suite.type,
- suite.location?.file ?? "",
- suite.title
- ]);
- }
- function computeTestId(test, suiteId) {
- return xxHash([
- suiteId,
- test.location?.file ?? "",
- test.title
- ]);
- }
- })(ReportUtils || (ReportUtils = {}));
+ for (const test of report.tests ?? [])
+ testVisitor(test, []);
+ for (const suite of report.suites)
+ visitSuite(suite, []);
+ }

  // src/createTestStepSnippets.ts
- function createTestStepSnippetsInplace(report, gitRoot) {
+ function createTestStepSnippetsInplace(worktree, report) {
  const allSteps = /* @__PURE__ */ new Map();
- ReportUtils.visitTests(report, (test) => {
+ visitTests(report, (test) => {
  for (const attempt of test.attempts) {
  for (const step of attempt.steps ?? []) {
  if (!step.location)
@@ -143,7 +38,7 @@ function createTestStepSnippetsInplace(report, gitRoot) {
  for (const [gitFilePath, steps] of allSteps) {
  let source;
  try {
- source = fs.readFileSync(posixPath.join(gitRoot, gitFilePath), "utf-8");
+ source = fs.readFileSync(worktree.absolutePath(gitFilePath), "utf-8");
  } catch (e) {
  continue;
  }
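
This last hunk changes `createTestStepSnippetsInplace` to take a `GitWorktree` as its first argument and to resolve step locations through `worktree.absolutePath()` instead of joining a raw `gitRoot` path. Assuming it is reached through the `ReportUtils` namespace, as the updated README lists it (the exact re-export wiring is not shown in this diff), the new call pattern would look roughly like this:

```typescript
import { FlakinessReport, GitWorktree, ReportUtils } from '@flakiness/sdk';

// Assume `report` was assembled as in the README Quick Start example.
declare const report: FlakinessReport.Report;

const worktree = GitWorktree.create(process.cwd());
// New argument order per this hunk: worktree first, then the report.
// Mutates the report in place, attaching code-frame snippets to steps that carry a location.
ReportUtils.createTestStepSnippetsInplace(worktree, report);
```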