flakiness 0.0.0 → 0.147.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +45 -0
- package/README.md +30 -0
- package/lib/cli/cli.js +1980 -0
- package/lib/cli/cmd-convert.js +421 -0
- package/lib/cli/cmd-download.js +42 -0
- package/lib/cli/cmd-link.js +21 -0
- package/lib/cli/cmd-login.js +223 -0
- package/lib/cli/cmd-logout.js +170 -0
- package/lib/cli/cmd-status.js +181 -0
- package/lib/cli/cmd-unlink.js +13 -0
- package/lib/cli/cmd-upload-playwright-json.js +463 -0
- package/lib/cli/cmd-upload.js +169 -0
- package/lib/cli/cmd-whoami.js +173 -0
- package/lib/flakinessSession.js +159 -0
- package/lib/junit.js +310 -0
- package/lib/playwrightJSONReport.js +429 -0
- package/lib/serverapi.js +111 -0
- package/lib/utils.js +374 -0
- package/package.json +41 -6
- package/types/tsconfig.tsbuildinfo +1 -0
- package/index.js +0 -0
|
@@ -0,0 +1,429 @@
|
|
|
1
|
+
// src/playwrightJSONReport.ts
|
|
2
|
+
import { FlakinessReport as FK, ReportUtils as ReportUtils2 } from "@flakiness/sdk";
|
|
3
|
+
import debug from "debug";
|
|
4
|
+
import { posix as posixPath2 } from "path";
|
|
5
|
+
|
|
6
|
+
// src/utils.ts
|
|
7
|
+
import { ReportUtils } from "@flakiness/sdk";
|
|
8
|
+
import assert from "assert";
|
|
9
|
+
import { spawnSync } from "child_process";
|
|
10
|
+
import crypto from "crypto";
|
|
11
|
+
import fs from "fs";
|
|
12
|
+
import http from "http";
|
|
13
|
+
import https from "https";
|
|
14
|
+
import os from "os";
|
|
15
|
+
import path, { posix as posixPath, win32 as win32Path } from "path";
|
|
16
|
+
/**
 * Checks whether a filesystem path exists (file or directory).
 * @param {string} aPath - Path to test.
 * @returns {Promise<boolean>} true if `stat` succeeds, false on any error.
 */
async function existsAsync(aPath) {
  // stat() rejects for missing paths *and* for permission errors; both are
  // reported as "does not exist" here. Dropped the unused catch parameter.
  return fs.promises.stat(aPath).then(() => true).catch(() => false);
}
|
|
19
|
+
/**
 * Collects user-supplied environment configuration.
 * Every variable named `FK_ENV_<name>` (prefix matched case-insensitively)
 * becomes a `<name>: <value>` entry, with the key lowercased and the value
 * trimmed and lowercased. Unset values become empty strings.
 * @returns {Record<string, string>}
 */
function extractEnvConfiguration() {
  const ENV_PREFIX = "FK_ENV_";
  const entries = [];
  for (const [key, value] of Object.entries(process.env)) {
    if (!key.toUpperCase().startsWith(ENV_PREFIX.toUpperCase()))
      continue;
    const name = key.substring(ENV_PREFIX.length).toLowerCase();
    entries.push([name, (value ?? "").trim().toLowerCase()]);
  }
  return Object.fromEntries(entries);
}
|
|
25
|
+
/**
 * Computes the SHA-1 digest of a file by streaming its contents, so large
 * files are never loaded into memory at once.
 * @param {string} filePath
 * @returns {Promise<string>} hex-encoded digest; rejects on read errors.
 */
function sha1File(filePath) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash("sha1");
    const stream = fs.createReadStream(filePath);
    stream.on("data", (chunk) => hash.update(chunk));
    stream.on("error", reject);
    stream.on("end", () => resolve(hash.digest("hex")));
  });
}
|
|
40
|
+
// When FLAKINESS_DBG is set, error reporting includes full stack traces.
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
/**
 * Renders an Error for console output: full stack in debug mode, message
 * only otherwise.
 * @param {Error} error
 * @returns {string | undefined}
 */
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
|
|
44
|
+
/**
 * Computes the SHA-1 digest of an in-memory buffer or string.
 * @param {Buffer | string} data
 * @returns {string} hex-encoded digest.
 */
function sha1Buffer(data) {
  return crypto.createHash("sha1").update(data).digest("hex");
}
|
|
49
|
+
/**
 * Runs `job`, retrying after each failure with the given backoff delays.
 * The job is attempted at most `backoff.length + 1` times; delays are
 * consumed in order between attempts, and the final attempt's error (if
 * any) propagates to the caller.
 * @template T
 * @param {() => Promise<T>} job
 * @param {number[]} [backoff] - Delays in milliseconds between attempts.
 * @returns {Promise<T>}
 */
async function retryWithBackoff(job, backoff = []) {
  for (const delayMs of backoff) {
    try {
      return await job();
    } catch (e) {
      // Log the failure, then wait before the next attempt.
      let described;
      if (e instanceof AggregateError)
        described = errorText(e.errors[0]);
      else if (e instanceof Error)
        described = errorText(e);
      else
        described = e;
      console.error(`[flakiness.io err]`, described);
      await new Promise((done) => setTimeout(done, delayMs));
    }
  }
  // Last attempt: any error propagates.
  return await job();
}
|
|
65
|
+
var httpUtils;
((httpUtils2) => {
  /**
   * Creates an http/https request plus a promise for its response body.
   * Responses outside [200, 300) reject; headers with undefined values are
   * dropped before sending.
   * @param {{url: string, method?: string, headers?: Record<string, string | undefined>}} options
   * @returns {{request: object, responseDataPromise: Promise<Buffer>}}
   */
  function createRequest({ url, method = "get", headers = {} }) {
    let resolve;
    let reject;
    const responseDataPromise = new Promise((a, b) => {
      resolve = a;
      reject = b;
    });
    const protocol = url.startsWith("https") ? https : http;
    headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
    const request = protocol.request(url, { method, headers }, (res) => {
      const chunks = [];
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
          resolve(Buffer.concat(chunks));
        else
          reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
      });
      res.on("error", (error) => reject(error));
    });
    request.on("error", reject);
    return { request, responseDataPromise };
  }
  httpUtils2.createRequest = createRequest;
  /** GETs a URL and resolves with the raw response body, retrying per `backoff`. */
  async function getBuffer(url, backoff) {
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url });
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.getBuffer = getBuffer;
  /** GETs a URL and resolves with the body decoded as UTF-8 text. */
  async function getText(url, backoff) {
    const buffer = await getBuffer(url, backoff);
    return buffer.toString("utf-8");
  }
  httpUtils2.getText = getText;
  /**
   * GETs a URL and parses the body as JSON.
   * The optional `backoff` is now forwarded so JSON requests retry like
   * every other helper in this namespace (previously it was ignored).
   */
  async function getJSON(url, backoff) {
    return JSON.parse(await getText(url, backoff));
  }
  httpUtils2.getJSON = getJSON;
  /** POSTs text as application/json and resolves with the raw response body. */
  async function postText(url, text, backoff) {
    const headers = {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(text) + ""
    };
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
      request.write(text);
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.postText = postText;
  /** POSTs a JSON value and parses the JSON response body. */
  async function postJSON(url, json, backoff) {
    const buffer = await postText(url, JSON.stringify(json), backoff);
    return JSON.parse(buffer.toString("utf-8"));
  }
  httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
|
|
127
|
+
// Matches ANSI/VT escape sequences (CSI color/cursor codes and BEL-terminated
// OSC sequences). Global flag is safe here: the regex is only used with
// String.replace, which does not leak lastIndex state.
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
/**
 * Removes ANSI terminal escape sequences from a string.
 * @param {string} str
 * @returns {string}
 */
function stripAnsi(str) {
  return str.replace(ansiRegex, "");
}
|
|
131
|
+
/**
 * Runs a command synchronously and returns its trimmed stdout.
 * @param {string} command
 * @param {string[]} [args]
 * @param {object} [options] - Extra options forwarded to spawnSync.
 * @returns {string | undefined} trimmed stdout, or undefined when the
 *   command exits non-zero, is killed by a signal, or fails to spawn
 *   (spawn failures are logged to stderr).
 */
function shell(command, args, options) {
  try {
    const result = spawnSync(command, args, { encoding: "utf-8", ...options });
    // A non-zero exit (or null status from a signal) means failure.
    return result.status === 0 ? result.stdout.trim() : void 0;
  } catch (e) {
    console.error(e);
    return void 0;
  }
}
|
|
143
|
+
/**
 * Parses /etc/os-release into a lowercased key -> value map.
 * Values are lowercased too, and surrounding double quotes are stripped.
 * Throws if /etc/os-release cannot be read (callers only use this on Linux).
 * @returns {Map<string, string>}
 */
function readLinuxOSRelease() {
  const osReleaseText = fs.readFileSync("/etc/os-release", "utf-8");
  return new Map(osReleaseText.toLowerCase().split("\n").filter((line) => line.includes("=")).map((line) => {
    line = line.trim();
    // BUGFIX: split at the FIRST "=" only. The previous `line.split("=")`
    // destructuring truncated values that themselves contain "=" (e.g.
    // BUG_REPORT_URL with a query string).
    const separatorIdx = line.indexOf("=");
    const key = line.substring(0, separatorIdx);
    let value = line.substring(separatorIdx + 1);
    if (value.startsWith('"') && value.endsWith('"'))
      value = value.substring(1, value.length - 1);
    return [key, value];
  }));
}
|
|
153
|
+
/**
 * Gathers OS name/arch/version on Linux from /etc/os-release, falling back
 * to `uname` for the name when os-release has none.
 * @returns {{name?: string, arch?: string, version?: string}}
 */
function osLinuxInfo() {
  const osReleaseMap = readLinuxOSRelease();
  return {
    name: osReleaseMap.get("name") ?? shell(`uname`),
    arch: shell(`uname`, [`-m`]),
    version: osReleaseMap.get("version_id")
  };
}
|
|
160
|
+
/**
 * Gathers OS info on macOS via `uname -m` and `sw_vers -productVersion`.
 * @returns {{name: string, arch?: string, version?: string}}
 */
function osDarwinInfo() {
  return {
    name: "macos",
    arch: shell(`uname`, [`-m`]),
    version: shell(`sw_vers`, [`-productVersion`])
  };
}
|
|
166
|
+
/**
 * Gathers OS info on Windows from the Node runtime (no shelling out).
 * @returns {{name: string, arch: string, version: string}}
 */
function osWinInfo() {
  return { name: "win", arch: process.arch, version: os.release() };
}
|
|
172
|
+
/**
 * Detects the current platform and returns {name, arch, version}.
 * Anything that is not darwin/win32 is treated as Linux.
 */
function getOSInfo() {
  switch (process.platform) {
    case "darwin":
      return osDarwinInfo();
    case "win32":
      return osWinInfo();
    default:
      return osLinuxInfo();
  }
}
|
|
179
|
+
/**
 * Infers a CI run URL from the environment.
 * Currently only GitHub Actions is recognized.
 * @returns {string | undefined}
 */
function inferRunUrl() {
  const { GITHUB_REPOSITORY, GITHUB_RUN_ID } = process.env;
  if (!GITHUB_REPOSITORY || !GITHUB_RUN_ID)
    return void 0;
  return `https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}`;
}
|
|
184
|
+
/**
 * Converts a date string into epoch milliseconds (NaN if unparsable).
 * @param {string} dateString
 * @returns {number}
 */
function parseStringDate(dateString) {
  return new Date(dateString).getTime();
}
|
|
187
|
+
/**
 * Returns the current HEAD commit SHA of the given repository.
 * Throws (via assert) when git cannot resolve HEAD.
 * @param {string} gitRepo - Any path inside the repository.
 * @returns {string}
 */
function gitCommitInfo(gitRepo) {
  const sha = shell(`git`, ["rev-parse", "HEAD"], { cwd: gitRepo, encoding: "utf-8" });
  assert(sha, `FAILED: git rev-parse HEAD @ ${gitRepo}`);
  return sha.trim();
}
|
|
195
|
+
/**
 * Resolves the git worktree root containing the given path, normalized to
 * the forward-slash form used throughout the report.
 * @param {string} somePathInsideGitRepo
 * @returns {string}
 */
function computeGitRoot(somePathInsideGitRepo) {
  const root = shell(`git`, ["rev-parse", "--show-toplevel"], { cwd: somePathInsideGitRepo, encoding: "utf-8" });
  assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
  return normalizePath(root);
}
|
|
203
|
+
// Detects absolute Windows paths like "C:\..." and the half-normalized
// "C:/..." form produced after separator replacement.
var IS_WIN32_PATH = /^[a-zA-Z]:\\/i;
var IS_ALMOST_POSIX_PATH = /^[a-zA-Z]:\//i;
/**
 * Normalizes a path to a posix-style form: "C:\a\b" becomes "/C/a/b",
 * while paths that are already posix-style pass through unchanged.
 * @param {string} aPath
 * @returns {string}
 */
function normalizePath(aPath) {
  if (IS_WIN32_PATH.test(aPath))
    aPath = aPath.split(win32Path.sep).join(posixPath.sep);
  if (IS_ALMOST_POSIX_PATH.test(aPath))
    return `/${aPath[0]}${aPath.substring(2)}`;
  return aPath;
}
|
|
213
|
+
/**
 * Computes a repository-relative posix path for an absolute file path.
 * Both arguments are expected to be already normalized (see normalizePath).
 * @param {string} gitRoot - Repository root.
 * @param {string} absolutePath - Absolute path inside the repository.
 * @returns {string}
 */
function gitFilePath(gitRoot, absolutePath) {
  return posixPath.relative(gitRoot, absolutePath);
}
|
|
216
|
+
/**
 * Validates a millisecond duration and truncates it to a whole number.
 * @param {number} value
 * @returns {number} integer milliseconds.
 * @throws {Error} when value is NaN or negative.
 */
function parseDurationMS(value) {
  if (isNaN(value))
    throw new Error("Duration cannot be NaN");
  if (value < 0)
    throw new Error(`Duration cannot be less than 0, found ${value}`);
  // BUGFIX: Math.floor instead of `| 0` - bitwise ops wrap at 2^31 and
  // would corrupt durations longer than ~24.8 days.
  return Math.floor(value);
}
|
|
223
|
+
/**
 * Builds one report environment per Playwright project.
 * Project names are de-duplicated by appending "-2", "-3", ... so every
 * environment gets a unique name; unnamed projects become "anonymous".
 * @param {Array<{name: string, metadata?: object}>} projects
 * @returns {Map<object, object>} project -> environment descriptor.
 */
function createEnvironments(projects) {
  const envConfiguration = extractEnvConfiguration();
  const osInfo = getOSInfo();
  const uniqueNames = /* @__PURE__ */ new Set();
  const result = /* @__PURE__ */ new Map();
  for (const project of projects) {
    let defaultName = project.name;
    if (!defaultName.trim())
      defaultName = "anonymous";
    let name = defaultName;
    for (let i = 2; uniqueNames.has(name); ++i)
      name = `${defaultName}-${i}`;
    // BUGFIX: record the *final* (possibly suffixed) name. The previous
    // code added `defaultName`, so three projects named "foo" produced
    // "foo", "foo-2", "foo-2" - a duplicate environment name.
    uniqueNames.add(name);
    result.set(project, {
      name,
      systemData: {
        osArch: osInfo.arch,
        osName: osInfo.name,
        osVersion: osInfo.version
      },
      userSuppliedData: {
        // FK_ENV_* environment variables; per-project metadata wins on clash.
        ...envConfiguration,
        ...project.metadata
      },
      opaqueData: {
        project
      }
    });
  }
  return result;
}
|
|
254
|
+
|
|
255
|
+
// src/playwrightJSONReport.ts
|
|
256
|
+
// Debug logger; enable with DEBUG=flakiness:json-report.
var dlog = debug("flakiness:json-report");
var PlaywrightJSONReport;
((PlaywrightJSONReport2) => {
  // Collects the git + OS metadata needed to convert a Playwright JSON
  // report. `somePathInsideProject` may be any directory inside the git
  // checkout; defaults to the current working directory.
  function collectMetadata(somePathInsideProject = process.cwd()) {
    const commitId = gitCommitInfo(somePathInsideProject);
    const osInfo = getOSInfo();
    const metadata = {
      gitRoot: computeGitRoot(somePathInsideProject),
      commitId,
      osName: osInfo.name,
      arch: osInfo.arch,
      osVersion: osInfo.version,
      runURL: inferRunUrl()
    };
    dlog(`metadata directory: ${somePathInsideProject}`);
    dlog(`metadata: ${JSON.stringify(metadata)}`);
    dlog(`commit info: ${JSON.stringify(commitId)}`);
    dlog(`os info: ${JSON.stringify(osInfo)}`);
    return metadata;
  }
  PlaywrightJSONReport2.collectMetadata = collectMetadata;
  // Converts a Playwright JSON report into a Flakiness report. Returns the
  // normalized report plus the attachments extracted along the way
  // (deduplicated by content SHA-1) and any attachment paths that could
  // not be read from disk.
  async function parse(metadata, jsonReport, options) {
    const context = {
      projectId2environmentIdx: /* @__PURE__ */ new Map(),
      testBaseDir: normalizePath(jsonReport.config.rootDir),
      gitRoot: metadata.gitRoot,
      attachments: /* @__PURE__ */ new Map(),
      unaccessibleAttachmentPaths: [],
      extractAttachments: options.extractAttachments
    };
    const configPath = jsonReport.config.configFile ? gitFilePath(context.gitRoot, normalizePath(jsonReport.config.configFile)) : void 0;
    const report = {
      category: FK.CATEGORY_PLAYWRIGHT,
      commitId: metadata.commitId,
      configPath,
      url: metadata.runURL,
      environments: [],
      suites: [],
      opaqueData: jsonReport.config,
      unattributedErrors: jsonReport.errors.map((error) => parseJSONError(context, error)),
      // The report.stats is a relatively new addition to Playwright's JSONReport,
      // so we have to polyfill with some reasonable values when it's missing.
      duration: jsonReport.stats?.duration && jsonReport.stats?.duration > 0 ? parseDurationMS(jsonReport.stats.duration) : 0,
      startTimestamp: jsonReport.stats && jsonReport.stats.startTime ? parseStringDate(jsonReport.stats.startTime) : Date.now()
    };
    // Environments mirror jsonReport.config.projects in order, so the
    // project at index i maps to environment index i.
    report.environments = [...createEnvironments(jsonReport.config.projects).values()];
    for (let envIdx = 0; envIdx < report.environments.length; ++envIdx)
      context.projectId2environmentIdx.set(jsonReport.config.projects[envIdx].id, envIdx);
    report.suites = await Promise.all(jsonReport.suites.map((suite) => parseJSONSuite(context, suite)));
    return {
      report: ReportUtils2.normalizeReport(report),
      attachments: [...context.attachments.values()],
      unaccessibleAttachmentPaths: context.unaccessibleAttachmentPaths
    };
  }
  PlaywrightJSONReport2.parse = parse;
})(PlaywrightJSONReport || (PlaywrightJSONReport = {}));
|
|
313
|
+
/**
 * Converts a Playwright JSON suite into a Flakiness suite node.
 * A suite anchored at line 0 / column 0 represents a whole file; a suite
 * without a title is an anonymous describe block.
 * @returns {Promise<object>}
 */
async function parseJSONSuite(context, jsonSuite) {
  const isFileSuite = jsonSuite.column === 0 && jsonSuite.line === 0;
  const type = isFileSuite ? "file" : jsonSuite.title ? "suite" : "anonymous suite";
  const suite = {
    type,
    title: jsonSuite.title,
    location: {
      file: gitFilePath(context.gitRoot, normalizePath(jsonSuite.file)),
      line: jsonSuite.line,
      column: jsonSuite.column
    }
  };
  if (jsonSuite.suites?.length)
    suite.suites = await Promise.all(jsonSuite.suites.map((child) => parseJSONSuite(context, child)));
  if (jsonSuite.specs?.length)
    suite.tests = await Promise.all(jsonSuite.specs.map((spec) => parseJSONSpec(context, spec)));
  return suite;
}
|
|
334
|
+
// Converts a Playwright "spec" (one declared test) into a Flakiness test
// entry. A spec may have run under several projects; each project run
// contributes its results as attempts tagged with the matching environment
// index. Results without a status are skipped entirely.
async function parseJSONSpec(context, jsonSpec) {
  const test = {
    title: jsonSpec.title,
    tags: jsonSpec.tags,
    location: {
      // jsonSpec.file is relative to the config rootDir, so anchor it there
      // before re-basing onto the git root.
      file: gitFilePath(context.gitRoot, normalizePath(posixPath2.join(context.testBaseDir, normalizePath(jsonSpec.file)))),
      line: jsonSpec.line,
      column: jsonSpec.column
    },
    attempts: []
  };
  for (const jsonTest of jsonSpec.tests) {
    const environmentIdx = context.projectId2environmentIdx.get(jsonTest.projectId);
    // Every test must belong to a project seen in the report config.
    if (environmentIdx === void 0)
      throw new Error("Inconsistent report - no project for a test found!");
    const testResults = jsonTest.results.filter((result) => result.status !== void 0);
    if (!testResults.length)
      continue;
    test.attempts.push(...await Promise.all(testResults.map((jsonTestResult) => parseJSONTestResult(context, jsonTest, environmentIdx, jsonTestResult))));
  }
  return test;
}
|
|
356
|
+
/**
 * Maps a Playwright location onto a Flakiness location, with the file
 * expressed relative to the git root.
 */
function createLocation(context, location) {
  const { line, column } = location;
  const file = gitFilePath(context.gitRoot, normalizePath(location.file));
  return { file, line, column };
}
|
|
363
|
+
// Converts one Playwright test result (a single attempt of a test under one
// project) into a Flakiness attempt. When attachment extraction is enabled,
// attachment contents are deduplicated by SHA-1 into context.attachments and
// referenced by id from the attempt; attachment paths that cannot be read
// are recorded in context.unaccessibleAttachmentPaths instead.
async function parseJSONTestResult(context, jsonTest, environmentIdx, jsonTestResult) {
  const attachments = [];
  const attempt = {
    timeout: parseDurationMS(jsonTest.timeout),
    annotations: jsonTest.annotations.map((annotation) => ({
      type: annotation.type,
      description: annotation.description,
      location: annotation.location ? createLocation(context, annotation.location) : void 0
    })),
    environmentIdx,
    expectedStatus: jsonTest.expectedStatus,
    parallelIndex: jsonTestResult.parallelIndex,
    status: jsonTestResult.status,
    // Empty arrays collapse to undefined so they drop out of the report.
    errors: jsonTestResult.errors && jsonTestResult.errors.length ? jsonTestResult.errors.map((error) => parseJSONError(context, error)) : void 0,
    stdout: jsonTestResult.stdout && jsonTestResult.stdout.length ? jsonTestResult.stdout : void 0,
    stderr: jsonTestResult.stderr && jsonTestResult.stderr.length ? jsonTestResult.stderr : void 0,
    steps: jsonTestResult.steps ? jsonTestResult.steps.map((jsonTestStep) => parseJSONTestStep(context, jsonTestStep)) : void 0,
    startTimestamp: parseStringDate(jsonTestResult.startTime),
    // Negative/missing durations are clamped to 0 (see parseDurationMS).
    duration: jsonTestResult.duration && jsonTestResult.duration > 0 ? parseDurationMS(jsonTestResult.duration) : 0,
    attachments
  };
  if (context.extractAttachments) {
    await Promise.all((jsonTestResult.attachments ?? []).map(async (jsonAttachment) => {
      if (jsonAttachment.path && !await existsAsync(jsonAttachment.path)) {
        context.unaccessibleAttachmentPaths.push(jsonAttachment.path);
        return;
      }
      // Content id: SHA-1 of the file on disk, or of the inline body.
      const id = jsonAttachment.path ? await sha1File(jsonAttachment.path) : sha1Buffer(jsonAttachment.body ?? "");
      context.attachments.set(id, {
        contentType: jsonAttachment.contentType,
        id,
        body: jsonAttachment.body ? Buffer.from(jsonAttachment.body) : void 0,
        path: jsonAttachment.path
      });
      attachments.push({
        id,
        name: jsonAttachment.name,
        contentType: jsonAttachment.contentType
      });
    }));
  }
  return attempt;
}
|
|
406
|
+
/**
 * Recursively converts a Playwright test step (and its children) into a
 * Flakiness step.
 */
function parseJSONTestStep(context, jsonStep) {
  const step = {
    // NOTE: some Playwright versions reported a duration of -1; clamp to 0.
    duration: parseDurationMS(Math.max(jsonStep.duration, 0)),
    title: jsonStep.title
  };
  const { error, steps } = jsonStep;
  if (error)
    step.error = parseJSONError(context, error);
  if (steps)
    step.steps = steps.map((child) => parseJSONTestStep(context, child));
  return step;
}
|
|
418
|
+
/**
 * Converts a Playwright error into a Flakiness error. Only the first line
 * of the message is kept, with ANSI color codes removed.
 */
function parseJSONError(context, error) {
  const { stack, value } = error;
  const location = error.location ? createLocation(context, error.location) : void 0;
  const message = error.message ? stripAnsi(error.message).split("\n")[0] : void 0;
  return { location, message, stack, value };
}
|
|
426
|
+
export {
|
|
427
|
+
PlaywrightJSONReport
|
|
428
|
+
};
|
|
429
|
+
//# sourceMappingURL=playwrightJSONReport.js.map
|
package/lib/serverapi.js
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
// src/serverapi.ts
|
|
2
|
+
import { TypedHTTP } from "@flakiness/shared/common/typedHttp.js";
|
|
3
|
+
|
|
4
|
+
// src/utils.ts
|
|
5
|
+
import { ReportUtils } from "@flakiness/sdk";
|
|
6
|
+
import http from "http";
|
|
7
|
+
import https from "https";
|
|
8
|
+
// Bundler-duplicated copy of src/utils.ts helpers for serverapi.js.
// Full stack traces are included when FLAKINESS_DBG is set.
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
/**
 * Renders an Error for console output: stack in debug mode, message otherwise.
 * @param {Error} error
 * @returns {string | undefined}
 */
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
|
|
12
|
+
/**
 * Runs `job`, retrying after each failure with the given backoff delays.
 * At most `backoff.length + 1` attempts are made; the final attempt's
 * error (if any) propagates to the caller.
 * @template T
 * @param {() => Promise<T>} job
 * @param {number[]} [backoff] - Delays in milliseconds between attempts.
 * @returns {Promise<T>}
 */
async function retryWithBackoff(job, backoff = []) {
  for (const delayMs of backoff) {
    try {
      return await job();
    } catch (e) {
      // Log the failure, then wait before the next attempt.
      let described;
      if (e instanceof AggregateError)
        described = errorText(e.errors[0]);
      else if (e instanceof Error)
        described = errorText(e);
      else
        described = e;
      console.error(`[flakiness.io err]`, described);
      await new Promise((done) => setTimeout(done, delayMs));
    }
  }
  // Last attempt: any error propagates.
  return await job();
}
|
|
28
|
+
var httpUtils;
((httpUtils2) => {
  /**
   * Creates an http/https request plus a promise for its response body.
   * Responses outside [200, 300) reject; headers with undefined values are
   * dropped before sending.
   * @param {{url: string, method?: string, headers?: Record<string, string | undefined>}} options
   * @returns {{request: object, responseDataPromise: Promise<Buffer>}}
   */
  function createRequest({ url, method = "get", headers = {} }) {
    let resolve;
    let reject;
    const responseDataPromise = new Promise((a, b) => {
      resolve = a;
      reject = b;
    });
    const protocol = url.startsWith("https") ? https : http;
    headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
    const request = protocol.request(url, { method, headers }, (res) => {
      const chunks = [];
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
          resolve(Buffer.concat(chunks));
        else
          reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
      });
      res.on("error", (error) => reject(error));
    });
    request.on("error", reject);
    return { request, responseDataPromise };
  }
  httpUtils2.createRequest = createRequest;
  /** GETs a URL and resolves with the raw response body, retrying per `backoff`. */
  async function getBuffer(url, backoff) {
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url });
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.getBuffer = getBuffer;
  /** GETs a URL and resolves with the body decoded as UTF-8 text. */
  async function getText(url, backoff) {
    const buffer = await getBuffer(url, backoff);
    return buffer.toString("utf-8");
  }
  httpUtils2.getText = getText;
  /**
   * GETs a URL and parses the body as JSON.
   * The optional `backoff` is now forwarded so JSON requests retry like
   * every other helper in this namespace (previously it was ignored).
   */
  async function getJSON(url, backoff) {
    return JSON.parse(await getText(url, backoff));
  }
  httpUtils2.getJSON = getJSON;
  /** POSTs text as application/json and resolves with the raw response body. */
  async function postText(url, text, backoff) {
    const headers = {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(text) + ""
    };
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
      request.write(text);
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.postText = postText;
  /** POSTs a JSON value and parses the JSON response body. */
  async function postJSON(url, json, backoff) {
    const buffer = await postText(url, JSON.stringify(json), backoff);
    return JSON.parse(buffer.toString("utf-8"));
  }
  httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
|
|
90
|
+
// NOTE(review): these three regexes are unused in this bundle - they look
// like bundler residue pulled in from src/utils.ts alongside retryWithBackoff.
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
|
|
93
|
+
|
|
94
|
+
// src/serverapi.ts
|
|
95
|
+
/**
 * Creates a typed client for the Flakiness server API.
 * @param {string} endpoint - Server origin; "/api/" is appended.
 * @param {{auth?: string, retries?: number[]}} [options] - Optional bearer
 *   token and retry backoff delays in milliseconds.
 */
function createServerAPI(endpoint, options) {
  endpoint += "/api/";
  const fetcher = options?.auth ? (url, init) => fetch(url, {
    ...init,
    headers: {
      // BUGFIX: `init` may legitimately be undefined for bodiless requests;
      // the previous `init.headers` would throw a TypeError in that case.
      ...init?.headers,
      "Authorization": `Bearer ${options.auth}`
    }
  }) : fetch;
  if (options?.retries)
    return TypedHTTP.createClient(endpoint, (url, init) => retryWithBackoff(() => fetcher(url, init), options.retries));
  return TypedHTTP.createClient(endpoint, fetcher);
}
|
|
108
|
+
export {
|
|
109
|
+
createServerAPI
|
|
110
|
+
};
|
|
111
|
+
//# sourceMappingURL=serverapi.js.map
|