flakiness 0.0.0 → 0.147.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -0,0 +1,463 @@
+ #!/usr/bin/env node
+
+ // src/cli/cmd-upload-playwright-json.ts
+ import { ReportUploader } from "@flakiness/sdk";
+ import fs2 from "fs/promises";
+ import path2 from "path";
+
+ // src/playwrightJSONReport.ts
+ import { FlakinessReport as FK, ReportUtils as ReportUtils2 } from "@flakiness/sdk";
+ import debug from "debug";
+ import { posix as posixPath2 } from "path";
+
+ // src/utils.ts
+ import { ReportUtils } from "@flakiness/sdk";
+ import assert from "assert";
+ import { spawnSync } from "child_process";
+ import crypto from "crypto";
+ import fs from "fs";
+ import http from "http";
+ import https from "https";
+ import os from "os";
+ import path, { posix as posixPath, win32 as win32Path } from "path";
+ async function existsAsync(aPath) {
+   return fs.promises.stat(aPath).then(() => true).catch((e) => false);
+ }
+ function extractEnvConfiguration() {
+   const ENV_PREFIX = "FK_ENV_";
+   return Object.fromEntries(
+     Object.entries(process.env).filter(([key]) => key.toUpperCase().startsWith(ENV_PREFIX.toUpperCase())).map(([key, value]) => [key.substring(ENV_PREFIX.length).toLowerCase(), (value ?? "").trim().toLowerCase()])
+   );
+ }
+ function sha1File(filePath) {
+   return new Promise((resolve, reject) => {
+     const hash = crypto.createHash("sha1");
+     const stream = fs.createReadStream(filePath);
+     stream.on("data", (chunk) => {
+       hash.update(chunk);
+     });
+     stream.on("end", () => {
+       resolve(hash.digest("hex"));
+     });
+     stream.on("error", (err) => {
+       reject(err);
+     });
+   });
+ }
+ var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
+ function errorText(error) {
+   return FLAKINESS_DBG ? error.stack : error.message;
+ }
+ function sha1Buffer(data) {
+   const hash = crypto.createHash("sha1");
+   hash.update(data);
+   return hash.digest("hex");
+ }
+ async function retryWithBackoff(job, backoff = []) {
+   for (const timeout of backoff) {
+     try {
+       return await job();
+     } catch (e) {
+       if (e instanceof AggregateError)
+         console.error(`[flakiness.io err]`, errorText(e.errors[0]));
+       else if (e instanceof Error)
+         console.error(`[flakiness.io err]`, errorText(e));
+       else
+         console.error(`[flakiness.io err]`, e);
+       await new Promise((x) => setTimeout(x, timeout));
+     }
+   }
+   return await job();
+ }
+ var httpUtils;
+ ((httpUtils2) => {
+   function createRequest({ url, method = "get", headers = {} }) {
+     let resolve;
+     let reject;
+     const responseDataPromise = new Promise((a, b) => {
+       resolve = a;
+       reject = b;
+     });
+     const protocol = url.startsWith("https") ? https : http;
+     headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
+     const request = protocol.request(url, { method, headers }, (res) => {
+       const chunks = [];
+       res.on("data", (chunk) => chunks.push(chunk));
+       res.on("end", () => {
+         if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
+           resolve(Buffer.concat(chunks));
+         else
+           reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
+       });
+       res.on("error", (error) => reject(error));
+     });
+     request.on("error", reject);
+     return { request, responseDataPromise };
+   }
+   httpUtils2.createRequest = createRequest;
+   async function getBuffer(url, backoff) {
+     return await retryWithBackoff(async () => {
+       const { request, responseDataPromise } = createRequest({ url });
+       request.end();
+       return await responseDataPromise;
+     }, backoff);
+   }
+   httpUtils2.getBuffer = getBuffer;
+   async function getText(url, backoff) {
+     const buffer = await getBuffer(url, backoff);
+     return buffer.toString("utf-8");
+   }
+   httpUtils2.getText = getText;
+   async function getJSON(url) {
+     return JSON.parse(await getText(url));
+   }
+   httpUtils2.getJSON = getJSON;
+   async function postText(url, text, backoff) {
+     const headers = {
+       "Content-Type": "application/json",
+       "Content-Length": Buffer.byteLength(text) + ""
+     };
+     return await retryWithBackoff(async () => {
+       const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
+       request.write(text);
+       request.end();
+       return await responseDataPromise;
+     }, backoff);
+   }
+   httpUtils2.postText = postText;
+   async function postJSON(url, json, backoff) {
+     const buffer = await postText(url, JSON.stringify(json), backoff);
+     return JSON.parse(buffer.toString("utf-8"));
+   }
+   httpUtils2.postJSON = postJSON;
+ })(httpUtils || (httpUtils = {}));
+ var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
+ function stripAnsi(str) {
+   return str.replace(ansiRegex, "");
+ }
+ function shell(command, args, options) {
+   try {
+     const result = spawnSync(command, args, { encoding: "utf-8", ...options });
+     if (result.status !== 0) {
+       return void 0;
+     }
+     return result.stdout.trim();
+   } catch (e) {
+     console.error(e);
+     return void 0;
+   }
+ }
+ function readLinuxOSRelease() {
+   const osReleaseText = fs.readFileSync("/etc/os-release", "utf-8");
+   return new Map(osReleaseText.toLowerCase().split("\n").filter((line) => line.includes("=")).map((line) => {
+     line = line.trim();
+     let [key, value] = line.split("=");
+     if (value.startsWith('"') && value.endsWith('"'))
+       value = value.substring(1, value.length - 1);
+     return [key, value];
+   }));
+ }
+ function osLinuxInfo() {
+   const arch = shell(`uname`, [`-m`]);
+   const osReleaseMap = readLinuxOSRelease();
+   const name = osReleaseMap.get("name") ?? shell(`uname`);
+   const version = osReleaseMap.get("version_id");
+   return { name, arch, version };
+ }
+ function osDarwinInfo() {
+   const name = "macos";
+   const arch = shell(`uname`, [`-m`]);
+   const version = shell(`sw_vers`, [`-productVersion`]);
+   return { name, arch, version };
+ }
+ function osWinInfo() {
+   const name = "win";
+   const arch = process.arch;
+   const version = os.release();
+   return { name, arch, version };
+ }
+ function getOSInfo() {
+   if (process.platform === "darwin")
+     return osDarwinInfo();
+   if (process.platform === "win32")
+     return osWinInfo();
+   return osLinuxInfo();
+ }
+ function inferRunUrl() {
+   if (process.env.GITHUB_REPOSITORY && process.env.GITHUB_RUN_ID)
+     return `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`;
+   return void 0;
+ }
+ function parseStringDate(dateString) {
+   return +new Date(dateString);
+ }
+ function gitCommitInfo(gitRepo) {
+   const sha = shell(`git`, ["rev-parse", "HEAD"], {
+     cwd: gitRepo,
+     encoding: "utf-8"
+   });
+   assert(sha, `FAILED: git rev-parse HEAD @ ${gitRepo}`);
+   return sha.trim();
+ }
+ function computeGitRoot(somePathInsideGitRepo) {
+   const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
+     cwd: somePathInsideGitRepo,
+     encoding: "utf-8"
+   });
+   assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
+   return normalizePath(root);
+ }
+ var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
+ var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
+ function normalizePath(aPath) {
+   if (IS_WIN32_PATH.test(aPath)) {
+     aPath = aPath.split(win32Path.sep).join(posixPath.sep);
+   }
+   if (IS_ALMOST_POSIX_PATH.test(aPath))
+     return "/" + aPath[0] + aPath.substring(2);
+   return aPath;
+ }
+ function gitFilePath(gitRoot, absolutePath) {
+   return posixPath.relative(gitRoot, absolutePath);
+ }
+ function parseDurationMS(value) {
+   if (isNaN(value))
+     throw new Error("Duration cannot be NaN");
+   if (value < 0)
+     throw new Error(`Duration cannot be less than 0, found ${value}`);
+   return value | 0;
+ }
+ function createEnvironments(projects) {
+   const envConfiguration = extractEnvConfiguration();
+   const osInfo = getOSInfo();
+   let uniqueNames = /* @__PURE__ */ new Set();
+   const result = /* @__PURE__ */ new Map();
+   for (const project of projects) {
+     let defaultName = project.name;
+     if (!defaultName.trim())
+       defaultName = "anonymous";
+     let name = defaultName;
+     for (let i = 2; uniqueNames.has(name); ++i)
+       name = `${defaultName}-${i}`;
+     uniqueNames.add(defaultName);
+     result.set(project, {
+       name,
+       systemData: {
+         osArch: osInfo.arch,
+         osName: osInfo.name,
+         osVersion: osInfo.version
+       },
+       userSuppliedData: {
+         ...envConfiguration,
+         ...project.metadata
+       },
+       opaqueData: {
+         project
+       }
+     });
+   }
+   return result;
+ }
+
+ // src/playwrightJSONReport.ts
+ var dlog = debug("flakiness:json-report");
+ var PlaywrightJSONReport;
+ ((PlaywrightJSONReport2) => {
+   function collectMetadata(somePathInsideProject = process.cwd()) {
+     const commitId = gitCommitInfo(somePathInsideProject);
+     const osInfo = getOSInfo();
+     const metadata = {
+       gitRoot: computeGitRoot(somePathInsideProject),
+       commitId,
+       osName: osInfo.name,
+       arch: osInfo.arch,
+       osVersion: osInfo.version,
+       runURL: inferRunUrl()
+     };
+     dlog(`metadata directory: ${somePathInsideProject}`);
+     dlog(`metadata: ${JSON.stringify(metadata)}`);
+     dlog(`commit info: ${JSON.stringify(commitId)}`);
+     dlog(`os info: ${JSON.stringify(osInfo)}`);
+     return metadata;
+   }
+   PlaywrightJSONReport2.collectMetadata = collectMetadata;
+   async function parse(metadata, jsonReport, options) {
+     const context = {
+       projectId2environmentIdx: /* @__PURE__ */ new Map(),
+       testBaseDir: normalizePath(jsonReport.config.rootDir),
+       gitRoot: metadata.gitRoot,
+       attachments: /* @__PURE__ */ new Map(),
+       unaccessibleAttachmentPaths: [],
+       extractAttachments: options.extractAttachments
+     };
+     const configPath = jsonReport.config.configFile ? gitFilePath(context.gitRoot, normalizePath(jsonReport.config.configFile)) : void 0;
+     const report = {
+       category: FK.CATEGORY_PLAYWRIGHT,
+       commitId: metadata.commitId,
+       configPath,
+       url: metadata.runURL,
+       environments: [],
+       suites: [],
+       opaqueData: jsonReport.config,
+       unattributedErrors: jsonReport.errors.map((error) => parseJSONError(context, error)),
+       // The report.stats is a relatively new addition to Playwright's JSONReport,
+       // so we have to polyfill with some reasonable values when it's missing.
+       duration: jsonReport.stats?.duration && jsonReport.stats?.duration > 0 ? parseDurationMS(jsonReport.stats.duration) : 0,
+       startTimestamp: jsonReport.stats && jsonReport.stats.startTime ? parseStringDate(jsonReport.stats.startTime) : Date.now()
+     };
+     report.environments = [...createEnvironments(jsonReport.config.projects).values()];
+     for (let envIdx = 0; envIdx < report.environments.length; ++envIdx)
+       context.projectId2environmentIdx.set(jsonReport.config.projects[envIdx].id, envIdx);
+     report.suites = await Promise.all(jsonReport.suites.map((suite) => parseJSONSuite(context, suite)));
+     return {
+       report: ReportUtils2.normalizeReport(report),
+       attachments: [...context.attachments.values()],
+       unaccessibleAttachmentPaths: context.unaccessibleAttachmentPaths
+     };
+   }
+   PlaywrightJSONReport2.parse = parse;
+ })(PlaywrightJSONReport || (PlaywrightJSONReport = {}));
+ async function parseJSONSuite(context, jsonSuite) {
+   let type = "suite";
+   if (jsonSuite.column === 0 && jsonSuite.line === 0)
+     type = "file";
+   else if (!jsonSuite.title)
+     type = "anonymous suite";
+   const suite = {
+     type,
+     title: jsonSuite.title,
+     location: {
+       file: gitFilePath(context.gitRoot, normalizePath(jsonSuite.file)),
+       line: jsonSuite.line,
+       column: jsonSuite.column
+     }
+   };
+   if (jsonSuite.suites && jsonSuite.suites.length)
+     suite.suites = await Promise.all(jsonSuite.suites.map((suite2) => parseJSONSuite(context, suite2)));
+   if (jsonSuite.specs && jsonSuite.specs.length)
+     suite.tests = await Promise.all(jsonSuite.specs.map((spec) => parseJSONSpec(context, spec)));
+   return suite;
+ }
+ async function parseJSONSpec(context, jsonSpec) {
+   const test = {
+     title: jsonSpec.title,
+     tags: jsonSpec.tags,
+     location: {
+       file: gitFilePath(context.gitRoot, normalizePath(posixPath2.join(context.testBaseDir, normalizePath(jsonSpec.file)))),
+       line: jsonSpec.line,
+       column: jsonSpec.column
+     },
+     attempts: []
+   };
+   for (const jsonTest of jsonSpec.tests) {
+     const environmentIdx = context.projectId2environmentIdx.get(jsonTest.projectId);
+     if (environmentIdx === void 0)
+       throw new Error("Inconsistent report - no project for a test found!");
+     const testResults = jsonTest.results.filter((result) => result.status !== void 0);
+     if (!testResults.length)
+       continue;
+     test.attempts.push(...await Promise.all(testResults.map((jsonTestResult) => parseJSONTestResult(context, jsonTest, environmentIdx, jsonTestResult))));
+   }
+   return test;
+ }
+ function createLocation(context, location) {
+   return {
+     file: gitFilePath(context.gitRoot, normalizePath(location.file)),
+     line: location.line,
+     column: location.column
+   };
+ }
+ async function parseJSONTestResult(context, jsonTest, environmentIdx, jsonTestResult) {
+   const attachments = [];
+   const attempt = {
+     timeout: parseDurationMS(jsonTest.timeout),
+     annotations: jsonTest.annotations.map((annotation) => ({
+       type: annotation.type,
+       description: annotation.description,
+       location: annotation.location ? createLocation(context, annotation.location) : void 0
+     })),
+     environmentIdx,
+     expectedStatus: jsonTest.expectedStatus,
+     parallelIndex: jsonTestResult.parallelIndex,
+     status: jsonTestResult.status,
+     errors: jsonTestResult.errors && jsonTestResult.errors.length ? jsonTestResult.errors.map((error) => parseJSONError(context, error)) : void 0,
+     stdout: jsonTestResult.stdout && jsonTestResult.stdout.length ? jsonTestResult.stdout : void 0,
+     stderr: jsonTestResult.stderr && jsonTestResult.stderr.length ? jsonTestResult.stderr : void 0,
+     steps: jsonTestResult.steps ? jsonTestResult.steps.map((jsonTestStep) => parseJSONTestStep(context, jsonTestStep)) : void 0,
+     startTimestamp: parseStringDate(jsonTestResult.startTime),
+     duration: jsonTestResult.duration && jsonTestResult.duration > 0 ? parseDurationMS(jsonTestResult.duration) : 0,
+     attachments
+   };
+   if (context.extractAttachments) {
+     await Promise.all((jsonTestResult.attachments ?? []).map(async (jsonAttachment) => {
+       if (jsonAttachment.path && !await existsAsync(jsonAttachment.path)) {
+         context.unaccessibleAttachmentPaths.push(jsonAttachment.path);
+         return;
+       }
+       const id = jsonAttachment.path ? await sha1File(jsonAttachment.path) : sha1Buffer(jsonAttachment.body ?? "");
+       context.attachments.set(id, {
+         contentType: jsonAttachment.contentType,
+         id,
+         body: jsonAttachment.body ? Buffer.from(jsonAttachment.body) : void 0,
+         path: jsonAttachment.path
+       });
+       attachments.push({
+         id,
+         name: jsonAttachment.name,
+         contentType: jsonAttachment.contentType
+       });
+     }));
+   }
+   return attempt;
+ }
+ function parseJSONTestStep(context, jsonStep) {
+   const step = {
+     // NOTE: jsonStep.duration was -1 in some playwright versions
+     duration: parseDurationMS(Math.max(jsonStep.duration, 0)),
+     title: jsonStep.title
+   };
+   if (jsonStep.error)
+     step.error = parseJSONError(context, jsonStep.error);
+   if (jsonStep.steps)
+     step.steps = jsonStep.steps.map((childJSONStep) => parseJSONTestStep(context, childJSONStep));
+   return step;
+ }
+ function parseJSONError(context, error) {
+   return {
+     location: error.location ? createLocation(context, error.location) : void 0,
+     message: error.message ? stripAnsi(error.message).split("\n")[0] : void 0,
+     stack: error.stack,
+     value: error.value
+   };
+ }
+
+ // src/cli/cmd-upload-playwright-json.ts
+ async function cmdUploadPlaywrightJson(relativePath, options) {
+   const fullPath = path2.resolve(relativePath);
+   if (!await fs2.access(fullPath, fs2.constants.F_OK).then(() => true).catch(() => false)) {
+     console.error(`Error: path ${fullPath} is not accessible`);
+     process.exit(1);
+   }
+   const text = await fs2.readFile(fullPath, "utf-8");
+   const playwrightJson = JSON.parse(text);
+   const { attachments, report, unaccessibleAttachmentPaths } = await PlaywrightJSONReport.parse(PlaywrightJSONReport.collectMetadata(), playwrightJson, {
+     extractAttachments: true
+   });
+   for (const unaccessibleAttachment of unaccessibleAttachmentPaths)
+     console.warn(`WARN: cannot access attachment ${unaccessibleAttachment}`);
+   const uploader = new ReportUploader({
+     flakinessAccessToken: options.accessToken,
+     flakinessEndpoint: options.endpoint
+   });
+   const upload = uploader.createUpload(report, attachments);
+   const uploadResult = await upload.upload();
+   if (!uploadResult.success) {
+     console.log(`[flakiness.io] X Failed to upload to ${options.endpoint}: ${uploadResult.message}`);
+   } else {
+     console.log(`[flakiness.io] \u2713 Report uploaded ${uploadResult.reportUrl ?? uploadResult.message ?? ""}`);
+   }
+ }
+ export {
+   cmdUploadPlaywrightJson
+ };
+ //# sourceMappingURL=cmd-upload-playwright-json.js.map
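
The hunk above is the bundled `cmd-upload-playwright-json` entry, which parses a Playwright JSON report and uploads it via `ReportUploader`. As a minimal usage sketch (not part of the diff): the import specifier, report path, environment variable, and endpoint URL below are placeholder assumptions; only the `cmdUploadPlaywrightJson(relativePath, { accessToken, endpoint })` signature comes from the code shown here.

// Hypothetical direct call to the exported command; all values are placeholders.
import { cmdUploadPlaywrightJson } from "./cmd-upload-playwright-json.js";

await cmdUploadPlaywrightJson("./playwright-report.json", {
  accessToken: process.env.FLAKINESS_ACCESS_TOKEN,
  endpoint: "https://flakiness.io"
});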
@@ -0,0 +1,169 @@
+ #!/usr/bin/env node
+
+ // src/cli/cmd-upload.ts
+ import { ReportUploader } from "@flakiness/sdk";
+ import chalk from "chalk";
+ import fs2 from "fs/promises";
+ import path2 from "path";
+
+ // src/utils.ts
+ import { ReportUtils } from "@flakiness/sdk";
+ import fs from "fs";
+ import http from "http";
+ import https from "https";
+ import path, { posix as posixPath, win32 as win32Path } from "path";
+ var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
+ function errorText(error) {
+   return FLAKINESS_DBG ? error.stack : error.message;
+ }
+ async function retryWithBackoff(job, backoff = []) {
+   for (const timeout of backoff) {
+     try {
+       return await job();
+     } catch (e) {
+       if (e instanceof AggregateError)
+         console.error(`[flakiness.io err]`, errorText(e.errors[0]));
+       else if (e instanceof Error)
+         console.error(`[flakiness.io err]`, errorText(e));
+       else
+         console.error(`[flakiness.io err]`, e);
+       await new Promise((x) => setTimeout(x, timeout));
+     }
+   }
+   return await job();
+ }
+ var httpUtils;
+ ((httpUtils2) => {
+   function createRequest({ url, method = "get", headers = {} }) {
+     let resolve;
+     let reject;
+     const responseDataPromise = new Promise((a, b) => {
+       resolve = a;
+       reject = b;
+     });
+     const protocol = url.startsWith("https") ? https : http;
+     headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
+     const request = protocol.request(url, { method, headers }, (res) => {
+       const chunks = [];
+       res.on("data", (chunk) => chunks.push(chunk));
+       res.on("end", () => {
+         if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
+           resolve(Buffer.concat(chunks));
+         else
+           reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
+       });
+       res.on("error", (error) => reject(error));
+     });
+     request.on("error", reject);
+     return { request, responseDataPromise };
+   }
+   httpUtils2.createRequest = createRequest;
+   async function getBuffer(url, backoff) {
+     return await retryWithBackoff(async () => {
+       const { request, responseDataPromise } = createRequest({ url });
+       request.end();
+       return await responseDataPromise;
+     }, backoff);
+   }
+   httpUtils2.getBuffer = getBuffer;
+   async function getText(url, backoff) {
+     const buffer = await getBuffer(url, backoff);
+     return buffer.toString("utf-8");
+   }
+   httpUtils2.getText = getText;
+   async function getJSON(url) {
+     return JSON.parse(await getText(url));
+   }
+   httpUtils2.getJSON = getJSON;
+   async function postText(url, text, backoff) {
+     const headers = {
+       "Content-Type": "application/json",
+       "Content-Length": Buffer.byteLength(text) + ""
+     };
+     return await retryWithBackoff(async () => {
+       const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
+       request.write(text);
+       request.end();
+       return await responseDataPromise;
+     }, backoff);
+   }
+   httpUtils2.postText = postText;
+   async function postJSON(url, json, backoff) {
+     const buffer = await postText(url, JSON.stringify(json), backoff);
+     return JSON.parse(buffer.toString("utf-8"));
+   }
+   httpUtils2.postJSON = postJSON;
+ })(httpUtils || (httpUtils = {}));
+ var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
+ async function resolveAttachmentPaths(report, attachmentsDir) {
+   const attachmentFiles = await listFilesRecursively(attachmentsDir);
+   const filenameToPath = new Map(attachmentFiles.map((file) => [path.basename(file), file]));
+   const attachmentIdToPath = /* @__PURE__ */ new Map();
+   const missingAttachments = /* @__PURE__ */ new Set();
+   ReportUtils.visitTests(report, (test) => {
+     for (const attempt of test.attempts) {
+       for (const attachment of attempt.attachments ?? []) {
+         const attachmentPath = filenameToPath.get(attachment.id);
+         if (!attachmentPath) {
+           missingAttachments.add(attachment.id);
+         } else {
+           attachmentIdToPath.set(attachment.id, {
+             contentType: attachment.contentType,
+             id: attachment.id,
+             path: attachmentPath
+           });
+         }
+       }
+     }
+   });
+   return { attachmentIdToPath, missingAttachments: Array.from(missingAttachments) };
+ }
+ async function listFilesRecursively(dir, result = []) {
+   const entries = await fs.promises.readdir(dir, { withFileTypes: true });
+   for (const entry of entries) {
+     const fullPath = path.join(dir, entry.name);
+     if (entry.isDirectory())
+       await listFilesRecursively(fullPath, result);
+     else
+       result.push(fullPath);
+   }
+   return result;
+ }
+ var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
+ var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
+
+ // src/cli/cmd-upload.ts
+ var warn = (txt) => console.warn(chalk.yellow(`[flakiness.io] WARN: ${txt}`));
+ var err = (txt) => console.error(chalk.red(`[flakiness.io] Error: ${txt}`));
+ var log = (txt) => console.log(`[flakiness.io] ${txt}`);
+ async function cmdUpload(relativePaths, options) {
+   const uploader = new ReportUploader({
+     flakinessAccessToken: options.accessToken,
+     flakinessEndpoint: options.endpoint
+   });
+   for (const relativePath of relativePaths) {
+     const fullPath = path2.resolve(relativePath);
+     if (!await fs2.access(fullPath, fs2.constants.F_OK).then(() => true).catch(() => false)) {
+       err(`Path ${fullPath} is not accessible!`);
+       process.exit(1);
+     }
+     const text = await fs2.readFile(fullPath, "utf-8");
+     const report = JSON.parse(text);
+     const attachmentsDir = options.attachmentsDir ?? path2.dirname(fullPath);
+     const { attachmentIdToPath, missingAttachments } = await resolveAttachmentPaths(report, attachmentsDir);
+     if (missingAttachments.length) {
+       warn(`Missing ${missingAttachments.length} attachments`);
+     }
+     const upload = uploader.createUpload(report, Array.from(attachmentIdToPath.values()));
+     const uploadResult = await upload.upload();
+     if (!uploadResult.success) {
+       err(`Failed to upload to ${options.endpoint}: ${uploadResult.message}`);
+     } else {
+       log(`\u2713 Uploaded ${uploadResult.reportUrl ?? uploadResult.message ?? ""}`);
+     }
+   }
+ }
+ export {
+   cmdUpload
+ };
+ //# sourceMappingURL=cmd-upload.js.map
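
The second hunk is the bundled `cmd-upload` entry, which uploads already-serialized flakiness reports and resolves attachment files by matching attachment ids against file names under an attachments directory. A minimal usage sketch under the same caveats: the import specifier and values are placeholders; only the `cmdUpload(relativePaths, { accessToken, endpoint, attachmentsDir })` signature comes from the code shown here.

// Hypothetical direct call to the exported command; all values are placeholders.
import { cmdUpload } from "./cmd-upload.js";

await cmdUpload(["./flakiness-report.json"], {
  accessToken: process.env.FLAKINESS_ACCESS_TOKEN,
  endpoint: "https://flakiness.io",
  // attachmentsDir is optional; per the code above it defaults to each report's directory.
  attachmentsDir: "./attachments"
});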