@flakiness/report 0.92.0 → 0.95.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/reportUploader.js DELETED
@@ -1,115 +0,0 @@
- import fs from "fs";
- import { URL } from "url";
- import { brotliCompressSync } from "zlib";
- import { brotliCompressAsync, httpUtils, retryWithBackoff } from "./utils.js";
- class ReportUploader {
-   static optionsFromEnv(overrides) {
-     const flakinessAccessToken = overrides?.flakinessAccessToken ?? process.env["FLAKINESS_ACCESS_TOKEN"];
-     if (!flakinessAccessToken)
-       return void 0;
-     const flakinessEndpoint = overrides?.flakinessEndpoint ?? process.env["FLAKINESS_ENDPOINT"] ?? "https://flakiness.io";
-     return { flakinessAccessToken, flakinessEndpoint };
-   }
-   static async upload(options) {
-     const uploaderOptions = ReportUploader.optionsFromEnv(options);
-     if (!uploaderOptions) {
-       options.log?.(`[flakiness.io] Uploading skipped since no FLAKINESS_ACCESS_TOKEN is specified`);
-       return void 0;
-     }
-     const uploader = new ReportUploader(uploaderOptions);
-     const upload = uploader.createUpload(options.report, options.attachments);
-     const uploadResult = await upload.upload();
-     if (!uploadResult.success) {
-       options.log?.(`[flakiness.io] X Failed to upload to ${uploaderOptions.flakinessEndpoint}: ${uploadResult.message}`);
-       return { errorMessage: uploadResult.message };
-     }
-     options.log?.(`[flakiness.io] \u2713 Report uploaded ${uploadResult.message ?? ""}`);
-     if (uploadResult.reportUrl)
-       options.log?.(`[flakiness.io] ${uploadResult.reportUrl}`);
-   }
-   _options;
-   constructor(options) {
-     this._options = options;
-   }
-   createUpload(report, attachments) {
-     const upload = new ReportUpload(this._options, report, attachments);
-     return upload;
-   }
- }
- const HTTP_BACKOFF = [100, 500, 1e3, 1e3, 1e3, 1e3];
- class ReportUpload {
-   _report;
-   _attachments;
-   _options;
-   constructor(options, report, attachments) {
-     this._options = options;
-     this._report = report;
-     this._attachments = attachments;
-   }
-   async upload(options) {
-     const response = await httpUtils.postJSON(new URL("/api/run/startUpload", this._options.flakinessEndpoint).toString(), {
-       flakinessAccessToken: this._options.flakinessAccessToken,
-       attachmentIds: this._attachments.map((attachment) => attachment.id)
-     }, HTTP_BACKOFF).then((result) => ({ result, error: void 0 })).catch((e) => ({ error: e, result: void 0 }));
-     if (response?.error)
-       return { success: false, message: `flakiness.io returned error: ${response.error.message}` };
-     await Promise.all([
-       this._uploadReport(JSON.stringify(this._report), response.result.report_upload_url, options?.syncCompression ?? false),
-       ...this._attachments.map((attachment) => {
-         const uploadURL = response.result.attachment_upload_urls[attachment.id];
-         if (!uploadURL)
-           throw new Error("Internal error: missing upload URL for attachment!");
-         return this._uploadAttachment(attachment, uploadURL);
-       })
-     ]);
-     const response2 = await httpUtils.postJSON(new URL("/api/run/completeUpload", this._options.flakinessEndpoint).toString(), {
-       upload_token: response.result.upload_token
-     }, HTTP_BACKOFF).then((result) => ({ result, error: void 0 })).catch((e) => ({ error: e, result: void 0 }));
-     const url = response2?.result.report_url ? new URL(response2?.result.report_url, this._options.flakinessEndpoint).toString() : void 0;
-     return { success: true, reportUrl: url };
-   }
-   async _uploadReport(data, uploadUrl, syncCompression) {
-     const compressed = syncCompression ? brotliCompressSync(data) : await brotliCompressAsync(data);
-     const headers = {
-       "Content-Type": "application/json",
-       "Content-Length": Buffer.byteLength(compressed) + "",
-       "Content-Encoding": "br"
-     };
-     await retryWithBackoff(async () => {
-       const { request, responseDataPromise } = httpUtils.createRequest({
-         url: uploadUrl,
-         headers,
-         method: "put"
-       });
-       request.write(compressed);
-       request.end();
-       await responseDataPromise;
-     }, HTTP_BACKOFF);
-   }
-   async _uploadAttachment(attachment, uploadUrl) {
-     const bytesLength = attachment.path ? (await fs.promises.stat(attachment.path)).size : attachment.body ? Buffer.byteLength(attachment.body) : 0;
-     const headers = {
-       "Content-Type": attachment.contentType,
-       "Content-Length": bytesLength + ""
-     };
-     await retryWithBackoff(async () => {
-       const { request, responseDataPromise } = httpUtils.createRequest({
-         url: uploadUrl,
-         headers,
-         method: "put"
-       });
-       if (attachment.path) {
-         fs.createReadStream(attachment.path).pipe(request);
-       } else {
-         if (attachment.body)
-           request.write(attachment.body);
-         request.end();
-       }
-       await responseDataPromise;
-     }, HTTP_BACKOFF);
-   }
- }
- export {
-   ReportUploader
- };
- //# sourceMappingURL=reportUploader.js.map
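
Note: the deleted ReportUploader resolved its credentials from FLAKINESS_ACCESS_TOKEN / FLAKINESS_ENDPOINT, compressed the report with brotli, and PUT it along with any attachments to signed upload URLs obtained from /api/run/startUpload. A minimal caller sketch inferred from the deleted source above; the deep import path and the report/attachment field values are illustrative assumptions, not part of the published package:

// Sketch only: import path and payload shapes are assumptions inferred from the code above.
import { ReportUploader } from "@flakiness/report/lib/reportUploader.js";

const result = await ReportUploader.upload({
  log: (line) => console.log(line), // optional progress logger
  report: { /* serialized test-run data, uploaded as brotli-compressed JSON */ },
  attachments: [
    // Each attachment carries an id, a contentType, and either a path or a body.
    { id: "trace-1", contentType: "application/zip", path: "./trace.zip" },
  ],
  // If omitted, the token and endpoint fall back to FLAKINESS_ACCESS_TOKEN / FLAKINESS_ENDPOINT.
  flakinessAccessToken: process.env.FLAKINESS_ACCESS_TOKEN,
});
// result is undefined on success or when the upload is skipped, and { errorMessage } on failure.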
package/lib/systemUtilizationSampler.js DELETED
@@ -1,70 +0,0 @@
- import { spawnSync } from "child_process";
- import os from "os";
- function getAvailableMemMacOS() {
-   const lines = spawnSync("vm_stat", { encoding: "utf8" }).stdout.trim().split("\n");
-   const pageSize = parseInt(lines[0].match(/page size of (\d+) bytes/)[1], 10);
-   if (isNaN(pageSize)) {
-     console.warn("[flakiness.io] Error detecting macos page size");
-     return 0;
-   }
-   let totalFree = 0;
-   for (const line of lines) {
-     if (/Pages (free|inactive|speculative):/.test(line)) {
-       const match = line.match(/\d+/);
-       if (match)
-         totalFree += parseInt(match[0], 10);
-     }
-   }
-   return totalFree * pageSize;
- }
- function getSystemUtilization() {
-   let idleTicks = 0;
-   let totalTicks = 0;
-   for (const cpu of os.cpus()) {
-     totalTicks += cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.irq + cpu.times.idle;
-     idleTicks += cpu.times.idle;
-   }
-   return {
-     idleTicks,
-     totalTicks,
-     timestamp: Date.now(),
-     freeBytes: os.platform() === "darwin" ? getAvailableMemMacOS() : os.freemem()
-   };
- }
- function toFKUtilization(sample, previous) {
-   const idleTicks = sample.idleTicks - previous.idleTicks;
-   const totalTicks = sample.totalTicks - previous.totalTicks;
-   const cpuUtilization = Math.floor((1 - idleTicks / totalTicks) * 1e4) / 100;
-   const memoryUtilization = Math.floor((1 - sample.freeBytes / os.totalmem()) * 1e4) / 100;
-   return {
-     cpuUtilization,
-     memoryUtilization,
-     dts: sample.timestamp - previous.timestamp
-   };
- }
- class SystemUtilizationSampler {
-   result;
-   _lastSample = getSystemUtilization();
-   _timer;
-   constructor() {
-     this.result = {
-       samples: [],
-       startTimestamp: this._lastSample.timestamp,
-       totalMemoryBytes: os.totalmem()
-     };
-     this._timer = setTimeout(this._addSample.bind(this), 50);
-   }
-   _addSample() {
-     const sample = getSystemUtilization();
-     this.result.samples.push(toFKUtilization(sample, this._lastSample));
-     this._lastSample = sample;
-     this._timer = setTimeout(this._addSample.bind(this), 1e3);
-   }
-   dispose() {
-     clearTimeout(this._timer);
-   }
- }
- export {
-   SystemUtilizationSampler
- };
- //# sourceMappingURL=systemUtilizationSampler.js.map
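
Note: the deleted SystemUtilizationSampler starts sampling shortly after construction (first sample after roughly 50 ms, then once per second) and accumulates CPU and memory utilization until dispose() is called. A usage sketch under the same assumption about the deep import path:

// Sketch only: the import path is an assumption.
import { SystemUtilizationSampler } from "@flakiness/report/lib/systemUtilizationSampler.js";

const sampler = new SystemUtilizationSampler(); // schedules the first sample ~50 ms from now
// ... run the workload being measured ...
sampler.dispose(); // clears the pending timer
// sampler.result is { samples, startTimestamp, totalMemoryBytes };
// each sample is { cpuUtilization, memoryUtilization, dts } (percent, percent, milliseconds).
console.log(`collected ${sampler.result.samples.length} samples`);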
package/lib/utils.js DELETED
@@ -1,322 +0,0 @@
- import assert from "assert";
- import { spawnSync } from "child_process";
- import crypto from "crypto";
- import fs from "fs";
- import http from "http";
- import https from "https";
- import os from "os";
- import { posix as posixPath, win32 as win32Path } from "path";
- import util from "util";
- import zlib from "zlib";
- const gzipAsync = util.promisify(zlib.gzip);
- const gunzipAsync = util.promisify(zlib.gunzip);
- const gunzipSync = zlib.gunzipSync;
- const brotliCompressAsync = util.promisify(zlib.brotliCompress);
- const brotliCompressSync = zlib.brotliCompressSync;
- async function existsAsync(aPath) {
-   return fs.promises.stat(aPath).then(() => true).catch((e) => false);
- }
- function extractEnvConfiguration() {
-   const ENV_PREFIX = "FK_ENV_";
-   return Object.fromEntries(
-     Object.entries(process.env).filter(([key]) => key.toUpperCase().startsWith(ENV_PREFIX.toUpperCase())).map(([key, value]) => [key.substring(ENV_PREFIX.length).toLowerCase(), (value ?? "").trim().toLowerCase()])
-   );
- }
- function sha1File(filePath) {
-   return new Promise((resolve, reject) => {
-     const hash = crypto.createHash("sha1");
-     const stream = fs.createReadStream(filePath);
-     stream.on("data", (chunk) => {
-       hash.update(chunk);
-     });
-     stream.on("end", () => {
-       resolve(hash.digest("hex"));
-     });
-     stream.on("error", (err) => {
-       reject(err);
-     });
-   });
- }
- function sha1Buffer(data) {
-   const hash = crypto.createHash("sha1");
-   hash.update(data);
-   return hash.digest("hex");
- }
- async function retryWithBackoff(job, backoff = []) {
-   for (const timeout of backoff) {
-     try {
-       return await job();
-     } catch (e) {
-       if (e instanceof AggregateError)
-         console.error(`[flakiness.io err]`, e.errors[0].message);
-       else if (e instanceof Error)
-         console.error(`[flakiness.io err]`, e.message);
-       else
-         console.error(`[flakiness.io err]`, e);
-       await new Promise((x) => setTimeout(x, timeout));
-     }
-   }
-   return await job();
- }
- var httpUtils;
- ((httpUtils2) => {
-   function createRequest({ url, method = "get", headers = {} }) {
-     let resolve;
-     let reject;
-     const responseDataPromise = new Promise((a, b) => {
-       resolve = a;
-       reject = b;
-     });
-     const protocol = url.startsWith("https") ? https : http;
-     const request = protocol.request(url, { method, headers }, (res) => {
-       const chunks = [];
-       res.on("data", (chunk) => chunks.push(chunk));
-       res.on("end", () => {
-         if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
-           resolve(Buffer.concat(chunks));
-         else
-           reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
-       });
-       res.on("error", (error) => reject(error));
-     });
-     request.on("error", reject);
-     return { request, responseDataPromise };
-   }
-   httpUtils2.createRequest = createRequest;
-   async function getBuffer(url, backoff) {
-     return await retryWithBackoff(async () => {
-       const { request, responseDataPromise } = createRequest({ url });
-       request.end();
-       return await responseDataPromise;
-     }, backoff);
-   }
-   httpUtils2.getBuffer = getBuffer;
-   async function getText(url, backoff) {
-     const buffer = await getBuffer(url, backoff);
-     return buffer.toString("utf-8");
-   }
-   httpUtils2.getText = getText;
-   async function getJSON(url) {
-     return JSON.parse(await getText(url));
-   }
-   httpUtils2.getJSON = getJSON;
-   async function postText(url, text, backoff) {
-     const headers = {
-       "Content-Type": "application/json",
-       "Content-Length": Buffer.byteLength(text) + ""
-     };
-     return await retryWithBackoff(async () => {
-       const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
-       request.write(text);
-       request.end();
-       return await responseDataPromise;
-     }, backoff);
-   }
-   httpUtils2.postText = postText;
-   async function postJSON(url, json, backoff) {
-     const buffer = await postText(url, JSON.stringify(json), backoff);
-     return JSON.parse(buffer.toString("utf-8"));
-   }
-   httpUtils2.postJSON = postJSON;
- })(httpUtils || (httpUtils = {}));
- const ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
- function stripAnsi(str) {
-   return str.replace(ansiRegex, "");
- }
- function shell(command, args, options) {
-   try {
-     const result = spawnSync(command, args, { encoding: "utf-8", ...options });
-     if (result.status !== 0) {
-       console.log(result);
-       console.log(options);
-       return void 0;
-     }
-     return result.stdout.trim();
-   } catch (e) {
-     console.log(e);
-     return void 0;
-   }
- }
- function readLinuxOSRelease() {
-   const osReleaseText = fs.readFileSync("/etc/os-release", "utf-8");
-   return new Map(osReleaseText.toLowerCase().split("\n").filter((line) => line.includes("=")).map((line) => {
-     line = line.trim();
-     let [key, value] = line.split("=");
-     if (value.startsWith('"') && value.endsWith('"'))
-       value = value.substring(1, value.length - 1);
-     return [key, value];
-   }));
- }
- function osLinuxInfo() {
-   const arch = shell(`uname`, [`-m`]);
-   const osReleaseMap = readLinuxOSRelease();
-   const name = osReleaseMap.get("name") ?? shell(`uname`);
-   const version = osReleaseMap.get("version_id");
-   return { name, arch, version };
- }
- function osDarwinInfo() {
-   const name = "macos";
-   const arch = shell(`uname`, [`-m`]);
-   const version = shell(`sw_vers`, [`-productVersion`]);
-   return { name, arch, version };
- }
- function osWinInfo() {
-   const name = "win";
-   const arch = process.arch;
-   const version = os.release();
-   return { name, arch, version };
- }
- function getOSInfo() {
-   if (process.platform === "darwin")
-     return osDarwinInfo();
-   if (process.platform === "win32")
-     return osWinInfo();
-   return osLinuxInfo();
- }
- function inferRunUrl() {
-   if (process.env.GITHUB_REPOSITORY && process.env.GITHUB_RUN_ID)
-     return `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`;
-   return void 0;
- }
- function parseStringDate(dateString) {
-   return +new Date(dateString);
- }
- function gitCommitInfo(gitRepo) {
-   const sha = shell(`git`, ["rev-parse", "HEAD"], {
-     cwd: gitRepo,
-     encoding: "utf-8"
-   });
-   assert(sha, `FAILED: git rev-parse HEAD @ ${gitRepo}`);
-   return sha.trim();
- }
- function computeGitRoot(somePathInsideGitRepo) {
-   const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
-     cwd: somePathInsideGitRepo,
-     encoding: "utf-8"
-   });
-   assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
-   return normalizePath(root);
- }
- const IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
- const IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
- function normalizePath(aPath) {
-   if (IS_WIN32_PATH.test(aPath)) {
-     aPath = aPath.split(win32Path.sep).join(posixPath.sep);
-   }
-   if (IS_ALMOST_POSIX_PATH.test(aPath))
-     return "/" + aPath[0] + aPath.substring(2);
-   return aPath;
- }
- function getCallerLocation(gitRoot, offset = 0) {
-   const err = new Error();
-   const stack = err.stack?.split("\n");
-   const caller = stack?.[2 + offset]?.trim();
-   const match = caller?.match(/\((.*):(\d+):(\d+)\)$/);
-   if (!match)
-     return void 0;
-   const [, filePath, line, column] = match;
-   return {
-     file: gitFilePath(gitRoot, normalizePath(filePath)),
-     line: Number(line),
-     column: Number(column)
-   };
- }
- function parseStackLocations() {
-   const err = new Error();
-   const stack = err.stack?.split("\n").slice(2);
-   if (!stack)
-     return [];
-   const result = [];
-   for (const caller of stack) {
-     const match = caller.trim().match(/\((.*):(\d+):(\d+)\)$/);
-     if (!match)
-       continue;
-     const [, file, line, column] = match;
-     result.push({ file, line, column });
-   }
-   return result;
- }
- function gitFilePath(gitRoot, absolutePath) {
-   return posixPath.relative(gitRoot, absolutePath);
- }
- function parseDurationMS(value) {
-   if (isNaN(value))
-     throw new Error("Duration cannot be NaN");
-   if (value < 0)
-     throw new Error(`Duration cannot be less than 0, found ${value}`);
-   return value | 0;
- }
- function createEnvironment(options) {
-   const osInfo = getOSInfo();
-   return {
-     name: options.name,
-     systemData: {
-       osArch: osInfo.arch,
-       osName: osInfo.name,
-       osVersion: osInfo.version
-     },
-     userSuppliedData: {
-       ...extractEnvConfiguration(),
-       ...options.userSuppliedData ?? {}
-     }
-   };
- }
- function createEnvironments(projects) {
-   const envConfiguration = extractEnvConfiguration();
-   const osInfo = getOSInfo();
-   let uniqueNames = /* @__PURE__ */ new Set();
-   const result = /* @__PURE__ */ new Map();
-   for (const project of projects) {
-     let defaultName = project.name;
-     if (!defaultName.trim())
-       defaultName = "anonymous";
-     let name = defaultName;
-     for (let i = 2; uniqueNames.has(name); ++i)
-       name = `${defaultName}-${i}`;
-     uniqueNames.add(defaultName);
-     result.set(project, {
-       name,
-       systemData: {
-         osArch: osInfo.arch,
-         osName: osInfo.name,
-         osVersion: osInfo.version
-       },
-       userSuppliedData: {
-         ...envConfiguration,
-         ...project.metadata
-       },
-       opaqueData: {
-         project
-       }
-     });
-   }
-   return result;
- }
- export {
-   brotliCompressAsync,
-   brotliCompressSync,
-   computeGitRoot,
-   createEnvironment,
-   createEnvironments,
-   existsAsync,
-   extractEnvConfiguration,
-   getCallerLocation,
-   getOSInfo,
-   gitCommitInfo,
-   gitFilePath,
-   gunzipAsync,
-   gunzipSync,
-   gzipAsync,
-   httpUtils,
-   inferRunUrl,
-   normalizePath,
-   parseDurationMS,
-   parseStackLocations,
-   parseStringDate,
-   retryWithBackoff,
-   sha1Buffer,
-   sha1File,
-   shell,
-   stripAnsi
- };
- //# sourceMappingURL=utils.js.map
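
Note: among the deleted helpers, retryWithBackoff runs the job once per backoff entry, sleeping that many milliseconds after each failure, and then makes one final attempt whose error is allowed to propagate; httpUtils layers buffered HTTP requests and JSON handling on top of it. A usage sketch; the deep import path and the URL are illustrative assumptions:

// Sketch only: import path and URL are illustrative.
import { retryWithBackoff, httpUtils } from "@flakiness/report/lib/utils.js";

// Up to three caught attempts with 100 ms / 500 ms / 1 s pauses, then one final uncaught attempt.
const payload = await retryWithBackoff(
  () => httpUtils.getJSON("https://example.com/status.json"),
  [100, 500, 1000]
);
console.log(payload);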
File without changes