@mablhq/mabl-cli 1.55.2 → 1.56.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api/featureSet.js +4 -0
- package/api/mablApiClient.js +33 -42
- package/browserLauncher/playwrightBrowserLauncher/chromium/chromiumPageDelegate.js +6 -0
- package/browserLauncher/playwrightBrowserLauncher/nonChromium/nonChromiumAbstractPageDelegate.js +3 -0
- package/browserLauncher/playwrightBrowserLauncher/playwrightFrame.js +16 -9
- package/browserLauncher/playwrightBrowserLauncher/playwrightPage.js +3 -0
- package/browserTestMonitoring/cloudMonitoringPerformanceMetrics.js +277 -0
- package/browserTestMonitoring/distributions.js +44 -0
- package/browserTestMonitoring/metricsRecorder.js +112 -0
- package/browserTestMonitoring/types.js +8 -0
- package/coreWebVitals/index.js +0 -8
- package/domUtil/index.js +1 -1
- package/execution/index.js +1 -1
- package/execution/index.js.LICENSE.txt +27 -1
- package/mablApi/index.js +1 -1
- package/mablscriptFind/index.js +1 -1
- package/mablscriptFind/index.js.LICENSE.txt +8 -0
- package/package.json +11 -5
- package/proxy/index.js +1 -1
- package/resources/coreWebVitals.js +1 -1
- package/resources/mablFind.js +1 -1
- package/util/CloudStorageWriter.js +45 -0
- package/util/IdentifierUtil.js +57 -0
- package/util/TestOutputWriter.js +67 -0
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const storage_1 = require("@google-cloud/storage");
|
|
4
|
+
const IdentifierUtil_1 = require("./IdentifierUtil");
|
|
5
|
+
const loggingProvider_1 = require("../providers/logging/loggingProvider");
|
|
6
|
+
const MAX_PUT_ATTEMPTS = 3;
|
|
7
|
+
const AUTO_COMPRESS_MIME_TYPES = new Set([
|
|
8
|
+
'application/json',
|
|
9
|
+
'text/plain',
|
|
10
|
+
]);
|
|
11
|
+
/**
 * Writes execution artifacts to the workspace's Google Cloud Storage bucket,
 * retrying transient failures up to MAX_PUT_ATTEMPTS times.
 */
class CloudStorageWriter {
    /**
     * @param {string} projectId - GCP project id used as the bucket-name prefix.
     * @param {string} workspaceId - workspace id; converted via toCloudSafeId to
     *   form the bucket name `${projectId}-workspace-<safeId>`.
     */
    constructor(projectId, workspaceId) {
        this.bucket = new storage_1.Storage().bucket(`${projectId}-workspace-${(0, IdentifierUtil_1.toCloudSafeId)(workspaceId)}`);
    }
    /** @returns {string} the name of the target GCS bucket. */
    bucketName() {
        return this.bucket.name;
    }
    /**
     * Writes `data` to `path` in the bucket, retrying up to MAX_PUT_ATTEMPTS
     * times. Each failure is logged; the final error is rethrown.
     * @param {string} path - object path within the bucket.
     * @param {string} contentType - MIME type stored on the object.
     * @param {Buffer|string} data - object contents.
     */
    async write(path, contentType, data) {
        let attempt = 0;
        while (attempt++ < MAX_PUT_ATTEMPTS) {
            try {
                return await this.writeOnce(path, contentType, data);
            }
            catch (error) {
                loggingProvider_1.logger.info(`Error writing file to GCS at ${path} (attempt ${attempt} of ${MAX_PUT_ATTEMPTS})`, error);
                if (attempt === MAX_PUT_ATTEMPTS) {
                    throw error;
                }
            }
        }
    }
    /** Performs a single (non-retried) save to GCS. */
    writeOnce(path, contentType, data) {
        return this.bucket.file(path).save(data, {
            contentType,
            metadata: {
                // BUG FIX: the timestamp was previously written under the reserved
                // 'Content-Type' metadata key, conflicting with the `contentType`
                // option above. GCS's `customTime` field is the dedicated slot for a
                // user-supplied ISO-8601 timestamp. NOTE(review): confirm the
                // originally intended metadata key upstream.
                customTime: getNowIsoTimestamp(),
            },
            // Compress text-like payloads in storage/transit.
            gzip: AUTO_COMPRESS_MIME_TYPES.has(contentType.toLowerCase()),
        });
    }
}
|
|
42
|
+
// CommonJS default export of the writer class.
exports.default = CloudStorageWriter;
|
|
43
|
+
/**
 * @returns {string} the current time as an ISO-8601 UTC timestamp,
 *   e.g. "2024-01-31T12:34:56.789Z".
 */
function getNowIsoTimestamp() {
    const now = new Date();
    return now.toISOString();
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.toCloudSafeId = void 0;
|
|
4
|
+
// Recognized identifier formats.
// NOTE(review): legacyUuidPattern is loose — it accepts any 36-char mix of hex
// digits and dashes, not only the 8-4-4-4-12 UUID layout; confirm whether
// stricter validation is intended.
const legacyUuidPattern = /^[a-fA-F0-9-]{36}$/;
const untypedUidPattern = /^[a-zA-Z0-9_-]+$/;
const typedUidPattern = /^(?<id>[a-zA-Z0-9_-]{22})-(?<type>[a-z]+)$/;
// One-byte discriminators prepended to the encoded id so the three formats can
// never collide after conversion.
const idTypes = {
    uuid: 1,
    untypedUid: 2,
    typedUid: 3,
};
function isLegacyUuid(id) {
    return legacyUuidPattern.test(id);
}
function isUntypedUid(id) {
    return untypedUidPattern.test(id);
}
function isTypedUid(id) {
    return typedUidPattern.test(id);
}
// Decodes a URL-safe base64 string ('-'/'_' alphabet) into raw bytes.
function decodeUrlSafeBase64(value) {
    const standardBase64 = value.replace(/-/g, '+').replace(/_/g, '/');
    return Buffer.from(standardBase64, 'base64');
}
// Prepends the one-byte format discriminator and renders the result as hex.
function encodeWithDiscriminator(typeCode, ...buffers) {
    return Buffer.concat([Buffer.alloc(1, typeCode), ...buffers]).toString('hex');
}
function legacyUuidToHex(id) {
    const rawBytes = Buffer.from(id.toLowerCase().replace(/-/g, ''), 'hex');
    return encodeWithDiscriminator(idTypes.uuid, rawBytes);
}
function untypedUidToHex(id) {
    return encodeWithDiscriminator(idTypes.untypedUid, decodeUrlSafeBase64(id));
}
function typedUidToHex(typedUid) {
    const { id, type } = typedUid.match(typedUidPattern).groups;
    return encodeWithDiscriminator(idTypes.typedUid, decodeUrlSafeBase64(id), Buffer.from(type, 'utf8'));
}
// Re-encodes a hex string as base-36 (0-9a-z), yielding a compact string safe
// for cloud resource names.
function hexToBase36(hex) {
    return BigInt(`0x${hex}`).toString(36);
}
/**
 * Converts an identifier (legacy UUID, typed uid, or untyped uid) into a
 * deterministic, cloud-safe base-36 string. Check order matters: legacy UUIDs
 * and typed uids also match the untyped-uid pattern, so they are tested first.
 * @param {string} id - the identifier to convert.
 * @returns {string} base-36 encoding of the discriminated id bytes.
 * @throws {Error} when `id` is empty or matches none of the known formats.
 */
function toCloudSafeId(id) {
    if (!id) {
        throw new Error('id is required');
    }
    if (isLegacyUuid(id)) {
        return hexToBase36(legacyUuidToHex(id));
    }
    if (isTypedUid(id)) {
        return hexToBase36(typedUidToHex(id));
    }
    if (isUntypedUid(id)) {
        return hexToBase36(untypedUidToHex(id));
    }
    throw new Error(`Not a valid ID: [${id}]`);
}
|
|
57
|
+
// Named CommonJS export.
exports.toCloudSafeId = toCloudSafeId;
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// --- TypeScript compiler-emitted CommonJS interop helpers (do not edit) ---
// __createBinding/__setModuleDefault/__importStar copy an imported module's
// properties onto a namespace object with a `default` binding;
// __importDefault wraps non-ESM modules so `.default` access works uniformly.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
28
|
+
Object.defineProperty(exports, "__esModule", { value: true });
const CloudStorageWriter_1 = __importDefault(require("./CloudStorageWriter"));
const murmurhash = __importStar(require("murmurhash"));
const loggingProvider_1 = require("../providers/logging/loggingProvider");
// Number of base-36 hash characters used as the leading path segment.
const HASH_PREFIX_LENGTH = 4;
// Root "directory" for all execution output objects.
const EXECUTION_OUTPUT_PATH_PREFIX = 'execution-output';
// Maximum allowed object path length — presumably the GCS object-name limit
// (1024 bytes); TODO confirm.
const MAX_PATH_LENGTH = 1024;
|
|
35
|
+
/**
 * Writes test-run output artifacts to the workspace GCS bucket under a
 * deterministic, length-capped path derived from the run identifiers.
 */
class TestOutputWriterImpl {
    constructor(projectId, workspaceId, applicationId, planId, planRunId, testRunId) {
        this.storageWriter = new CloudStorageWriter_1.default(projectId, workspaceId);
        this.applicationId = applicationId;
        this.planId = planId;
        this.planRunId = planRunId;
        this.testRunId = testRunId;
    }
    /**
     * Serializes `data` as JSON and writes it under `name`.
     * @returns {Promise<string>} the `bucket/path` location of the object.
     */
    async writeObjectAsJson(name, data) {
        return this.write(name, 'application/json', Buffer.from(JSON.stringify(data)));
    }
    /**
     * Writes raw bytes under `name` with the given content type.
     * @param {string} name - logical artifact name (becomes the path suffix).
     * @param {string} contentType - MIME type stored on the object.
     * @param {Buffer} data - object contents.
     * @returns {Promise<string>} the `bucket/path` location of the object.
     */
    async write(name, contentType, data) {
        const path = this.nameToFullPath(name);
        await this.storageWriter.write(path, contentType, data);
        return `${this.storageWriter.bucketName()}/${path}`;
    }
    /**
     * Builds `execution-output/<hash>/<app>/<plan>/<planRun>/<testRun>/<name>`,
     * dropping leading characters of `name` if the result would exceed
     * MAX_PATH_LENGTH. The hash is computed on the untruncated path so equal
     * inputs always map to the same object.
     */
    nameToFullPath(name) {
        const directoryPath = `${this.applicationId}/${this.planId}/${this.planRunId}/${this.testRunId}`;
        const hash = this.generateHash(`${directoryPath}/${name}`);
        const fullPath = `${EXECUTION_OUTPUT_PATH_PREFIX}/${hash}/${directoryPath}/${name}`;
        if (fullPath.length <= MAX_PATH_LENGTH) {
            return fullPath;
        }
        loggingProvider_1.logger.info(`WARNING: Truncating path longer than ${MAX_PATH_LENGTH}: ${fullPath}`);
        const overage = fullPath.length - MAX_PATH_LENGTH;
        if (overage >= name.length) {
            // BUG FIX: previously, when the fixed prefix alone exceeded the cap,
            // `name.substring(overage)` collapsed to '' and the returned path
            // still exceeded MAX_PATH_LENGTH. Hard-truncate as a last resort so
            // the cap is always honored.
            return fullPath.substring(0, MAX_PATH_LENGTH);
        }
        const truncatedName = name.substring(overage);
        return `${EXECUTION_OUTPUT_PATH_PREFIX}/${hash}/${directoryPath}/${truncatedName}`;
    }
    // Short deterministic murmur3 hash, rendered base-36 and truncated to
    // HASH_PREFIX_LENGTH characters, used as a path prefix.
    generateHash(value) {
        return murmurhash.v3(value).toString(36).substring(0, HASH_PREFIX_LENGTH);
    }
}
|
|
67
|
+
// CommonJS default export of the writer implementation.
exports.default = TestOutputWriterImpl;
|