@itwin/build-tools 3.0.0-dev.72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.nycrc +30 -0
- package/CHANGELOG.md +1026 -0
- package/LICENSE.md +9 -0
- package/README.md +59 -0
- package/ThirdPartyNotices.md +15 -0
- package/bin/betools.js +148 -0
- package/mocha-reporter/index.js +76 -0
- package/package.json +64 -0
- package/scripts/config/paths.js +27 -0
- package/scripts/docs.js +100 -0
- package/scripts/extract-api-summary.js +91 -0
- package/scripts/extract-api.js +134 -0
- package/scripts/extract.js +72 -0
- package/scripts/pseudolocalize.js +107 -0
- package/scripts/rush/audit.js +99 -0
- package/scripts/rush/utils.js +39 -0
- package/scripts/utils/simpleSpawn.js +86 -0
- package/scripts/utils/validateTags.js +120 -0
- package/tsconfig-base.json +35 -0
- package/tsconfig.json +19 -0
package/scripts/extract-api.js
@@ -0,0 +1,134 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+"use strict";
+
+const { spawn, handleInterrupts } = require("./utils/simpleSpawn");
+const argv = require("yargs").argv;
+const fs = require("fs-extra");
+const path = require("path");
+const paths = require("./config/paths");
+
+if (argv.entry === undefined) {
+  console.log("No argument found");
+  return;
+}
+
+const isCI = (process.env.TF_BUILD);
+const entryPointFileName = argv.entry;
+const ignoreMissingTags = argv.ignoreMissingTags;
+
+// Resolves the root of the Rush repo
+const resolveRoot = relativePath => {
+  // recurse until you find the "rush.json"
+  const parts = paths.appSrc.split(path.sep).reverse();
+  while (parts.length > 0) {
+    const resolved = path.join(parts.slice().reverse().join(path.sep), "rush.json");
+    if (fs.existsSync(resolved))
+      return path.join(parts.slice().reverse().join(path.sep), relativePath);
+    parts.shift();
+  }
+  process.stderr.write("Root of the Rush repository not found. Missing a rush.json file?");
+};
+const rushCommon = resolveRoot("common");
+
+const config = {
+  $schema: "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
+  projectFolder: "../",
+  compiler: {
+    tsconfigFilePath: "<projectFolder>/tsconfig.json"
+  },
+  mainEntryPointFilePath: `${entryPointFileName}.d.ts`,
+  apiReport: {
+    enabled: true,
+    reportFolder: path.resolve(path.join(rushCommon, "/api")),
+    reportTempFolder: path.resolve(path.join(rushCommon, "/temp/api")),
+  },
+  docModel: {
+    enabled: false
+  },
+  dtsRollup: {
+    enabled: false
+  },
+  tsdocMetadata: {
+    enabled: false
+  },
+  messages: {
+    tsdocMessageReporting: {
+      default: {
+        logLevel: "none"
+      }
+    },
+    extractorMessageReporting: {
+      default: {
+        logLevel: "error",
+        addToApiReportFile: false
+      },
+      "ae-incompatible-release-tags": {
+        logLevel: "error",
+        addToApiReportFile: false
+      },
+      "ae-missing-release-tag": {
+        logLevel: ignoreMissingTags ? "none" : "error",
+        addToApiReportFile: false
+      },
+      "ae-internal-missing-underscore": {
+        logLevel: "none",
+        addToApiReportFile: false
+      },
+      "ae-forgotten-export": {
+        logLevel: "none",
+        addToApiReportFile: false
+      },
+      "ae-unresolved-inheritdoc-reference": {
+        logLevel: "error",
+        addToApiReportFile: true
+      },
+      "ae-unresolved-inheritdoc-base": {
+        logLevel: "error",
+        addToApiReportFile: true
+      }
+    }
+  }
+};
+
+if (!fs.existsSync("lib")) {
+  process.stderr.write("lib folder not found. Run `rush build` before extract-api");
+  process.exit(1);
+}
+
+const configFileName = `lib/${entryPointFileName}.json`;
+fs.writeFileSync(configFileName, JSON.stringify(config, null, 2));
+
+const args = [
+  "run",
+  "-c", configFileName
+];
+if (!isCI)
+  args.push("-l");
+
+spawn(require.resolve(".bin/api-extractor"), args).then((code) => {
+  if (fs.existsSync(configFileName))
+    fs.unlinkSync(configFileName);
+
+  // Only generate the extraction of the summary locally.
+  if (isCI)
+    process.exit(code);
+
+  const extractSummaryArgs = [
+    path.resolve(__dirname, "extract-api-summary.js"),
+    "--apiSignature", path.resolve(path.join(rushCommon, `/api/${entryPointFileName}.api.md`)),
+    "--outDir", path.resolve(path.join(rushCommon, "/api/summary")),
+  ];
+
+  spawn("node", extractSummaryArgs).then((code) => {
+    process.exit(code);
+  });
+
+  if (process.env.GENERATE_FULL_API_REPORT)
+    spawn("node", [...extractSummaryArgs, "--gatherFullReport"]).then((code) => {
+      process.exit(code);
+    });
+});
+handleInterrupts();
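The script above writes a temporary api-extractor config to `lib/<entry>.json`, runs the api-extractor CLI against it, and (outside CI) also generates a summary report. A minimal sketch of driving it directly, assuming the package is installed and `my-package` names an entry point with a matching `lib/my-package.d.ts`; the flag names come from the yargs usage above, while the entry name and the `require.resolve` paths are assumptions for illustration (normally `bin/betools.js` wraps this call):

```js
// Sketch only: spawn extract-api.js using the package's own spawn helper.
const { spawn, handleInterrupts } = require("@itwin/build-tools/scripts/utils/simpleSpawn");

spawn("node", [
  require.resolve("@itwin/build-tools/scripts/extract-api.js"),
  "--entry=my-package",      // expects lib/my-package.d.ts to exist
  "--ignoreMissingTags",     // downgrades ae-missing-release-tag from "error" to "none"
]).then((code) => process.exit(code));
handleInterrupts();
```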
package/scripts/extract.js
@@ -0,0 +1,72 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+"use strict";
+
+const argv = require("yargs").argv;
+const path = require("path");
+const paths = require("./config/paths");
+const fs = require("fs-extra");
+const readDirectory = require("recursive-readdir");
+
+const __PUBLISH_EXTRACT_START__ = "__PUBLISH_EXTRACT_START__";
+const __PUBLISH_EXTRACT_END__ = "__PUBLISH_EXTRACT_END__";
+
+const extractDir = (argv.extractFrom === undefined) ? paths.appTest : argv.extractFrom;
+const outDir = (argv.out === undefined) ? paths.libExtract : argv.out;
+const fileExt = (argv.fileExt === undefined) ? ["test.ts"] : argv.fileExt.split(",");
+const recursive = (argv.recursive === undefined) ? false : true;
+
+const ignoreFunction = (file, stats) => {
+  if (stats.isDirectory())
+    return !recursive; // don't ignore subdirectories in recursive mode
+  return !fileExt.some((ext) => file.endsWith(ext)); // don't ignore files with desired extensions
+};
+
+readDirectory(extractDir, [ignoreFunction], (error, inputFileNames) => {
+  for (const inputFileName of inputFileNames) {
+    const inputFileContents = fs.readFileSync(inputFileName, "utf8");
+
+    // Skip the file if there are no occurrences of the starting comment.
+    if (inputFileContents.indexOf(__PUBLISH_EXTRACT_START__) <= 0)
+      continue;
+
+    console.log("Processing: " + inputFileName);
+    const inputLines = inputFileContents.split("\n");
+    let outputFileName = undefined;
+    let outputLines = [];
+    let startIndent = 0;
+
+    for (const inputLine of inputLines) {
+      const startIndex = inputLine.indexOf(__PUBLISH_EXTRACT_START__);
+      if (startIndex > 0) {
+        startIndent = startIndex - 3;
+        if (outputFileName)
+          throw new Error("Nested " + __PUBLISH_EXTRACT_START__);
+
+        outputFileName = inputLine.substring(startIndex + __PUBLISH_EXTRACT_START__.length).trim();
+        if (0 === outputFileName.length)
+          throw new Error("Expected output file name after " + __PUBLISH_EXTRACT_START__);
+      } else if (inputLine.indexOf(__PUBLISH_EXTRACT_END__) > 0) {
+        if (!outputFileName)
+          throw new Error("Missing " + __PUBLISH_EXTRACT_START__);
+
+        if (!fs.existsSync(outDir))
+          fs.ensureDirSync(outDir);
+
+        const outputFilePath = path.join(outDir, outputFileName);
+        console.log("> Extracting into: " + outputFilePath);
+        fs.writeFileSync(outputFilePath, outputLines.join("\n"));
+
+        outputFileName = undefined;
+        outputLines = [];
+      } else if (outputFileName) {
+        outputLines.push(inputLine.substring(startIndent).replace(/\s+$/gm, ""));
+      }
+    }
+
+    if (outputFileName)
+      throw new Error("Missing " + __PUBLISH_EXTRACT_END__);
+  }
+});
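For context, the markers this script looks for live in ordinary comments inside the scanned files (`*.test.ts` by default, overridable via `--fileExt`), and the token after `__PUBLISH_EXTRACT_START__` becomes the output file name under the `--out` directory (`paths.libExtract` by default). A hedged example of what such a region might look like; all names below are made up:

```js
describe("Sample", () => {
  // __PUBLISH_EXTRACT_START__ Sample.computeValue.ts
  const value = computeSomething();   // lines between the markers are copied,
  reportTheValue(value);              // dedented, and right-trimmed into Sample.computeValue.ts
  // __PUBLISH_EXTRACT_END__
});
```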
package/scripts/pseudolocalize.js
@@ -0,0 +1,107 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+"use strict";
+
+/** Generates pseudo localization files from the en localization JSON files.
+ */
+
+function pseudoLocalizeObject(objIn) {
+  let objOut = {};
+  for (let prop in objIn) {
+    if (objIn.hasOwnProperty(prop)) {
+      if (typeof objIn[prop] === "string") {
+        objOut[prop] = pseudoLocalize(objIn[prop])
+      } else if (typeof objIn[prop] === "object") {
+        objOut[prop] = pseudoLocalizeObject(objIn[prop])
+      }
+    }
+  }
+  return objOut;
+}
+
+const replacements = {
+  A: "\u00C0\u00C1,\u00C2\u00C3,\u00C4\u00C5",
+  a: "\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5",
+  B: "\u00DF",
+  c: "\u00A2\u00E7",
+  C: "\u00C7\u0028",
+  D: "\u00D0",
+  E: "\u00C8\u00C9\u00CA\u00CB",
+  e: "\u00E8\u00E9\u00EA\u00EB",
+  I: "\u00CC\u00CD\u00CE\u00CF",
+  i: "\u00EC\u00ED\u00EE\u00EF",
+  L: "\u00A3",
+  N: "\u00D1",
+  n: "\u00F1",
+  O: "\u00D2\u00D3\u00D4\u00D5\u00D6",
+  o: "\u00F2\u00F3\u00F4\u00F5\u00F6\u00F8",
+  S: "\u0024\u00A7",
+  U: "\u00D9\u00DA\u00DB\u00DC",
+  u: "\u00B5\u00F9\u00FA\u00FB\u00FC",
+  x: "\u00D7",
+  Y: "\u00DD\u00A5",
+  y: "\u00FD\u00FF",
+};
+
+/** PseudoLocalizes a single string */
+function pseudoLocalize(inputString) {
+  let inReplace = 0;
+  let outString = "";
+  let replaceIndex = 0; // Note: the pseudoLocalize algorithm would normally use random, but here we cycle through because Javascript doesn't allow setting of the seed for Math.random.
+  for (let iChar = 0; iChar < inputString.length; iChar++) {
+    let thisChar = inputString.charAt(iChar);
+    let nextChar = ((iChar + 1) < inputString.length) ? inputString.charAt(iChar + 1) : 0;
+
+    // handle the {{ and }} delimiters for placeholders - don't want to do anything to characters in between.
+    if (('{' === thisChar) && ('{' === nextChar)) {
+      inReplace++;
+      iChar++;
+      outString = outString.concat("{{");
+    } else if (('}' === thisChar) && ('}' === nextChar) && (inReplace > 0)) {
+      inReplace--;
+      iChar++;
+      outString = outString.concat("}}");
+    } else {
+      let replacementChar = thisChar;
+      if (0 === inReplace) {
+        let replacementsForChar = replacements[thisChar];
+        if (undefined !== replacementsForChar) {
+          replacementChar = replacementsForChar.charAt(replaceIndex++ % replacementsForChar.length);
+        }
+      }
+      outString = outString.concat(replacementChar);
+    }
+  }
+  return outString;
+}
+
+function isJsonFile(fileName) {
+  return fileName.endsWith(".json");
+}
+
+const argv = require("yargs").argv;
+const paths = require("./config/paths");
+const fs = require("fs-extra");
+const path = require("path");
+
+const englishDir = (argv.englishDir === undefined) ? paths.appLocalesEnglish : argv.englishDir;
+const inputFileNames = fs.readdirSync(englishDir).filter(isJsonFile);
+const outDir = (argv.out === undefined) ? paths.appLocalesPseudolocalize : argv.out;
+try {
+  fs.mkdirpSync(outDir);
+} catch (e) {
+  console.log(e); // do nothing
+}
+
+for (const inputFileName of inputFileNames) {
+  const inputFilePath = path.join(englishDir, inputFileName);
+  const outputFileName = path.join(outDir, inputFileName);
+
+  let jsonIn = fs.readFileSync(inputFilePath, { encoding: "utf8" });
+  let objIn = JSON.parse(jsonIn);
+
+  let objOut = pseudoLocalizeObject(objIn);
+  fs.writeFileSync(outputFileName, JSON.stringify(objOut, null, 2));
+}
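Characters inside `{{...}}` placeholders are deliberately left untouched, so interpolation keys still resolve after pseudolocalization. A minimal invocation sketch; `--englishDir` and `--out` are the flags read above, while the directory names and the `require.resolve` path are assumptions for illustration:

```js
// Sketch only: pseudolocalize every *.json file in an English locale folder
// into a separate output folder.
const { spawnSync } = require("child_process");

spawnSync("node", [
  require.resolve("@itwin/build-tools/scripts/pseudolocalize.js"),
  "--englishDir=./public/locales/en",
  "--out=./public/locales/en-PSEUDO",
], { stdio: "inherit" });
```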
package/scripts/rush/audit.js
@@ -0,0 +1,99 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+
+const fs = require("fs");
+const path = require("path");
+const { spawn } = require("child_process");
+const { logBuildError, logBuildWarning, failBuild, throwAfterTimeout } = require("./utils");
+
+const rushCommonDir = path.join(__dirname, "../../../../common/");
+
+(async () => {
+  const commonTempDir = path.join(rushCommonDir, "config/rush");
+
+  // Npm audit will occasionally take minutes to respond - we believe this is just the npm registry being terrible and slow.
+  // We don't want this to slow down our builds though - we'd rather fail fast and try again later. So we'll just timeout after 30 seconds.
+  let jsonOut = {};
+  try {
+    console.time("Audit time");
+    jsonOut = await Promise.race([runPnpmAuditAsync(commonTempDir), throwAfterTimeout(180000, "Timed out contacting npm registry.")]);
+    console.timeEnd("Audit time");
+    console.log();
+  } catch (error) {
+    // We want to stop failing the build on transient failures and instead fail only on high/critical vulnerabilities.
+    logBuildWarning(error);
+    process.exit();
+  }
+
+  if (jsonOut.error) {
+    console.error(jsonOut.error.summary);
+    logBuildWarning("Rush audit failed. This may be caused by a problem with the npm audit server.");
+  }
+
+  // A list of temporary advisories excluded from the High and Critical list.
+  // Warning this should only be used as a temporary measure to avoid build failures
+  // for development dependencies only.
+  // All security issues should be addressed asap.
+  const excludedAdvisories = [
+    1700, // https://npmjs.com/advisories/1700
+    1754, // https://npmjs.com/advisories/1754. Waiting for fix, https://github.com/svg/svgo/pull/1485.
+    1755, // https://npmjs.com/advisories/1755. Waiting for fix, https://github.com/sindresorhus/normalize-url/issues/135
+    1779, // https://npmjs.com/advisories/1779. Waiting for fix in @bentley/react-scripts
+    1780, // https://npmjs.com/advisories/1780. Waiting for fix in @bentley/react-scripts
+    1781, // https://npmjs.com/advisories/1781. Waiting for fix in @bentley/react-scripts
+  ];
+
+  let shouldFailBuild = false;
+  for (const action of jsonOut.actions) {
+    for (const issue of action.resolves) {
+      const advisory = jsonOut.advisories[issue.id];
+
+      // TODO: This path no longer resolves to a specific package in the repo. Need to figure out the best way to handle it
+      const mpath = issue.path; // .replace("@rush-temp", "@bentley");
+
+      const severity = advisory.severity.toUpperCase();
+      const message = `${severity} Security Vulnerability: ${advisory.title} in ${advisory.module_name} (from ${mpath}). See ${advisory.url} for more info.`;
+
+      // For now, we'll only treat CRITICAL and HIGH vulnerabilities as errors in CI builds.
+      if (!excludedAdvisories.includes(advisory.id) && (severity === "HIGH" || severity === "CRITICAL")) {
+        logBuildError(message);
+        shouldFailBuild = true;
+      } else if (excludedAdvisories.includes(advisory.id) || severity === "MODERATE") // Only warn on MODERATE severity items
+        logBuildWarning(message);
+    }
+  }
+
+  // For some reason yarn audit can return the json without the vulnerabilities
+  if (undefined === jsonOut.metadata.vulnerabilities || shouldFailBuild)
+    failBuild();
+
+  process.exit();
+})();
+
+function runPnpmAuditAsync(cwd) {
+  return new Promise((resolve, reject) => {
+    // pnpm audit requires a package.json file so we temporarily create one and
+    // then delete it later
+    fs.writeFileSync(path.join(rushCommonDir, "config/rush/package.json"), JSON.stringify("{}", null, 2));
+
+    console.log("Running audit");
+    const pnpmPath = path.join(rushCommonDir, "temp/pnpm-local/node_modules/.bin/pnpm");
+    const child = spawn(pnpmPath, ["audit", "--json"], { cwd, shell: true });
+
+    let stdout = "";
+    child.stdout.on('data', (data) => {
+      stdout += data;
+    });
+
+    child.on('error', (data) => {
+      fs.unlinkSync(path.join(rushCommonDir, "config/rush/package.json"));
+      reject(data)
+    });
+    child.on('close', () => {
+      fs.unlinkSync(path.join(rushCommonDir, "config/rush/package.json"));
+      resolve(JSON.parse(stdout.trim()));
+    });
+  });
+}
package/scripts/rush/utils.js
@@ -0,0 +1,39 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+function logBuildWarning(msg) {
+  if (process.env.TF_BUILD)
+    console.error("##vso[task.logissue type=warning;]%s", msg);
+  else
+    console.error("WARNING: %s", msg);
+}
+
+function logBuildError(msg) {
+  if (process.env.TF_BUILD)
+    console.error("##vso[task.logissue type=error;]%s", msg);
+  else
+    console.error("ERROR: %s", msg);
+}
+
+function failBuild() {
+  if (process.env.TF_BUILD) {
+    console.error("##vso[task.complete result=Failed;]DONE")
+    process.exit(0);
+  } else {
+    process.exit(1);
+  }
+}
+
+function throwAfterTimeout(timeout, message) {
+  return new Promise((_resolve, reject) => {
+    setTimeout(() => reject(message), timeout);
+  });
+}
+
+module.exports = {
+  logBuildWarning,
+  logBuildError,
+  failBuild,
+  throwAfterTimeout
+}
package/scripts/utils/simpleSpawn.js
@@ -0,0 +1,86 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+
+const spawn = require("cross-spawn");
+const chalk = require("chalk");
+const kill = require("tree-kill");
+
+function simpleSpawn(cmd, args, cwd, env = {}) {
+  if (!cwd)
+    cwd = process.cwd();
+
+  return new Promise((resolve) => {
+    const child = spawn(cmd, args, {
+      cwd: cwd,
+      env: { FORCE_COLOR: "1", ...env, ...process.env },
+      stdio: "pipe"
+    });
+
+    child.stdout.on("data", (data) => {
+      process.stdout.write(data);
+    })
+    child.stderr.on("data", (data) => {
+      process.stderr.write(data);
+    })
+    child.on("error", function (data) { console.log(chalk.red(data)); });
+    child.on("close", (code) => resolve(code));
+    simpleSpawn.children.push(child);
+  });
+}
+
+function simpleSpawnSync(cmd, args, cwd, env = {}) {
+  if (!cwd)
+    cwd = process.cwd();
+
+  const child = spawn.sync(cmd, args, {
+    cwd: cwd,
+    env: { FORCE_COLOR: "1", ...env, ...process.env },
+    stdio: "inherit"
+  });
+
+  if (child.status !== 0) {
+    process.exit(child.status);
+  }
+}
+
+simpleSpawn.children = [];
+simpleSpawn.killAll = async function () {
+  const promises = [];
+  simpleSpawn.children.forEach((proc) => {
+    promises.push(new Promise((resolve) => {
+      kill(proc.pid, undefined, resolve);
+    }));
+  });
+  await Promise.all(promises);
+}
+
+function handleInterrupts(callback) {
+  if (!callback) {
+    callback = async () => {
+      await simpleSpawn.killAll();
+      process.exit();
+    };
+  }
+
+  if (process.platform === "win32") {
+    require("readline")
+      .createInterface({
+        input: process.stdin,
+        output: process.stdout
+      });
+  }
+
+  ["SIGINT", "SIGTERM"].forEach(function (sig) {
+    process.on(sig, function () {
+      callback();
+    });
+  });
+}
+
+module.exports = {
+  spawn: simpleSpawn,
+  spawnSync: simpleSpawnSync,
+  handleInterrupts
+};
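The async `spawn` wrapper and `handleInterrupts` are already demonstrated by extract-api.js above; the synchronous variant is used the same way. A small sketch, assuming the module is required from a sibling script in this package's `scripts` folder (the command run is arbitrary):

```js
const { spawnSync, handleInterrupts } = require("./utils/simpleSpawn");

handleInterrupts();               // kills any tracked async children on SIGINT/SIGTERM
spawnSync("npm", ["--version"]);  // inherits stdio; exits this process if the child's status is non-zero
```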
package/scripts/utils/validateTags.js
@@ -0,0 +1,120 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Bentley Systems, Incorporated. All rights reserved.
+ * See LICENSE.md in the project root for license terms and full copyright notice.
+ *--------------------------------------------------------------------------------------------*/
+
+const FS = require('fs-extra');
+const validTags = [
+  "see",
+  "note",
+  "throws",
+  "param",
+  "module",
+  "type",
+  "minimum",
+  "minlength",
+  "default",
+  "example",
+  "pattern",
+
+  // Following flags are added to support API-extractor (https://api-extractor.com/pages/tsdoc/syntax/#release-tags)
+  "alpha",
+  "beta",
+  "deprecated",
+  "internal",
+  "public",
+];
+
+function validateTags(path) {
+  return parseFile(path);
+}
+
+function parseFile(path) {
+  let allTags = {};
+
+  if (FS.existsSync(path) && FS.statSync(path).isFile()) {
+    const contents = FS.readFileSync(path, 'utf-8');
+    let jsonContents = JSON.parse(contents);
+
+    let tags = findValues(jsonContents, 'tags');
+
+    for (let j = 0; j < tags.length; j++) {
+      for (let i = 0; i < tags[j].length; i++)
+        allTags[tags[j][i]['tag']] = allTags[tags[j][i]['tag']] ? allTags[tags[j][i]['tag']] + 1 : 1;
+    }
+
+    let invalidTagObjects = [];
+    for (tag in allTags) {
+      if (!validTags.includes(tag))
+        invalidTagObjects.push(tag, findSource(jsonContents, 'tag', tag));
+    }
+    return invalidTagObjects;
+  }
+}
+
+function findValues(obj, key) {
+  return findValuesHelper(obj, key, []);
+}
+
+function findValuesHelper(obj, key, list) {
+  if (!obj) return list;
+  if (obj instanceof Array) {
+    for (var i in obj)
+      list = list.concat(findValuesHelper(obj[i], key, []));
+    return list;
+  }
+
+  if (obj[key]) list.push(obj[key]);
+
+  if ((typeof obj == "object") && (obj !== null)) {
+    const children = Object.keys(obj);
+    if (children.length > 0) {
+      for (i = 0; i < children.length; i++)
+        list = list.concat(findValuesHelper(obj[children[i]], key, []));
+    }
+  }
+  return list;
+}
+
+function findSource(obj, key, value) {
+  return findSourceHelper(obj, key, value, []);
+}
+
+function findSourceHelper(obj, key, value, list) {
+  if (!obj) return list;
+  if (obj instanceof Array) {
+    for (var i in obj)
+      list = list.concat(findSourceHelper(obj[i], key, value, []));
+    return list;
+  }
+
+  // Look for tag in signature or in comment
+  if (obj['signatures']) {
+    if (obj['signatures'][0] && obj['signatures'][0]['comment'] && obj['signatures'][0]['comment']['tags']) {
+      for (let tag in obj['signatures'][0]['comment']['tags']) {
+        if (obj['signatures'][0]['comment']['tags'][tag].tag === value && obj['sources'] && obj['sources'][0])
+          list.push(obj['sources'][0]);
+      }
+    }
+  }
+
+  if (obj['comment'] && obj['comment']['tags']) {
+    for (let tag in obj['comment']['tags']) {
+      if (obj['comment']['tags'][tag].tag === value && obj['sources'] && obj['sources'][0])
+        list.push(obj['sources'][0]);
+    }
+  }
+
+  if ((typeof obj == "object") && (obj !== null)) {
+    const children = Object.keys(obj);
+    if (children.length > 0) {
+      for (i = 0; i < children.length; i++)
+        list = list.concat(findSourceHelper(obj[children[i]], key, value, []));
+    }
+  }
+  return list;
+}
+
+module.exports = {
+  validateTags: validateTags,
+};
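A hedged usage sketch: `validateTags` takes the path to a TypeDoc-style JSON file and returns a flat array of invalid tag names interleaved with their source locations (or `undefined` when the path is not an existing file). The require path and JSON file name below are assumptions:

```js
const { validateTags } = require("./utils/validateTags");

const invalid = validateTags("./lib/docs/json/file.json"); // TypeDoc output (path is hypothetical)
if (invalid && invalid.length > 0) {
  // Entries alternate: [tagName, [sources...], tagName, [sources...], ...]
  console.error("Unsupported documentation tags:", JSON.stringify(invalid, null, 2));
  process.exit(1);
}
```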
package/tsconfig-base.json
@@ -0,0 +1,35 @@
+{
+  "compilerOptions": {
+    "target": "es2017",
+    "lib": [
+      // Required for async iterators/generators:
+      "esnext.asynciterable",
+      // These are the defaults for "target": "es2017", but they have to be explicitly specified if we want to add anything to "lib":
+      "es2017",
+      "dom",
+      "dom.iterable",
+      "scripthost"
+    ],
+    "module": "commonjs",
+    "stripInternal": false,
+    "declaration": true,
+    "strict": true,
+    "noImplicitAny": true,
+    "strictNullChecks": true,
+    "noImplicitThis": true,
+    "alwaysStrict": true,
+    "noUnusedLocals": false,
+    "noUnusedParameters": true,
+    "noImplicitReturns": true,
+    "noImplicitOverride": true,
+    "noFallthroughCasesInSwitch": false,
+    "experimentalDecorators": true,
+    "skipLibCheck": true,
+    "sourceMap": true,
+    "inlineSources": true,
+    "declarationMap": true,
+    "jsx": "react",
+    "forceConsistentCasingInFileNames": true,
+    "incremental": true
+  }
+}
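A consuming package's `tsconfig.json` would typically extend this base file rather than copy its settings. A rough sketch; the `extends` path, `outDir`, and `include` globs are assumptions for illustration:

```jsonc
{
  "extends": "./node_modules/@itwin/build-tools/tsconfig-base.json",
  "compilerOptions": {
    "outDir": "./lib"
  },
  "include": ["./src/**/*.ts"]
}
```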