orc-scripts 1.2.0-pre.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +19 -0
- package/README.md +76 -0
- package/babel.js +1 -0
- package/eslint.js +1 -0
- package/jest.js +1 -0
- package/package.json +164 -0
- package/prettier.js +1 -0
- package/src/__mocks__/fileMock.js +1 -0
- package/src/config/babel-preset.js +4 -0
- package/src/config/babel-transform.js +4 -0
- package/src/config/babel-whitelist.json +1 -0
- package/src/config/babelrc.js +38 -0
- package/src/config/eslintrc.js +15 -0
- package/src/config/jest-resolver.js +35 -0
- package/src/config/jest.config.js +45 -0
- package/src/config/jestSetupFiles.js +4 -0
- package/src/config/prettier.config.js +9 -0
- package/src/config/setAssetPath.js +1 -0
- package/src/config/unexpected-form.js +317 -0
- package/src/config/unexpected-form.test.js +2397 -0
- package/src/config/unexpected-module.js +112 -0
- package/src/config/unexpected-module.test.js +1106 -0
- package/src/config/unexpected-styles.js +44 -0
- package/src/config/unexpected-styles.test.js +118 -0
- package/src/config/unexpected.js +117 -0
- package/src/config/unexpected.test.js +393 -0
- package/src/config/webpack.config.js +103 -0
- package/src/index.js +19 -0
- package/src/run-script.js +99 -0
- package/src/scripts/build/cli.js +43 -0
- package/src/scripts/build/index.js +9 -0
- package/src/scripts/build/web.js +24 -0
- package/src/scripts/buildDep.js +122 -0
- package/src/scripts/buildIconsSheet.js +50 -0
- package/src/scripts/clean.js +8 -0
- package/src/scripts/extract-messages.js +22 -0
- package/src/scripts/generateApi.js +152 -0
- package/src/scripts/getDist.js +20 -0
- package/src/scripts/mergeTranslations.js +32 -0
- package/src/scripts/prep.js +28 -0
- package/src/scripts/start.js +45 -0
- package/src/scripts/tag.js +76 -0
- package/src/scripts/test.js +26 -0
- package/src/scripts/validateTranslations.js +72 -0
- package/src/utils.js +95 -0
- package/src/utils.test.js +93 -0
- package/webpack.js +1 -0

package/src/scripts/mergeTranslations.js
ADDED

@@ -0,0 +1,32 @@

const path = require("path");
const util = require("util");
const fs = require("fs");

const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);
const writeFile = util.promisify(fs.writeFile);

const appTranslationsDir = path.resolve(process.cwd(), "src/translations");
const orcSharedTranslationsDir = path.resolve(process.cwd(), "node_modules/orc-shared/src/translations");

readdir(appTranslationsDir, (_, files) => {
  files.forEach(file => {
    if (!file.endsWith(".json")) {
      return;
    }

    const appLocalizationFile = path.resolve(appTranslationsDir, file);
    const correspondingOrcSharedFile = path.resolve(orcSharedTranslationsDir, file);

    readFile(correspondingOrcSharedFile, "utf8", (_, orcSharedData) => {
      const orcSharedLocalizations = JSON.parse(orcSharedData);

      readFile(appLocalizationFile, "utf8", (_, appData) => {
        const appLocalizations = JSON.parse(appData);
        const mergedData = { ...appLocalizations, ...orcSharedLocalizations };
        const stringifiedData = JSON.stringify(mergedData, null, 1).concat("\r\n");
        writeFile(appLocalizationFile, stringifiedData, "utf8");
      });
    });
  });
});
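
The spread order in mergeTranslations.js means that a message from orc-shared overwrites an app-local message with the same id. A minimal sketch of that precedence; the message ids and strings below are made up for illustration:

// Illustrative only: object spread keeps the right-most value for duplicated keys,
// so the orc-shared message wins over the app-local one.
const appLocalizations = { "app.title": "My app", "shared.ok": "Okay" };
const orcSharedLocalizations = { "shared.ok": "OK" };
const mergedData = { ...appLocalizations, ...orcSharedLocalizations };
// mergedData => { "app.title": "My app", "shared.ok": "OK" }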

package/src/scripts/prep.js
ADDED

@@ -0,0 +1,28 @@

const util = require("util");
const path = require("path");
const makeDir = require("make-dir");

const readdir = util.promisify(require("fs").readdir);
const copyFile = util.promisify(require("ncp").ncp);

const distDir = path.resolve(process.cwd(), "dist");
const contentDir = path.resolve(process.cwd(), "src/content");
const staticDir = path.resolve(process.cwd(), "src/static");
const mockDir = path.resolve(process.cwd(), "src/__mocks__");

async function prep() {
  await makeDir("dist");
  try {
    await Promise.all([
      copyFile(contentDir, path.resolve(distDir, "content")),
      copyFile(mockDir, path.resolve(distDir, "__mocks__")),
    ]);
  } catch (_) {}
  if (process.env.NODE_ENV === "production") process.exit(0);
  try {
    const files = await readdir(staticDir);
    await Promise.all(files.map(file => copyFile(path.resolve(staticDir, file), path.resolve(distDir, file))));
  } catch (_) {}
}

prep();

package/src/scripts/start.js
ADDED

@@ -0,0 +1,45 @@

const webpackDevServer = require("webpack-dev-server");
const webpack = require("webpack");

const HOST = process.env.HOSTNAME || "localhost";

const args = process.argv.slice(2);
const argPort = args.indexOf("--port") !== -1 ? args[args.indexOf("--port") + 1] : null;
const PORT = argPort || process.env.PORT || 5000;

const config = require("../config/webpack.config.js");
const options = {
  contentBase: "./dist",
  publicPath: process.env.WEBPACK_PUBLIC_PATH || "/",
  historyApiFallback: true,
  hotOnly: true,
  port: PORT,
  host: HOST,
};

if (HOST !== "localhost") {
  options.public = HOST;
}
if (HOST !== "localhost" || args.indexOf("--https") !== -1 || process.env.HTTPS) {
  options.https = true;
}

const location = "http" + (options.https ? "s" : "") + "://" + HOST + ":" + PORT;

// /mockData/ contains json files to simulate API endpoints
options.before = (app, server) => {
  app.get("/mockData/*", (req, res, next) => {
    const parsedUrl = new URL(req.url, location);
    parsedUrl.pathname = parsedUrl.pathname + ".json";
    req.url = parsedUrl.href.replace(location, "");
    next();
  });
};

webpackDevServer.addDevServerEntrypoints(config, options);
const compiler = webpack(config);
const server = new webpackDevServer(compiler, options);

server.listen(PORT, "localhost", () => {
  console.log("dev server listening at " + location);
});
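
The /mockData/ middleware only rewrites the incoming request URL; the dev server then serves the matching .json file as a normal static asset. A minimal sketch of the rewrite, using a made-up path and the default host and port:

// Illustrative only: how a mock-API request is mapped onto a static JSON file.
const location = "http://localhost:5000";
const parsedUrl = new URL("/mockData/orders", location);
parsedUrl.pathname = parsedUrl.pathname + ".json";
const rewritten = parsedUrl.href.replace(location, "");
// rewritten === "/mockData/orders.json", served like any other file under contentBase.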

package/src/scripts/tag.js
ADDED

@@ -0,0 +1,76 @@

const spawn = require("cross-spawn");
const readPkgUp = require("read-pkg-up");
const { inc, lt, eq, coerce, prerelease } = require("semver");

const gitDiffResult = spawn.sync("git", ["diff", "HEAD"]);
if (gitDiffResult.status !== 0 || gitDiffResult.stdout.toString("utf-8")) {
  console.error("Working directory not clean, cannot tag release");
  process.exit(-1);
}

const gitBranchResult = spawn.sync("git", ["rev-parse", "--abbrev-ref", "HEAD"]);
if (gitBranchResult.status !== 0) {
  console.error(gitBranchResult.stderr.toString("utf-8"));
  process.exit(-2);
}
const currentBranch = gitBranchResult.stdout.toString("utf-8").trim();

const isMaster = currentBranch === "master";
const isDevelopment = currentBranch === "develop";
const isRelease = currentBranch.startsWith("releases/");
const isLegacy = currentBranch.startsWith("legacy/");

const { packageJson } = readPkgUp.sync({ normalize: false }) || {};
const currentVersion = packageJson.version;
let tag = "";
if (isMaster) {
  // TODO: Should semver increment major/minor/patch - but which is hard to discover
  // Fail out and tag manually for now
  console.error("Tags from master branch should be made manually with the npm version command");
  process.exit(2);
} else if (isRelease) {
  const pre = prerelease(currentVersion);
  const branchVersion = coerce(currentBranch.replace(/^.*\//, "")).version;
  if (lt(branchVersion, currentVersion)) {
    // Branch version must be greater or equal
    console.error("Branch version must be higher than or equal to package version");
    process.exit(3);
  }
  if (pre && eq(branchVersion, currentVersion.replace(/-.*$/, ""))) {
    tag = inc(currentVersion, "prerelease", "pre");
  } else {
    // First pre-release needs specific handling
    // see https://www.npmjs.com/package/semver#functions,
    // the inc() function regarding the semantics of "prerelease"
    tag = branchVersion + "-pre.0";
  }
} else if (isLegacy) {
  // Only patch updates
  tag = inc(currentVersion, "patch") + "+legacy";
} else if (isDevelopment) {
  const pre = prerelease(currentVersion);
  if (pre && pre[0] !== "dev") {
    tag = inc(currentVersion, "prepatch", "dev");
  } else {
    tag = inc(currentVersion, "prerelease", "dev");
  }
}

if (!tag) {
  console.error("This branch (" + currentBranch + ") cannot have releases tagged from it.");
  process.exit(4);
}

const gitTagcheckResult = spawn.sync("git", ["rev-parse", tag]);
// Failure == no tag == go ahead
if (gitTagcheckResult.status === 0) {
  console.error("Tag", tag, "already exists, tagging aborted");
  process.exit(-3);
}

const tagResult = spawn.sync("npm", ["version", tag], {
  stdio: "inherit",
});
if (tagResult.status !== 0) {
  process.exit(1);
}
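
The comment about the first pre-release refers to semver's inc() behaviour: when the current version is not already a prerelease, "prerelease" also bumps the patch number, which is why the script builds the first "-pre.0" tag by hand. A small sketch with made-up version numbers:

// Illustrative only: semver's prerelease() and inc() on made-up versions.
const { inc, prerelease } = require("semver");
prerelease("1.2.0");                     // => null
prerelease("1.2.0-pre.0");               // => ["pre", 0]
inc("1.2.0", "prerelease", "pre");       // => "1.2.1-pre.0" (patch is bumped as well)
inc("1.2.0-pre.0", "prerelease", "pre"); // => "1.2.0-pre.1" (only the prerelease counter moves)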

package/src/scripts/test.js
ADDED

@@ -0,0 +1,26 @@

process.env.BABEL_ENV = "test";
process.env.NODE_ENV = "test";

const { hasPkgProp, parseEnv, hasFile } = require("../utils");

const args = process.argv.slice(2);

const isCI = require("is-ci") || args.includes("--ci");

const watch =
  !isCI &&
  !parseEnv("SCRIPTS_PRECOMMIT", false) &&
  !args.includes("--no-watch") &&
  !args.includes("--coverage") &&
  !args.includes("--updateSnapshot")
    ? ["--watch"]
    : [];

const ci = isCI ? ["--reporters=default", "--reporters=jest-junit"] : [];

const config =
  !args.includes("--config") && !hasFile("jest.config.js") && !hasPkgProp("jest")
    ? ["--config", JSON.stringify(require("../config/jest.config"))]
    : [];

require("jest").run([...config, ...watch, ...ci, ...args]);

package/src/scripts/validateTranslations.js
ADDED

@@ -0,0 +1,72 @@

const path = require("path");
const util = require("util");
const fs = require("fs");
const lodash = require("lodash");

const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);

const appTranslationsDir = path.resolve(process.cwd(), "src/translations");
const translations = {};
const filesToValidate = [];
const args = process.argv.slice(2);

if (args.length > 0) {
  for (const arg of args) {
    filesToValidate.push(path.basename(arg.replace("'", "").replace('"', "")).toLowerCase());
  }
}

async function validate() {
  let exitCode = 0;
  const files = await readdir(appTranslationsDir);

  for (const file of files) {
    if (!file.endsWith(".json")) {
      continue;
    }

    const appLocalizationFile = path.resolve(appTranslationsDir, file);
    const appData = await readFile(appLocalizationFile, "utf8");

    const appLocalizations = JSON.parse(appData);
    translations[file.toLowerCase()] = Object.keys(appLocalizations);
  }

  for (const language of Object.keys(translations)) {
    const languageKeys = translations[language];

    if (filesToValidate.length > 0 && !filesToValidate.includes(language)) {
      continue;
    }

    for (const otherLanguage of Object.keys(translations).filter(x => x !== language)) {
      const otherLanguageKeys = translations[otherLanguage];
      const additionalTranslations = lodash.without(languageKeys, ...otherLanguageKeys);

      if (additionalTranslations.length > 0) {
        exitCode = -1;
        console.warn(
          "Validation exception found when comparing language " +
            language +
            " to " +
            otherLanguage +
            ". Language " +
            language +
            " has the following additional translations:",
        );
        console.warn(additionalTranslations);
        console.warn("");
      }
    }
  }

  return exitCode;
}

validate().then(returnCode => {
  if (returnCode) {
    console.log("Validation failed.");
  }
  process.exit(returnCode);
});
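
lodash.without() is what turns two key lists into the set of translations that exist in one language file but not the other. A minimal sketch with made-up message ids:

// Illustrative only: keys present in one language but missing from the other.
const lodash = require("lodash");
const languageKeys = ["app.title", "app.save", "app.cancel"];
const otherLanguageKeys = ["app.title", "app.save"];
lodash.without(languageKeys, ...otherLanguageKeys); // => ["app.cancel"]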

package/src/utils.js
ADDED

@@ -0,0 +1,95 @@

const fs = require("fs");
const path = require("path");
const arrify = require("arrify");
const which = require("which");
const readPkgUp = require("read-pkg-up");

const hasOwn = (obj, key) => obj.hasOwnProperty(key) && obj[key] !== null && obj[key] !== undefined;

const hasPath = (obj, keys) => {
  const [key, ...tail] = keys;
  if (hasOwn(obj, key)) {
    if (tail && tail.length) {
      return hasPath(obj[key], tail);
    } else {
      return true;
    }
  } else {
    return false;
  }
};

const has = (obj, keypath) => hasPath(obj, keypath.split(/[./]/));

const { packageJson: pkg, path: pkgPath } =
  readPkgUp.sync({
    cwd: fs.realpathSync(process.cwd()),
  }) || {};

const appDirectory = path.dirname(pkgPath);

const fromRoot = (...p) => path.join(appDirectory, ...p);
const hasFile = (...p) => fs.existsSync(fromRoot(...p));

const hasPkgProp = props => arrify(props).some(prop => has(pkg, prop));

const hasPkgSubProp = pkgProp => props => hasPkgProp(arrify(props).map(p => `${pkgProp}.${p}`));

const hasPeerDep = hasPkgSubProp("peerDependencies");
const hasDep = hasPkgSubProp("dependencies");
const hasDevDep = hasPkgSubProp("devDependencies");
const hasAnyDep = args => [hasDep, hasDevDep, hasPeerDep].some(fn => fn(args));

const ifAnyDep = (deps, t, f) => (hasAnyDep(arrify(deps)) ? t : f);

function parseEnv(name, def) {
  if (envIsSet(name)) {
    try {
      return JSON.parse(process.env[name] || "<fail>");
    } catch (err) {
      return process.env[name];
    }
  }
  return def;
}

function envIsSet(name) {
  return process.env.hasOwnProperty(name) && process.env[name] && process.env[name] !== "undefined";
}

function resolveBin(modName, { executable = modName, cwd = process.cwd() } = {}) {
  let pathFromWhich;
  try {
    pathFromWhich = fs.realpathSync(which.sync(executable));
  } catch (_error) {
    // ignore _error
  }
  try {
    const modPkgPath = require.resolve(path.join(modName, "package.json"));
    const modPkgDir = path.dirname(modPkgPath);
    const { bin } = require(modPkgPath);
    const binPath = typeof bin === "string" ? bin : bin[executable];
    const fullPathToBin = path.join(modPkgDir, binPath);
    if (pathFromWhich.startsWith(cwd)) {
      return pathFromWhich.replace(cwd, ".");
    }
    if (fullPathToBin === pathFromWhich) {
      return executable;
    }
    return fullPathToBin.replace(cwd, ".");
  } catch (error) {
    if (pathFromWhich) {
      return executable;
    }
    throw error;
  }
}

module.exports = {
  fromRoot,
  hasFile,
  hasPkgProp,
  ifAnyDep,
  parseEnv,
  resolveBin,
};
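
parseEnv() runs the raw environment string through JSON.parse first, so a value such as "true" comes back as a boolean rather than a string, and anything that is not valid JSON falls through unchanged. A minimal sketch; the require path and the SOME_FLAG / UNSET_VARIABLE names are assumptions for illustration:

// Illustrative only: the require path and variable values are assumed.
const { parseEnv } = require("./src/utils");

process.env.SCRIPTS_PRECOMMIT = "true";
parseEnv("SCRIPTS_PRECOMMIT", false);   // => true (boolean, via JSON.parse)

process.env.SOME_FLAG = "not json";
parseEnv("SOME_FLAG", "fallback");      // => "not json" (JSON.parse throws, raw string is returned)

parseEnv("UNSET_VARIABLE", "fallback"); // => "fallback" (envIsSet() is false)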

package/src/utils.test.js
ADDED

@@ -0,0 +1,93 @@

const { fromRoot, hasFile, hasPkgProp, ifAnyDep, parseEnv, resolveBin } = require("./utils");
const path = require("path");

describe("utils", () => {
  describe("fromRoot", () => {
    it("exists", () => expect(fromRoot, "to be a function"));

    it("appends directory parts to the currently running application's root directory", () =>
      expect(
        fromRoot,
        "when called with",
        ["test", "dir"],
        "to equal",
        process.cwd() + path.sep + "test" + path.sep + "dir",
      ));
  });

  describe("hasFile", () => {
    it("exists", () => expect(hasFile, "to be a function"));

    it("returns true when given file name that exists in CWD", () =>
      expect(hasFile, "when called with", ["package.json"], "to equal", true));

    it("returns false when given file name that does not exist in CWD", () =>
      expect(hasFile, "when called with", ["foo.bar"], "to equal", false));
  });

  describe("hasPkgProp", () => {
    it("exists", () => expect(hasPkgProp, "to be a function"));

    it("returns true if local package.json has at least one of the given first-level keys", () =>
      expect(hasPkgProp, "when called with", [["no", "lint-staged", "dont-have"]], "to equal", true));

    it("returns false if local package.json does not have any of the given first-level keys", () =>
      expect(hasPkgProp, "when called with", [["no-such-key", "nuh-uh", "nope-not-here"]], "to equal", false));
  });

  describe("ifAnyDep", () => {
    it("exists", () => expect(ifAnyDep, "to be a function"));

    it("returns the 'then' parameter if project has one of given dependencies", () =>
      expect(
        ifAnyDep,
        "when called with",
        [["wrong-dependency", "unexpected", "not-this-one"], "then param", "else param"],
        "to be",
        "then param",
      ));

    it("returns the 'else' parameter if project has one of given dependencies", () =>
      expect(
        ifAnyDep,
        "when called with",
        [["wrong-dependency", "expect", "not-this-one"], "then param", "else param"],
        "to be",
        "else param",
      ));
  });

  describe("parseEnv", () => {
    it("exists", () => expect(parseEnv, "to be a function"));

    it("returns the environment variable given if it exists", () =>
      expect(parseEnv, "when called with", ["BABEL_ENV", "wrong"], "to equal", "test"));

    it("returns default value if it does not exist", () =>
      expect(parseEnv, "when called with", ["NO_SUCH_variable", "wrong"], "to equal", "wrong"));
  });

  describe("resolveBin", () => {
    it("exists", () => expect(resolveBin, "to be a function"));

    it.skip("resolveBin resolves to the full path when it's not in $PATH", () => {
      expect(
        resolveBin("@babel/cli", { executable: "babel" }),
        "to be",
        require.resolve("@babel/cli/bin/babel").replace(process.cwd(), "."),
      );
    });

    it.skip("resolveBin resolves to the .bin path when it's in $PATH but local", () => {
      expect(
        resolveBin("@babel/cli", { executable: "babel" }),
        "to start with",
        require.resolve(".bin/babel").replace(process.cwd(), "."),
      );
    });

    it.skip("resolveBin resolves to the binary if it's in $PATH", () => {
      expect(resolveBin("node"), "to be", "node");
    });
  });
});

package/webpack.js
ADDED

@@ -0,0 +1 @@

module.exports = require("./src/config/webpack.config.js");