@rpcbase/cli 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin.js ADDED
@@ -0,0 +1,34 @@
+ #!/usr/bin/env node
+ /* @flow */
+ const path = require("path")
+ require("dotenv").config({path: path.join(__dirname, "./.env")})
+ const yargs = require("yargs/yargs")
+ const {hideBin} = require("yargs/helpers")
+
+ const default_command = require("./src/default_command")
+ const run_client = require("./src/run_client")
+ const run_server = require("./src/run_server")
+ const update = require("./src/update")
+
+ let is_command = false
+
+ const args = yargs(hideBin(process.argv))
+   .command(["update", "up"], "Update rb dependencies to latest version", () => {}, async(argv) => {
+     is_command = true
+     await update()
+   })
+   .command("ping", "Ping", () => {}, (argv) => {
+     is_command = true
+     console.log("PING")
+   })
+   // .option("version", {
+   //   alias: "v",
+   //   type: "boolean",
+   //   default: false,
+   //   // description: "Displays CLI version"
+   // })
+   .parse()
+
+ if (!is_command) {
+   default_command(args)
+ }
package/package.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "name": "@rpcbase/cli",
+   "version": "0.3.0",
+   "license": "SSPL-1.0",
+   "bin": {
+     "rb": "./bin.js"
+   },
+   "scripts": {
+     "test": "echo \"no test specified exiting with code 0\" && exit 0"
+   },
+   "dependencies": {
+     "bluebird": "3.7.2",
+     "chalk": "4.1.2",
+     "concurrently": "7.0.0",
+     "dotenv": "16.0.0",
+     "semver": "7.3.5",
+     "validator": "13.7.0",
+     "yargs": "17.4.0"
+   }
+ }
@@ -0,0 +1,3 @@
+ /* @flow */
+
+ // check project folder layout
@@ -0,0 +1,75 @@
+ /* @flow */
+ const path = require("path")
+ const fs = require("fs")
+ const chalk = require("chalk")
+ const concurrently = require("concurrently")
+
+ const exit_with_message = require("./helpers/exit_with_message")
+ const get_run_configs = require("./helpers/get_run_configs")
+ const docker_runtime_check = require("./helpers/docker_runtime_check")
+ const verify_project = require("./verify_project")
+
+
+ const default_command = async(args) => {
+   const run_configs = get_run_configs()
+
+   const run_commands = run_configs.map((cfg) => {
+     let name
+     let command
+     let prefixColor
+     if (cfg.type === "client") {
+       name = "client"
+       // command = "yarn dev"
+       command = "./node_modules/.bin/rb-webpack-browser dev"
+       prefixColor = "#C678DD"
+     } else if (cfg.type === "server") {
+       name = "server"
+       command = "./node_modules/.bin/rb-server start"
+       prefixColor = "#61AFEF"
+     }
+
+     // command, name, prefixColor, env, cwd
+     return {
+       name,
+       command,
+       prefixColor,
+       cwd: cfg.working_dir
+     }
+   })
+
+   if (run_configs.filter((c) => c.type === "server").length > 0) {
+     docker_runtime_check()
+   }
+
+   let project_errors
+
+   const {result, commands} = concurrently(run_commands, {
+     killOthers: ["success", "failure"],
+     handleInput: true,
+     defaultInputTarget: "server",
+   })
+
+   result
+     .then(() => {
+       console.log("Done.")
+     })
+     .catch((err) => {
+       if (project_errors?.length > 0) {
+         console.log(`\n${chalk.bold("exited due to configuration errors:")}`)
+         project_errors.forEach((err) => {
+           console.log(err)
+         })
+       }
+     })
+
+   // asynchronously verify project config and kill process if any error
+   project_errors = await verify_project(run_configs)
+
+   if (project_errors.length > 0) {
+     commands.forEach((c) => {
+       c.kill()
+     })
+   }
+ }
+
+ module.exports = default_command
package/src/errors.js ADDED
@@ -0,0 +1,8 @@
+ /* @flow */
+ const chalk = require("chalk")
+
+ const ERRORS = {
+   INVALID_CWD: `expected to run in ${chalk.cyan("client/")} or ${chalk.cyan("server/server/")} or project root directory`
+ }
+
+ module.exports = ERRORS
@@ -0,0 +1,14 @@
+ /* @flow */
+ const {execSync} = require("child_process")
+ const chalk = require("chalk")
+
+ const docker_runtime_check = () => {
+   try {
+     execSync("docker version -f json") // exits nonzero (and throws) when the Docker CLI is missing or the daemon is unreachable
+   } catch (err) {
+     console.log(chalk.cyan.bold("docker"), "exited with error")
+     process.exit(1)
+   }
+ }
+
+ module.exports = docker_runtime_check
@@ -0,0 +1,12 @@
+ /* @flow */
+ const chalk = require("chalk")
+
+ const exit_with_message = (message = "", code = 1) => {
+   console.log(chalk.red("error:"), message)
+
+   // TODO: sentry(?) report error
+
+   process.exit(code)
+ }
+
+ module.exports = exit_with_message
@@ -0,0 +1,57 @@
+ /* @flow */
+ const path = require("path")
+ const fs = require("fs")
+
+ const ERRORS = require("../errors")
+ const exit_with_message = require("./exit_with_message")
+
+
+ // run type is client | server | both
+ const get_run_configs = () => {
+   const cwd = process.cwd()
+
+   // check if running in the parent of an rb project
+   const dir_contents = fs.readdirSync(cwd)
+   const is_parent_dir = dir_contents.includes("client") && dir_contents.includes("server")
+
+   // check if rb is running in client or server
+   const dir = path.basename(cwd)
+   const is_rb_dir = ["client", "server"].includes(dir)
+
+   if (!is_parent_dir && !is_rb_dir) {
+     exit_with_message(ERRORS.INVALID_CWD)
+     return
+   }
+
+   if (is_parent_dir) {
+     return [
+       {
+         type: "client",
+         working_dir: path.join(cwd, "./client")
+       },
+       {
+         type: "server",
+         working_dir: path.join(cwd, "./server/server")
+       },
+     ]
+   } else {
+     // client
+     if (dir === "client") {
+       return [{
+         type: "client",
+         working_dir: cwd
+       }]
+     // check if parent dir is also server, because we are in server/server
+     } else if (dir === "server" && path.basename(path.dirname(cwd)) === "server") {
+       return [{
+         type: "server",
+         working_dir: cwd
+       }]
+     }
+   }
+
+   // fails when no match was found
+   exit_with_message(ERRORS.INVALID_CWD)
+ }
+
+ module.exports = get_run_configs
@@ -0,0 +1,5 @@
+ /* @flow */
+ const path = require("path")
+ const fs = require("fs")
+
+ // console.log("runclient")
File without changes
@@ -0,0 +1,71 @@
+ /* @flow */
+ const fs = require("fs")
+ const path = require("path")
+ const util = require("util")
+ const {execSync} = require("child_process")
+
+ const exec = util.promisify(require("child_process").exec)
+
+
+ const get_package_files = () => {
+   const working_dir = process.cwd()
+   const out = execSync(`find . -name "package.json" -not -path ".*/node_modules/*"`)
+   const files = out.toString().trim().split("\n")
+     .map((f) => path.join(working_dir, f))
+   return files
+ }
+
+ const get_outdated = async(dir) => {
+
+   const output = execSync("npm outdated --json || true", {cwd: dir}) // "|| true": npm outdated exits nonzero when outdated packages exist
+   const outdated = JSON.parse(output.toString())
+
+   // WARNING: we only filter rpcbase deps for now
+   const target_deps = {}
+   Object.keys(outdated)
+     .filter((name) => name.startsWith("@rpcbase"))
+     .forEach((key) => {
+       target_deps[key] = outdated[key].latest
+     })
+
+   return target_deps
+ }
+
+
+ const update = async() => {
+   const working_dir = process.cwd()
+
+   const package_files = get_package_files()
+
+   for (let i = 0; i < package_files.length; i++) {
+     const package_file = package_files[i]
+     const dir = path.dirname(package_file)
+
+     console.log("updating", path.relative(working_dir, dir) || ".")
+
+     const outdated = await get_outdated(dir)
+
+     const package_json = JSON.parse(fs.readFileSync(package_file, "utf8"))
+
+     let has_updates = false
+
+     Object.keys(outdated).forEach((k) => {
+       if (package_json.dependencies[k]) {
+         package_json.dependencies[k] = outdated[k]
+         console.log("updated", k, "to", outdated[k])
+         has_updates = true
+       }
+     })
+
+     if (has_updates) {
+       fs.writeFileSync(package_file, `${JSON.stringify(package_json, null, 2)}\n`)
+       execSync("yarn", {cwd: dir, stdio: "inherit"})
+     }
+   }
+
+
+
+
+ }
+
+ module.exports = update
@@ -0,0 +1,59 @@
+ /* @flow */
+ const path = require("path")
+ const _ = require("lodash")
+ const chalk = require("chalk")
+ const Promise = require("bluebird")
+
+ const verify_packages_installed = require("./verify_packages_installed")
+ const verify_env = require("./verify_env")
+
+ const get_root_dir = (run_configs) => {
+   let project_root_dir
+   if (run_configs[0].type === "server") {
+     project_root_dir = path.join(run_configs[0].working_dir, "../../")
+   } else if (run_configs[0].type === "client") {
+     project_root_dir = path.join(run_configs[0].working_dir, "../")
+   }
+
+   if (!project_root_dir) {
+     throw new Error("unable to find project root dir")
+   }
+
+   return project_root_dir
+ }
+
+
+ const verify_project = async(run_configs) => {
+   let result_errors = []
+
+   const project_root_dir = get_root_dir(run_configs)
+
+   // check if all installed packages match semver in package json file
+   const deps_errors = _.flatten(
+     await Promise.map(run_configs.map(({working_dir}) => working_dir), verify_packages_installed(project_root_dir))
+   )
+
+   if (deps_errors.length > 0) {
+     let error_str = `${chalk.bold.red("error:")} the following packages are missing from your ${chalk.bold("node_modules")}\n`
+     deps_errors.forEach((err) => {
+       error_str += ` ${err}\n`
+     })
+     error_str += `\n run ${chalk.bold("yarn install")} to ensure dependencies are installed\n`
+     result_errors.push(error_str)
+   }
+
+   // get env file from server and validate required services and ports
+   const env_errors = await verify_env(project_root_dir)
+   if (env_errors.length > 0) {
+     let error_str = `${chalk.bold.red("error:")} ${chalk.bold(".env")} misconfigurations\n`
+     env_errors.forEach((err) => {
+       error_str += ` ${err}\n`
+     })
+     result_errors.push(error_str)
+   }
+
+   return result_errors
+ }
+
+
+ module.exports = verify_project
@@ -0,0 +1,83 @@
+ /* @flow */
+ const fs = require("fs")
+ const path = require("path")
+ const chalk = require("chalk")
+ const dotenv = require("dotenv")
+ const validator = require("validator")
+
+
+ const validate_port = (val) => {
+   if (!validator.isPort(val)) {
+     return `expected ${chalk.bold(val)} to be a valid port number`
+   }
+ }
+
+ const SERVICES = [
+   {
+     name: "client",
+     required_env: {
+       "CLIENT_PORT": validate_port
+     },
+   },
+   {
+     name: "server",
+     required_env: {
+       "SERVER_PORT": validate_port
+     },
+   },
+   {
+     name: "database0",
+     required_env: {
+       "DATABASE_PORT_0": validate_port,
+       "DATABASE_NAME": (val) => {
+         if (!/^[$A-Z_][0-9A-Z_$-]*$/i.test(val)) {
+           return `expected ${chalk.bold(`'${val}'`)} to be a valid database name`
+         }
+       }
+     },
+   },
+   {
+     name: "database1",
+     required_env: {
+       "DATABASE_PORT_1": validate_port
+     },
+   },
+   {
+     name: "database2",
+     required_env: {
+       "DATABASE_PORT_2": validate_port
+     },
+   },
+ ]
+
+
+ // TODO: service definitions should be validated in docker compose as well
+ const verify_env = async(root_dir) => {
+   const env_path = path.join(root_dir, "./server/server/.env")
+   const env_buf = fs.readFileSync(env_path)
+   const parsed_env = dotenv.parse(env_buf)
+
+   const result_errors = []
+
+   SERVICES.forEach((service) => {
+     Object.keys(service.required_env).forEach((k) => {
+       const val_fn = service.required_env[k]
+       const env_val = parsed_env[k]
+
+       if (!env_val) {
+         result_errors.push(`${chalk.yellow.bold(k)} is missing`)
+         return
+       }
+
+       const err = val_fn(env_val)
+       if (err) {
+         result_errors.push(`${chalk.yellow.bold(k)} ${err}`)
+       }
+     })
+   })
+
+   return result_errors
+ }
+
+
+ module.exports = verify_env
@@ -0,0 +1,47 @@
+ /* @flow */
+ const path = require("path")
+ const fs = require("fs")
+ const util = require("util")
+ const chalk = require("chalk")
+ const semverSatisfies = require("semver/functions/satisfies")
+
+ const exec = util.promisify(require("child_process").exec)
+
+ const LIST_CMD = "npm ls --depth=0 --json || true" // "|| true": npm ls exits nonzero when the dependency tree has problems
+
+ const get_installed_pkgs = async(wd) => {
+   const {stdout: npm_out} = await exec(LIST_CMD, {cwd: wd})
+   const npm_json = JSON.parse(npm_out)
+   const deps = npm_json.dependencies || {}
+
+   const result = {}
+   Object.keys(deps).forEach((k) => {
+     result[k] = deps[k].version
+   })
+
+   return result
+ }
+
+
+ // check that packages in node_modules match package.json with semver (currently only checks presence)
+ const verify_packages_installed = (root_dir) => async(wd) => {
+   const rel_dir = path.relative(root_dir, wd)
+
+   const pack_path = path.join(wd, "./package.json")
+   const pack = JSON.parse(fs.readFileSync(pack_path))
+   const deps = pack.dependencies || {}
+
+   const installed = await get_installed_pkgs(wd)
+
+   const result_errors = []
+
+   Object.keys(deps).forEach((k) => {
+     if (!installed[k]) {
+       result_errors.push(`${chalk.yellow.bold(k)} is missing in ${chalk.bold(rel_dir + "/")}`)
+     }
+   })
+
+   return result_errors
+ }
+
+ module.exports = verify_packages_installed
@@ -0,0 +1 @@
+ /* @flow */