@rpcbase/cli 0.37.0 → 0.39.0
This diff shows the published contents of these package versions as they appear in their public registry. It is provided for informational purposes only.
- package/bin.js +15 -3
- package/package.json +1 -1
- package/src/helpers/docker_runtime_check.js +0 -6
- package/src/helpers/get_run_configs.js +3 -7
- package/src/helpers/tailscale_tunnel.js +3 -2
- package/src/start_command.js +2 -4
- package/src/sync-dotenv/index.js +59 -71
- package/src/sync-dotenv/LICENSE +0 -22
package/bin.js
CHANGED
@@ -23,6 +23,11 @@ yargs(hideBin(process.argv))
       default: false,
       description: "Adds coverage instrumentation (always disabled in production)"
     })
+    .option("docker", {
+      type: "boolean",
+      default: false,
+      description: "Run in docker"
+    })
     .option("verbose", {
       type: "boolean",
       default: false,
@@ -34,7 +39,6 @@ yargs(hideBin(process.argv))
       description: "Run with debug logging"
     })
   }, (argv) => {
-    console.log("IS start")
    if (argv.version) return print_versions()
    start_command(argv)
  })
@@ -51,16 +55,24 @@ yargs(hideBin(process.argv))
    await update()
  })
  // sync-dotenv
-  .command(
+  .command("sync-dotenv [source_env] [dest_env]", "Compare and sync dotenv files", (yargs) => {
    yargs
      .option("write", {
        type: "boolean",
        default: false,
        description: "Write the ouput to dest env file"
      })
+      .positional("source_env", {
+        describe: "source .env file",
+        type: "string"
+      })
+      .positional("dest_env", {
+        describe: "destination env file",
+        type: "string"
+      })
  }, async(argv) => {
    if (argv.version) return print_versions()
-    await sync_dotenv()
+    await sync_dotenv(argv.source_env, argv.dest_env)
  })
  // Ping
  .command("ping", "Ping", () => {}, (argv) => {
package/package.json
CHANGED
package/src/helpers/docker_runtime_check.js
CHANGED
@@ -3,14 +3,8 @@ const {execSync} = require("child_process")
 const os = require("os")
 const colors = require("picocolors")
 
-const {CONTAINER_MODE} = process.env
 
 const docker_runtime_check = () => {
-  // skip check on osx except when forcing docker
-  if (os.platform() === "darwin" && CONTAINER_MODE !== "docker") {
-    return
-  }
-
  try {
    execSync("docker version -f json")
  } catch (err) {
package/src/helpers/get_run_configs.js
CHANGED
@@ -5,13 +5,8 @@ const fs = require("fs")
 const ERRORS = require("../errors")
 const exit_with_message = require("./exit_with_message")
 
-const {CONTAINER_MODE} = process.env
-
 // run type is client | server | both
-const get_run_configs = () => {
-  if (!["docker", "native"].includes(CONTAINER_MODE.trim()))
-    throw new Error("expected CONTAINER_MODE to be native|docker")
-
+const get_run_configs = (args) => {
  const cwd = process.cwd()
 
  // check if running parent of rb project
@@ -62,8 +57,9 @@ const get_run_configs = () => {
 
  if (run_configs) {
    // check if there is server we add agent but in native mode only
+    // in docker mode, the agent is a container
    const server = run_configs.find((rc) => rc.type === "server")
-    if (server &&
+    if (server && !args.docker) {
      run_configs.push({
        type: "agent",
        working_dir: server.working_dir,
package/src/helpers/tailscale_tunnel.js
CHANGED
@@ -35,7 +35,7 @@ const start = (run_configs) => {
     execSync(`${TAILSCALE_PATH} serve https:443 / http://127.0.0.1:${server_port}`, {stdio: "inherit"})
     execSync(`${TAILSCALE_PATH} funnel 443 on`, {stdio: "inherit"})
   } catch (err) {
-    console.log("
+    console.log("error starting tailscale, is the tailscale app running ?")
  }
 }
 
@@ -49,7 +49,8 @@ const stop = (run_configs) => {
    execSync(`${TAILSCALE_PATH} serve https:443 / http://127.0.0.1:${server_port} off`, {stdio: "inherit"})
    execSync(`${TAILSCALE_PATH} funnel 443 off`, {stdio: "inherit"})
  } catch (err) {
-    console.log("TAILSCALE ERR", err)
+    // console.log("TAILSCALE ERR", err)
+    console.log("error stopping tailscale, it probably wasn't initally running. you can probably ignore it")
  }
 }
 
package/src/start_command.js
CHANGED
@@ -18,7 +18,7 @@ const verify_project = require("./verify_project")
 // server native + docker + docker-native is way too complex
 // either everything is native, or everything is in docker
 const start_command = async(args) => {
-  const run_configs = get_run_configs()
+  const run_configs = get_run_configs(args)
 
  const run_commands = []
 
@@ -49,7 +49,6 @@ const start_command = async(args) => {
    command = `${command} ${hideBin(process.argv).join(" ")}`
  }
 
-
  // add command
  run_commands.push({
    name,
@@ -62,9 +61,8 @@ const start_command = async(args) => {
 
  const has_server = run_configs.findIndex((c) => c.type === "server") > -1
 
-  // TODO: we shouldnt check in native mode
  // check if docker is running
-  if (
+  if (args.docker) {
    docker_runtime_check()
  }
 
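Taken together, the helper changes above move the docker/native decision off the removed CONTAINER_MODE environment variable and onto the new --docker flag carried on argv. A rough sketch of the resulting agent gating, mirroring the `if (server && !args.docker)` branch shown in get_run_configs.js; the run config array here is a hypothetical project layout, not the package's actual config discovery.

// sketch only: not the package's implementation
const get_run_configs_sketch = (args) => {
  // hypothetical run configs discovered from the project directory
  const run_configs = [{type: "server", working_dir: "./server"}]

  const server = run_configs.find((rc) => rc.type === "server")
  // native mode spawns the agent as an extra process; in docker mode the agent is a container
  if (server && !args.docker) {
    run_configs.push({type: "agent", working_dir: server.working_dir})
  }
  return run_configs
}

console.log(get_run_configs_sketch({docker: false}).map((rc) => rc.type)) // [ 'server', 'agent' ]
console.log(get_run_configs_sketch({docker: true}).map((rc) => rc.type))  // [ 'server' ]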
package/src/sync-dotenv/index.js
CHANGED
@@ -1,137 +1,125 @@
+/* @flow */
 const path = require("path")
 const fs = require("fs")
 const os = require('os')
-const
-
+const parse_env = require("parse-dotenv")
+
 
 const DEFAULT_ENV_PATH = path.resolve(process.cwd(), ".env")
-const
+const SAMPLE_ENV_SUFFIX = ".sample"
+const COMMENT_PREFIX = "__COMMENT_"
 
-const
+const env_to_string = (parsed) =>
  Object.keys(parsed)
    .map(key => `${key}=${parsed[key] || ""}`)
    .join(os.EOL)
    .replace(/(__\w+_\d+__=)/g, "")
 
-
+
+const write_to_dest_env = (path, parsed_env) => {
  try {
-    fs.writeFileSync(path,
+    fs.writeFileSync(path, env_to_string(parsed_env))
  } catch (e) {
    throw new Error(`Sync failed. ${e.message}`)
  }
 }
 
-
-
-
-
-
-
-
+
+const get_empty_obj_props = (obj) => {
+
+  const obj_copy = { ...obj }
+
+  Object.keys(obj_copy).forEach(key => {
+    if (obj_copy[key].includes("#")) {
+      if (obj_copy[key].match(/(".*"|'.*')/g)) {
+        const objArr = obj_copy[key].split(/(".*"|'.*')/)
+        obj_copy[key] = objArr.slice(-1)[0].trim()
      } else {
-        const objArr =
-
+        const objArr = obj_copy[key].split("#")
+        obj_copy[key] = `#${objArr.slice(-1)[0]}`
      }
 
      return
    }
 
-    if (!key.startsWith(
-
+    if (!key.startsWith(COMMENT_PREFIX)) {
+      obj_copy[key] = ""
    }
  })
 
-  return
+  return obj_copy
 }
 
-const
+const get_unique_vars_from_envs = (
  env,
-
+  env_example,
  config = {}
 ) => {
-  const
-
-  const uniqueKeys = new Set(Object.keys(env))
-  const uniqueKeysArray = Array.from(uniqueKeys)
+  const unique_keys = new Set(Object.keys(env))
+  const unique_keys_list = Array.from(unique_keys)
 
-  const
-  if (key.startsWith(
-    return { [key]:
+  const unique_from_source = unique_keys_list.map((key) => {
+    if (key.startsWith(COMMENT_PREFIX)) return { [key]: env[key] }
+    return { [key]: env_example[key] || "" }
  })
 
-  const
-    .map(key => ({ [key]:
+  const preseved_vars = Object.keys(env_example)
+    .map(key => ({ [key]: env_example[key] }))
  // .filter(env => {
  //   console.log("ICICIC", env)
  //   return ignoreKeys.length && ignoreKeys.includes(Object.keys(env)[0])
  // })
 
-  return [...
+  return [...unique_from_source, ...preseved_vars]
 }
 
 
-const
-
-
+const sync_with_dest_env = (
+  env_path,
+  env_example_path
 ) => {
  const config = { emptyLines: true, comments: true }
 
-  const
-
+  const source_env = get_empty_obj_props(
+    parse_env(env_path, config)
  )
-  const targetEnv = parseEnv(envExamplePath)
 
-  const
-
-
+  const target_env = parse_env(env_example_path)
+
+  const unique_vars = get_unique_vars_from_envs(source_env, target_env, config)
+  const env_copy = {}
+
+  unique_vars.forEach(env => {
    const [key] = Object.keys(env)
-
+    env_copy[key] = env[key]
  })
 
-
+  write_to_dest_env(env_example_path, env_copy)
 }
 
 
 const sync_dotenv = (
-
-
-  samples
+  env_path,
+  _dest_env,
 ) => {
-  if (
-    throw new Error(
+  if (env_path && !fs.existsSync(env_path)) {
+    throw new Error(`env_path: ${env_path} doesn't exist`)
  }
 
-  const
-    ? [path.resolve(process.cwd(), sample_env || DEFAULT_SAMPLE_ENV)]
-    : globby
-        .sync(samples)
-        .map((sample) => path.resolve(process.cwd(), sample))
-
-  const envPath = source
-    ? fs.existsSync(source)
-      ? source
-      : null
-    : DEFAULT_ENV_PATH
-
-  if (envPath === null) throw new Error(`${source} not found`)
-
-  if (!source && !fs.existsSync(envPath)) throw new Error(".env doesn't exists")
-
-  if (!sample_env_paths.length)
-    throw new Error(`${samples} did not match any file`)
+  const source_path = env_path || DEFAULT_ENV_PATH
 
-  if (!fs.existsSync(
-
-    // throw new Error(`${sample_env || path.basename(DEFAULT_SAMPLE_ENV)} not found`)
+  if (!fs.existsSync(source_path)) {
+    throw new Error(`${env_path || ".env"} file not found`)
  }
 
-  const
+  const dest_env_path = _dest_env || `${source_path}${SAMPLE_ENV_SUFFIX}`
 
-
-
+  // creates the sample env file if it doesn't exist
+  if (!fs.existsSync(dest_env_path)) {
+    fs.writeFileSync(dest_env_path, "")
  }
 
-
+  sync_with_dest_env(source_path, dest_env_path)
 }
 
 
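The rewritten sync path above boils down to: parse the source .env (apparently with the parse-dotenv package), blank every value that is not a comment placeholder, merge in keys already present in the destination file, and write the result to `<source>.sample` (or the explicit dest_env). A simplified sketch of the blanking step follows, assuming parse-dotenv yields a flat object whose comment lines appear under `__COMMENT_n__` keys, as the COMMENT_PREFIX constant suggests; the inline "#" handling of get_empty_obj_props is omitted here.

// sketch only: not the package's implementation
const COMMENT_PREFIX = "__COMMENT_"

const blank_values = (parsed) => {
  const copy = {...parsed}
  Object.keys(copy).forEach((key) => {
    // comment placeholders keep their text so comments survive in the sample file
    if (!key.startsWith(COMMENT_PREFIX)) copy[key] = ""
  })
  return copy
}

console.log(blank_values({
  __COMMENT_1__: "# database",
  DB_URL: "postgres://localhost/app",
  DB_POOL: "10"
}))
// -> { __COMMENT_1__: '# database', DB_URL: '', DB_POOL: '' }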
package/src/sync-dotenv/LICENSE
DELETED
@@ -1,22 +0,0 @@
-MIT License
-
-Copyright (c) 2019 Luqman Olushi O.
-Copyright (c) 2023 rpcbase
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.