@rpcbase/cli 0.72.0 → 0.74.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +13 -46
- package/src/cmd-deploy.js +137 -0
- package/src/{wait-for → cmd-wait-for}/index.js +6 -7
- package/src/{wait-for → cmd-wait-for}/wait-for.sh +1 -1
- package/src/index.js +71 -0
- package/src/log.js +0 -0
- package/bin.js +0 -129
- package/src/helpers/docker_runtime_check.js +0 -42
- package/src/helpers/exit_with_message.js +0 -12
- package/src/helpers/get_root_dir.js +0 -19
- package/src/helpers/tailscale_tunnel.js +0 -78
- package/src/lint-staged.js +0 -17
- package/src/lint.js +0 -17
- package/src/print_versions.js +0 -15
- package/src/run_agent.js +0 -17
- package/src/start_command.js +0 -61
- package/src/stylelint.js +0 -28
- package/src/sync-dotenv/index.js +0 -126
- package/src/update/index.js +0 -87
- package/src/verify_project/index.js +0 -34
- package/src/verify_project/thresholds.schema.json +0 -28
- package/src/verify_project/verify_env.js +0 -88
- package/src/versions/helpers/sanitize_str.js +0 -5
- package/src/versions/increment-pkg/get_gh_client.js +0 -26
- package/src/versions/increment-pkg/get_gh_token.js +0 -30
- package/src/versions/increment-pkg/index.js +0 -137
- package/stylelint/index.js +0 -15
package/src/start_command.js
DELETED
@@ -1,61 +0,0 @@
-/* @flow */
-// const debug = require("debug")("rb-cli")
-const path = require("path")
-const colors = require("picocolors")
-const {spawn} = require("child_process")
-
-const {hideBin} = require("yargs/helpers")
-
-const docker_runtime_check = require("./helpers/docker_runtime_check")
-const exit_with_message = require("./helpers/exit_with_message")
-const tailscale_tunnel = require("./helpers/tailscale_tunnel")
-const verify_project = require("./verify_project")
-
-
-const start_command = async(args) => {
-  // check if docker is installed and running, and create the network if necessary
-  docker_runtime_check()
-
-  // start the tailscale tunnel
-  console.log("Warning: tmp disabled tailscale tunnels")
-  // tailscale_tunnel.start(run_configs)
-
-  const cwd = process.cwd()
-
-  const working_dir = path.join(cwd, "./server/server")
-  const command = "./node_modules/.bin/bundler-server"
-
-  const ps = spawn(command, ["dev", ...hideBin(process.argv)], {cwd: working_dir, stdio: "inherit", shell: true})
-
-  ps.on("close", (code) => {
-    // tailscale_tunnel.stop(run_configs)
-    console.log("Done.")
-  })
-
-  // let project_errors
-  // result
-  //   .then(() => {
-  //     console.log("Done.")
-  //   })
-  //   .catch((err) => {
-  //     if (project_errors?.length > 0) {
-  //       console.log(`\n${colors.bold("exited due to configuration errors:")}`)
-  //       project_errors.forEach((err) => {
-  //         console.log(err)
-  //       })
-  //     }
-  //   })
-
-  // project_errors = await verify_project(configs)
-
-  // if (project_errors.length > 0) {
-  //   commands.forEach((c) => {
-  //     c.kill()
-  //   })
-  // }
-
-  // server cmd to stop tailscale serve
-  // const server_command = commands.find((c) => c.name === "server")
-}
-
-module.exports = start_command
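
Note: the spawned dev command above forwards the CLI's remaining argv via hideBin. A small illustration of the resulting invocation (argv values invented, not from the package):

    const {hideBin} = require("yargs/helpers")

    // with process.argv = ["node", "/usr/local/bin/rb", "start", "--port", "3000"],
    // hideBin(process.argv) yields ["start", "--port", "3000"], so the spawn above runs:
    //   ./node_modules/.bin/bundler-server dev start --port 3000
    console.log(hideBin(["node", "/usr/local/bin/rb", "start", "--port", "3000"]))
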
package/src/stylelint.js
DELETED
@@ -1,28 +0,0 @@
-const path = require("path")
-const fs = require("fs")
-const {spawn} = require("child_process")
-
-const {hideBin} = require("yargs/helpers")
-
-
-module.exports = () => {
-  const args = hideBin(process.argv).slice(1) // remove first arg
-
-  const config_path = path.join(process.cwd(), "./.stylelintrc.js")
-  const has_config = fs.existsSync(config_path)
-
-  if (!has_config) {
-    console.error("cannot find .stylelintrc.js config file, will exit without failure")
-    process.exit(0)
-    return
-  }
-
-  const stylelint_path = path.dirname(require.resolve("stylelint/package.json"))
-  const bin_path = path.join(stylelint_path, "./bin/stylelint.mjs")
-
-  const ps = spawn(bin_path, ["--config", config_path, ...args], {stdio: "inherit"})
-
-  ps.on("close", (code) => {
-    process.exit(code)
-  })
-}
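
Note: the wrapper above locates the stylelint binary through require.resolve rather than a hardcoded node_modules path. A sketch of that resolution, with invented install paths:

    const path = require("path")

    // require.resolve("stylelint/package.json") returns the resolved file path,
    // e.g. "/my-app/node_modules/stylelint/package.json" (path invented);
    // its dirname is the package root, so the bin lands at
    //   /my-app/node_modules/stylelint/bin/stylelint.mjs
    const pkg_root = path.dirname(require.resolve("stylelint/package.json"))
    console.log(path.join(pkg_root, "./bin/stylelint.mjs"))
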
package/src/sync-dotenv/index.js
DELETED
@@ -1,126 +0,0 @@
-/* @flow */
-const path = require("path")
-const fs = require("fs")
-const os = require('os')
-const parse_env = require("parse-dotenv")
-
-
-const DEFAULT_ENV_PATH = path.resolve(process.cwd(), ".env")
-const SAMPLE_ENV_SUFFIX = ".sample"
-const COMMENT_PREFIX = "__COMMENT_"
-
-const env_to_string = (parsed) =>
-  Object.keys(parsed)
-    .map(key => `${key}=${parsed[key] || ""}`)
-    .join(os.EOL)
-    .replace(/(__\w+_\d+__=)/g, "")
-
-
-const write_to_dest_env = (path, parsed_env) => {
-  try {
-    fs.writeFileSync(path, env_to_string(parsed_env))
-  } catch (e) {
-    throw new Error(`Sync failed. ${e.message}`)
-  }
-}
-
-
-const get_empty_obj_props = (obj) => {
-
-  const obj_copy = { ...obj }
-
-  Object.keys(obj_copy).forEach(key => {
-    if (obj_copy[key].includes("#")) {
-      if (obj_copy[key].match(/(".*"|'.*')/g)) {
-        const objArr = obj_copy[key].split(/(".*"|'.*')/)
-        obj_copy[key] = objArr.slice(-1)[0].trim()
-      } else {
-        const objArr = obj_copy[key].split("#")
-        obj_copy[key] = `#${objArr.slice(-1)[0]}`
-      }
-
-      return
-    }
-
-    if (!key.startsWith(COMMENT_PREFIX)) {
-      obj_copy[key] = ""
-    }
-  })
-
-  return obj_copy
-}
-
-const get_unique_vars_from_envs = (
-  env,
-  env_example,
-  config = {}
-) => {
-  const unique_keys = new Set(Object.keys(env))
-  const unique_keys_list = Array.from(unique_keys)
-
-  const unique_from_source = unique_keys_list.map((key) => {
-    if (key.startsWith(COMMENT_PREFIX)) return { [key]: env[key] }
-    return { [key]: env_example[key] || "" }
-  })
-
-  const preseved_vars = Object.keys(env_example)
-    .map(key => ({ [key]: env_example[key] }))
-    // .filter(env => {
-    //   console.log("ICICIC", env)
-    //   return ignoreKeys.length && ignoreKeys.includes(Object.keys(env)[0])
-    // })
-
-  return [...unique_from_source, ...preseved_vars]
-}
-
-
-const sync_with_dest_env = (
-  env_path,
-  env_example_path
-) => {
-  const config = { emptyLines: true, comments: true }
-
-  const source_env = get_empty_obj_props(
-    parse_env(env_path, config)
-  )
-
-  const target_env = parse_env(env_example_path)
-
-  const unique_vars = get_unique_vars_from_envs(source_env, target_env, config)
-  const env_copy = {}
-
-  unique_vars.forEach(env => {
-    const [key] = Object.keys(env)
-    env_copy[key] = env[key]
-  })
-
-  write_to_dest_env(env_example_path, env_copy)
-}
-
-
-const sync_dotenv = (
-  env_path,
-  _dest_env,
-) => {
-  if (env_path && !fs.existsSync(env_path)) {
-    throw new Error(`env_path: ${env_path} doesn't exist`)
-  }
-
-  const source_path = env_path || DEFAULT_ENV_PATH
-
-  if (!fs.existsSync(source_path)) {
-    throw new Error(`${env_path || ".env"} file not found`)
-  }
-
-  const dest_env_path = _dest_env || `${source_path}${SAMPLE_ENV_SUFFIX}`
-
-  // creates the sample env file if it doesn't exist
-  if (!fs.existsSync(dest_env_path)) {
-    fs.writeFileSync(dest_env_path, "")
-  }
-
-  sync_with_dest_env(source_path, dest_env_path)
-}
-
-
-module.exports = sync_dotenv
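
Note: sync_dotenv mirrored a local .env into a sample file with values blanked and inline comments preserved. A minimal usage sketch, assuming the pre-0.74.0 file layout and invented project paths:

    // illustrative only; with no arguments it reads <cwd>/.env
    // and writes <cwd>/.env.sample
    const sync_dotenv = require("@rpcbase/cli/src/sync-dotenv")

    sync_dotenv("/my-app/.env", "/my-app/.env.sample")
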
package/src/update/index.js
DELETED
@@ -1,87 +0,0 @@
-/* @flow */
-const fs = require("fs")
-const path = require("path")
-const util = require("util")
-const {execSync} = require("child_process")
-
-const exec = util.promisify(require("child_process").exec)
-
-
-const get_package_files = () => {
-  const working_dir = process.cwd()
-  const out = execSync(`find . -name "package.json" -not -path ".*/node_modules/*"`)
-  // TODO: sort files
-  const find_output = out.toString().trim().split("\n")
-
-  const files = find_output.map((f) => path.join(working_dir, f))
-
-  return files
-}
-
-
-const get_outdated = async({dir, all}) => {
-
-  const output = execSync("npm outdated --json || true", {cwd: dir})
-  const outdated = JSON.parse(output.toString())
-
-  const target_deps = {}
-
-  Object.keys(outdated)
-    // get our rpcbase packages only
-    .filter((name) => all ? true : name.startsWith("@rpcbase"))
-    // skip if package is linked but already at latest version
-    .filter((name) => {
-      const version_data = outdated[name]
-      if (version_data.wanted === version_data.latest &&
-          version_data.current === "linked") {
-        return false
-      }
-      return true
-    })
-    .forEach((key) => {
-      target_deps[key] = outdated[key].latest
-    })
-
-  return target_deps
-}
-
-
-const update = async(args) => {
-  const working_dir = process.cwd()
-
-  const package_files = get_package_files()
-
-  for (let i = 0; i < package_files.length; i++) {
-    const package_file = package_files[i]
-    const dir = path.dirname(package_file)
-
-    console.log("updating", path.relative(working_dir, dir) || ".")
-
-    const outdated = await get_outdated({dir, all: args.all})
-
-    const package_json = JSON.parse(fs.readFileSync(package_file, "utf8"))
-
-    let has_updates = false
-
-    Object.keys(outdated).forEach((k) => {
-      if (package_json.dependencies[k]) {
-        package_json.dependencies[k] = outdated[k]
-        has_updates = true
-      } else if (package_json.devDependencies[k]) {
-        package_json.devDependencies[k] = outdated[k]
-        has_updates = true
-      }
-
-      if (has_updates) {
-        console.log("updated", k, "to", outdated[k])
-      }
-    })
-
-    if (has_updates) {
-      fs.writeFileSync(package_file, `${JSON.stringify(package_json, null, 2)}\n`)
-      execSync("yarn", {cwd: dir, stdio: "inherit"})
-    }
-  }
-}
-
-module.exports = update
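
Note: get_outdated filters the JSON shape that `npm outdated --json` prints. A hypothetical payload and the result of the filters above:

    // invented `npm outdated --json` output
    const outdated = {
      "@rpcbase/server": {current: "1.2.0", wanted: "1.2.3", latest: "1.3.0"},
      "@rpcbase/ui": {current: "linked", wanted: "2.0.0", latest: "2.0.0"}, // linked and already latest: skipped
      "lodash": {current: "4.17.0", wanted: "4.17.21", latest: "4.17.21"}   // not @rpcbase-scoped: skipped unless --all
    }
    // target_deps ends up as {"@rpcbase/server": "1.3.0"}
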
package/src/verify_project/index.js
DELETED
@@ -1,34 +0,0 @@
-/* @flow */
-const _flatten = require("lodash/flatten")
-const colors = require("picocolors")
-const Promise = require("bluebird")
-
-const get_root_dir = require("../helpers/get_root_dir")
-
-const verify_env = require("./verify_env")
-
-
-const verify_project = async(run_configs) => {
-  let result_errors = []
-
-  console.log("tmp skipping verify env")
-  return result_errors
-
-
-  const project_root_dir = get_root_dir(run_configs)
-
-  // get env file from server and validate required services and ports
-  const env_errors = await verify_env(project_root_dir)
-  if (env_errors.length > 0) {
-    let error_str = `${colors.red("error:")} ${colors.bold(".env")} misconfigurations\n`
-    env_errors.forEach((err) => {
-      error_str += ` ${err}\n`
-    })
-    result_errors.push(error_str)
-  }
-
-  return result_errors
-}
-
-
-module.exports = verify_project
package/src/verify_project/thresholds.schema.json
DELETED
@@ -1,28 +0,0 @@
-{
-  "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "$id": "https://schemas.rpcbase.com/schemas/thresholds.schema.json",
-  "title": "thresholds",
-  "type": "object",
-  "properties": {
-    "statements": {
-      "type": "number",
-      "minimum": 0,
-      "maximum": 100,
-    },
-    "branches": {
-      "type": "number",
-      "minimum": 0,
-      "maximum": 100,
-    },
-    "functions": {
-      "type": "number",
-      "minimum": 0,
-      "maximum": 100,
-    },
-    "lines": {
-      "type": "number",
-      "minimum": 0,
-      "maximum": 100,
-    }
-  }
-}
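
Note: for reference, a thresholds object the schema above is meant to accept (values invented):

    {
      "statements": 80,
      "branches": 70,
      "functions": 75,
      "lines": 80
    }
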
package/src/verify_project/verify_env.js
DELETED
@@ -1,88 +0,0 @@
-/* @flow */
-const fs = require("fs")
-const path = require("path")
-
-const colors = require("picocolors")
-const dotenv = require("dotenv")
-const validator = require("validator")
-
-
-const validate_port = (val) => {
-  if (!validator.isPort(val)) {
-    return `expected ${colors.bold(val)} to be a valid port number`
-  }
-}
-
-const SERVICES = [
-  {
-    name: "client",
-    required_env: {
-      "CLIENT_PORT": validate_port
-    },
-  },
-  {
-    name: "server",
-    required_env: {
-      "SERVER_PORT": validate_port
-    },
-  },
-  {
-    name: "database",
-    required_env: {
-      "DATABASE_PORT": validate_port,
-      "RB_APP_NAME": (val) => {
-        if (!/^[$A-Z_][0-9A-Z_$-]*$/i.test(val)) {
-          return `expected ${colors.bold(`'${val}'`)} to be a valid app name`
-        }
-      }
-    },
-  },
-  // {
-  //   name: "worker-queue",
-  //   required_env: {
-  //     "WORKER_QUEUE_PORT": validate_port
-  //   },
-  // },
-  // {
-  //   name: "session-store",
-  //   required_env: {
-  //     "SESSION_STORE_PORT": validate_port
-  //   },
-  // },
-]
-
-
-// TODO: service definitions should be validated in docker compose as well
-const verify_env = async(root_dir) => {
-  const env_path = path.join(root_dir, "./server/server/.env")
-  const env_buf = fs.readFileSync(env_path)
-  const parsed_env = dotenv.parse(env_buf)
-
-  const result_errors = []
-
-  SERVICES.forEach((service) => {
-    Object.keys(service.required_env).forEach((k) => {
-      const val_fn = service.required_env[k]
-      const env_val = parsed_env[k]
-
-      if (!env_val) {
-        result_errors.push(`${colors.yellow(k)} is missing`)
-        return
-      }
-
-      const err = val_fn(env_val)
-      if (err) {
-        result_errors.push(`${colors.yellow(k)} ${err}`)
-      }
-    })
-  })
-
-  if (parsed_env.DATABASE_NAME) {
-    throw new Error("DATABASE_NAME must not be defined in env, it has been replaced by RB_APP_NAME")
-  }
-
-  return result_errors
-}
-
-
-module.exports = verify_env
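
Note: a standalone sketch of the same port checks, using the same dotenv/validator calls with invented .env content:

    const dotenv = require("dotenv")
    const validator = require("validator")

    const parsed = dotenv.parse("CLIENT_PORT=not-a-port\nDATABASE_PORT=5432\nRB_APP_NAME=my_app\n")

    for (const key of ["CLIENT_PORT", "SERVER_PORT", "DATABASE_PORT"]) {
      const val = parsed[key]
      if (!val) console.log(`${key} is missing`)                                 // SERVER_PORT is missing
      else if (!validator.isPort(val)) console.log(`${key} is not a valid port`) // CLIENT_PORT fails
    }
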
package/src/versions/increment-pkg/get_gh_client.js
DELETED
@@ -1,26 +0,0 @@
-/* @flow */
-const {Octokit} = require("octokit")
-const colors = require("picocolors")
-
-const get_gh_token = require("./get_gh_token")
-
-
-const get_gh_client = () => {
-  const gh_token = get_gh_token()
-
-  if (!gh_token) return
-
-  const octokit = new Octokit({
-    auth: gh_token
-  })
-
-  if (!octokit) {
-    console.log(colors.bold(colors.yellow("rb increment-pkg:")), "unable to get a github client, do you have a token in .npmrc or .github_token ?")
-    process.exit(0)
-    return
-  }
-
-  return octokit
-}
-
-module.exports = get_gh_client
package/src/versions/increment-pkg/get_gh_token.js
DELETED
@@ -1,30 +0,0 @@
-/* @flow */
-const fs = require("fs")
-const path = require("path")
-
-const get_gh_token = () => {
-  let token
-
-  const npmrc_path = path.join(process.cwd(), "./.npmrc")
-  const gh_token_path = path.join(process.cwd(), "./.gh_token")
-
-  if (fs.existsSync(npmrc_path)) {
-    const content = fs.readFileSync(npmrc_path, "utf8")
-    const match = content.match(/^\/\/npm\.pkg\.github\.com\/:_authToken=(\S+)$/m)
-
-    if (match) {
-      token = match[1]
-    }
-  } else if (fs.existsSync(gh_token_path)) {
-    try {
-      token = JSON.parse(fs.readFileSync(gh_token_path))
-    } catch (err) {
-      console.log("unable to parse token from .gh_token")
-    }
-  }
-
-
-  return token
-}
-
-module.exports = get_gh_token
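
Note: the .npmrc branch matches only the GitHub Packages registry auth line. What the regex above captures, with a placeholder token value:

    const content = "//npm.pkg.github.com/:_authToken=ghp_placeholder123\n"
    const match = content.match(/^\/\/npm\.pkg\.github\.com\/:_authToken=(\S+)$/m)
    console.log(match[1]) // "ghp_placeholder123"
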
package/src/versions/increment-pkg/index.js
DELETED
@@ -1,137 +0,0 @@
-/* @flow */
-const fs = require("fs")
-const path = require("path")
-const {execSync} = require("child_process")
-
-const Promise = require("bluebird")
-const semver = require("semver")
-
-const sanitize_str = require("../helpers/sanitize_str")
-
-const get_gh_client = require("./get_gh_client")
-
-const octokit = get_gh_client()
-
-
-const resolve_repo = () => {
-  const origin_url = execSync("git remote get-url origin").toString().trim()
-  const owner = path.dirname(origin_url).split(":").pop()
-  const repo = path.basename(origin_url).replace(/\.git$/, "")
-
-  const branch_name = execSync("git branch --show-current").toString().trim()
-
-  return {repo, owner, branch_name}
-}
-
-
-const get_latest_branch_version = async({owner, repo, remote_path, branch_name}) => {
-  let res
-  try {
-    res = await octokit.request("GET /repos/{owner}/{repo}/contents/{path}", {
-      owner,
-      repo,
-      path: remote_path,
-      ref: branch_name,
-    })
-  } catch (err) {
-    if (err.response.status === 404) {
-      return
-    } else {
-      console.log(err)
-    }
-  }
-
-  const file_contents = Buffer.from(res.data.content, "base64").toString()
-  const pack_obj = JSON.parse(file_contents)
-  const branch_version = pack_obj.version
-
-  return branch_version
-}
-
-
-// lint-staged script to increment package version when any package in the pkg/ folder is modified
-const increment_pkg = async(args) => {
-  const {files} = args
-
-  const touched_packages = {}
-
-  const packages_dir = path.join(process.cwd(), "./pkg")
-
-  files.forEach((f) => {
-    const rel_path = path.relative(packages_dir, f)
-    // get package dirname
-    const [pack_dir] = path.dirname(rel_path).split("/")
-    // check if it is actually a package
-    const pack_json_path = path.join(packages_dir, `./${pack_dir}/package.json`)
-    if (fs.existsSync(pack_json_path)) {
-      touched_packages[pack_dir] = true
-    }
-  })
-
-  const {repo, owner, branch_name} = resolve_repo()
-
-  const process_package = async(pack_dir) => {
-    const local_pack_path = path.join(packages_dir, `./${pack_dir}/package.json`)
-    const local_pack = JSON.parse(fs.readFileSync(local_pack_path))
-
-    const local_version = local_pack.version
-    if (!local_version) {
-      // skipping because package has no local version
-      return
-    }
-
-    if (!semver.valid(local_version)) {
-      console.log("error:", "local version not valid, got:", JSON.stringify(local_version), "skipping")
-      return
-    }
-
-    // get remote + next version
-    const remote_path = `pkg/${pack_dir}/package.json`
-
-    const master_version = await get_latest_branch_version({
-      owner,
-      repo,
-      remote_path,
-      branch_name: "master"
-    })
-
-    if (!master_version || !semver.valid(master_version)) {
-      console.log("error:", "remote master version not valid, got:", JSON.stringify(master_version), "skipping")
-      return
-    }
-
-    const bumped_version = semver.inc(master_version, "minor")
-
-    if (semver.lt(local_version, bumped_version)) {
-      local_pack.version = bumped_version,
-      fs.writeFileSync(local_pack_path, JSON.stringify(local_pack, null, 2) + "\n")
-
-      console.log("bumped", local_pack.name, "to", bumped_version)
-
-      execSync(`git add ${local_pack_path}`)
-    }
-
-    // TODO: this will run on the PR workflow but not before commit here, as we only want a minor bump when incrementing from master, the release channels are handled in CI
-    // const {remote_version, next_version} = await get_remote_next_version({owner, repo, branch_name, remote_path, local_version})
-    //
-    // if (!semver.valid(remote_version)) {
-    //   console.log("warning:", "live version not valid, got:", JSON.stringify(remote_version), "skipping")
-    //   return
-    // }
-    //
-    // // if (next_version && semver.neq(remote_version, next_version)) {
-    // if (next_version && semver.neq(local_version, next_version)) {
-    //
-    //
    //   local_pack.version = next_version
    //
    //   fs.writeFileSync(local_pack_path, JSON.stringify(local_pack, null, 2) + "\n")
    //   // add to git index, in case change was not triggered by package.json
    //   execSync(`git add ${local_pack_path}`)
    // }
-  }
-
-  await Promise.map(Object.keys(touched_packages), process_package)
-}
-
-module.exports = increment_pkg
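
Note: the bump rule in process_package compares the local version to a minor increment of master's published version. With invented versions:

    const semver = require("semver")

    const bumped = semver.inc("1.4.2", "minor") // "1.5.0"
    semver.lt("1.4.2", bumped)                  // true  -> local package.json is rewritten to 1.5.0
    semver.lt("1.6.0", bumped)                  // false -> already ahead, left alone
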
package/stylelint/index.js
DELETED
@@ -1,15 +0,0 @@
-module.exports = {
-  "extends": [
-    "stylelint-config-standard-scss"
-  ],
-  "rules": {
-    "color-named": ["never", {severity: "warning"}],
-    "color-no-hex": [true, {severity: "warning"}],
-    "declaration-block-no-redundant-longhand-properties": null,
-    "declaration-empty-line-before": null,
-    "no-descending-specificity": [true, {severity: "warning"}],
-    "scss/comment-no-empty": null,
-    "scss/no-global-function-names": [true, {severity: "warning"}],
-    "selector-class-pattern": null,
-  }
-}