server-control-s3 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -0
- package/package.json +35 -0
- package/scripts/instance_update.sh +13 -0
- package/scripts/remove_old_target.sh +45 -0
- package/scripts/update_to_hash.sh +85 -0
- package/src/index.js +617 -0
- package/src/request.js +40 -0
package/README.md
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
# server-control-s3
|
|
2
|
+
|
|
3
|
+
* create AMI with supervisor, git and node
|
|
4
|
+
* create user node with home in /var/node
|
|
5
|
+
* clone your project
|
|
6
|
+
|
|
7
|
+
sample config to add to your app:
|
|
8
|
+
|
|
9
|
+
```javascript
|
|
10
|
+
|
|
11
|
+
sc.init(app, {
|
|
12
|
+
prefix: '/',
|
|
13
|
+
repo_dir: '/var/node/project',
|
|
14
|
+
secret: "update-secret"
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
* assign an IAM role with the following configuration:
|
|
20
|
+
```json
|
|
21
|
+
{
|
|
22
|
+
"Version": "2012-10-17",
|
|
23
|
+
"Statement": [
|
|
24
|
+
{
|
|
25
|
+
"Sid": "Stmt1412101976000",
|
|
26
|
+
"Effect": "Allow",
|
|
27
|
+
"Action": [
|
|
28
|
+
"ec2:DescribeInstances"
|
|
29
|
+
],
|
|
30
|
+
"Resource": [
|
|
31
|
+
"*"
|
|
32
|
+
]
|
|
33
|
+
},
|
|
34
|
+
{
|
|
35
|
+
"Sid": "Stmt1412101976001",
|
|
36
|
+
"Effect": "Allow",
|
|
37
|
+
"Action": [
|
|
38
|
+
"ec2:DescribeLaunchTemplateVersions",
|
|
39
|
+
"ec2:ModifyLaunchTemplate",
|
|
40
|
+
"ec2:CreateLaunchTemplateVersion"
|
|
41
|
+
],
|
|
42
|
+
"Resource": [
|
|
43
|
+
"*"
|
|
44
|
+
]
|
|
45
|
+
},
|
|
46
|
+
{
|
|
47
|
+
"Sid": "Stmt1412102095000",
|
|
48
|
+
"Effect": "Allow",
|
|
49
|
+
"Action": [
|
|
50
|
+
"autoscaling:DescribeAutoScalingGroups"
|
|
51
|
+
],
|
|
52
|
+
"Resource": [
|
|
53
|
+
"*"
|
|
54
|
+
]
|
|
55
|
+
}
|
|
56
|
+
]
|
|
57
|
+
}
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
* also make sure your role can `iam:PassRole` itself
|
|
61
|
+
```json
|
|
62
|
+
{
|
|
63
|
+
"Version": "2012-10-17",
|
|
64
|
+
"Statement": [
|
|
65
|
+
{
|
|
66
|
+
"Effect": "Allow",
|
|
67
|
+
"Action": "iam:PassRole",
|
|
68
|
+
"Resource": "arn:aws:iam::833562685972:role/<rolename>"
|
|
69
|
+
}
|
|
70
|
+
]
|
|
71
|
+
}
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
* launch!
|
package/package.json
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "server-control-s3",
|
|
3
|
+
"version": "0.0.11",
|
|
4
|
+
"author": {
|
|
5
|
+
"name": "Jim Lake"
|
|
6
|
+
},
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"description": "Easy updating of Amazon AWS instances from s3 packages",
|
|
9
|
+
"keywords": [
|
|
10
|
+
"aws",
|
|
11
|
+
"update",
|
|
12
|
+
"asg",
|
|
13
|
+
"ec2"
|
|
14
|
+
],
|
|
15
|
+
"repository": {
|
|
16
|
+
"type": "git",
|
|
17
|
+
"url": "git@github.com:jim-lake/server-control-s3.git"
|
|
18
|
+
},
|
|
19
|
+
"files": [
|
|
20
|
+
"src/*",
|
|
21
|
+
"scripts/*"
|
|
22
|
+
],
|
|
23
|
+
"main": "src/index.js",
|
|
24
|
+
"scripts": {
|
|
25
|
+
"lint": "eslint src --ext js",
|
|
26
|
+
"pretty": "prettier --write src/*.js"
|
|
27
|
+
},
|
|
28
|
+
"dependencies": {
|
|
29
|
+
"async": "3.2.4",
|
|
30
|
+
"aws-sdk": "2.1366.0",
|
|
31
|
+
"body-parser": "1.20.2",
|
|
32
|
+
"cookie-parser": "1.4.6",
|
|
33
|
+
"request": "2.88.2"
|
|
34
|
+
}
|
|
35
|
+
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
#!/bin/bash
# Run at instance boot: source SC_UPDATE_URL / SC_REPO_DIR from the EC2
# user-data, and if an update URL is present, run update_to_hash.sh as the
# "node" user from inside the repo dir. All output goes to
# /tmp/instance-update.log.
source <( wget "http://169.254.169.254/latest/user-data" -O - 2>/dev/null )

SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

if [ "${SC_UPDATE_URL}" != "" ]; then
  echo `date` ": Updating using URL: $SC_UPDATE_URL" >>/tmp/instance-update.log
  # Quote the repo dir so paths containing spaces don't word-split
  # (the original `pushd $SC_REPO_DIR` was unquoted).
  pushd "$SC_REPO_DIR" >/dev/null 2>/dev/null
  su node -c "${SCRIPT_DIR}/update_to_hash.sh ${SC_UPDATE_URL}" >>/tmp/instance-update.log 2>&1
  popd >/dev/null 2>/dev/null
else
  echo `date` ": No SC_UPDATE_URL so not doing anything" >>/tmp/instance-update.log
fi
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
#!/bin/bash
# Remove the previous deploy directory recorded by update_to_hash.sh.
# The repo dir is a symlink; the file <repo_dir>/.sc_old_target names the
# directory that was deployed before the last update. Every guard below
# exits 0 (success) because "nothing to clean up" is not an error.

if [ "$#" -lt 1 ]; then
  echo "Usage: $(basename $0) <repo_dir>"
  exit 1
fi

REPO_DIR=$1
# Resolve the symlink so we can compare against the old target's real path.
REAL_REPO_DIR=$(readlink -fn "$REPO_DIR")

OLD_TARGET_FILE="$REPO_DIR/.sc_old_target"
if ! [[ -f "$OLD_TARGET_FILE" ]] ; then
  echo "Old target file doesnt exist, quitting"
  exit 0
fi

OLD_TARGET=$(cat "$OLD_TARGET_FILE")
# Stale/garbage record: drop the marker file and move on.
if ! [[ -d "$OLD_TARGET" ]] ; then
  echo "Old target: $OLD_TARGET is not a dir, ignoring"
  rm -f "$OLD_TARGET_FILE"
  exit 0
fi

# Refuse to rm -rf through a symlink; that would delete whatever it points at.
if [[ -L "$OLD_TARGET" ]]; then
  echo " - WARNING!!! Old Target: $OLD_TARGET is a link?!"
  rm -f "$OLD_TARGET_FILE"
  exit 0
fi

# Safety: never delete the directory currently deployed (e.g. after a
# rollback the "old" target may be the live one again).
REAL_OLD_TARGET=$(readlink -fn "$OLD_TARGET")
if [[ "$REAL_REPO_DIR" = "$REAL_OLD_TARGET" ]] ; then
  echo "Old and new are the same, not removing"
  rm -f "$OLD_TARGET_FILE"
  exit 0
fi

echo "removing old target: $OLD_TARGET"
rm -rf "$OLD_TARGET"
if [ "$?" -ne 0 ]; then
  echo "Remove old target failed"
  exit 2
fi

# Only clear the marker once removal succeeded, so a failed run can retry.
rm -f "$OLD_TARGET_FILE"
exit 0
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
#!/bin/bash
# Download a build tarball (http(s) or s3://), unpack it into a fresh sibling
# directory, run `npm ci`, then atomically repoint the repo-dir symlink at the
# new directory. The previous target's path is written to
# <new_dir>/.sc_old_target so remove_old_target.sh can clean it up later.
# Must be run from inside the repo dir (which should be a symlink).

if [ "$#" -lt 1 ]; then
  echo "Usage: $(basename $0) <url>"
  echo " - script should be run from the repo dir"
  exit 1
fi

URL=$1
REPO_DIR=`pwd`
# Resolve the symlink to find the directory currently deployed.
OLD_TARGET=$(readlink -fn "$REPO_DIR")
# NOTE(review): TARGET_NAME is computed but never used below.
TARGET_NAME=$(basename "$REPO_DIR")
PARENT_DIR=$(dirname "$REPO_DIR")
# "<prefix>/<hash>.tar.gz" -> "<hash>"; new dir is "<hash>-<timestamp>".
ARCHIVE_NAME=${URL##*/}
ARCHIVE_BASE_NAME=${ARCHIVE_NAME%%.tar.gz}
DATE=$(date +%Y%m%d-%H%M%S)
TMP_DIR="$PARENT_DIR/$ARCHIVE_BASE_NAME-$DATE"

echo "updating repo with url: $URL"
echo " - whoami: $(whoami)"
echo " - HOME: $HOME"
echo " - REPO_DIR: $REPO_DIR"
echo " - OLD_TARGET: $OLD_TARGET"

# The atomic swap below only works if REPO_DIR is itself a symlink.
if ! [[ -L "$REPO_DIR" ]]; then
  echo " - WARNING!!! REPO_DIR is not a link, this script wont work"
fi

echo " - mkdir for tmp dir: $TMP_DIR"
mkdir "$TMP_DIR"
if [ "$?" -ne 0 ]; then
  echo " - mkdir failed with parent $PARENT_DIR"
  exit 2
fi

pushd "$TMP_DIR"
echo " - downloading file to temp dir: $TMP_DIR"
# http(s) URLs go through wget; anything else is assumed to be s3://.
if [[ $URL == http* ]] ; then
  wget "$URL" -O output.tar.gz
else
  aws s3 cp "$URL" output.tar.gz
fi
# $? here is the exit status of whichever download command ran.
if [ "$?" -ne 0 ]; then
  echo " - pull $URL failed"
  rm -rf "$TMP_DIR"
  popd
  exit 3
fi

echo " - untar output.tar.gz"
tar xzf output.tar.gz
if [ "$?" -ne 0 ]; then
  echo " - untar failed"
  rm -rf "$TMP_DIR"
  popd
  exit 4
fi
rm output.tar.gz

echo " - npm ci"
npm ci
if [ "$?" -ne 0 ]; then
  echo " - npm ci failed"
  rm -rf "$TMP_DIR"
  popd
  exit 5
fi

echo " - link $TMP_DIR over $REPO_DIR"
# ln flag differs by platform: -h (macOS) vs -T (GNU) makes ln replace the
# symlink itself instead of creating a link inside its target.
if [[ $OSTYPE == darwin* ]]; then
  ln -shf "$TMP_DIR" "$REPO_DIR"
else
  ln -sTf "$TMP_DIR" "$REPO_DIR"
fi
if [ "$?" -ne 0 ]; then
  echo " - link failed"
  rm -rf "$TMP_DIR"
  popd
  exit 6
fi

# Record the previous deploy dir for later cleanup by remove_old_target.sh.
echo "$OLD_TARGET" > "$TMP_DIR/.sc_old_target"

popd
exit 0
|
package/src/index.js
ADDED
|
@@ -0,0 +1,617 @@
|
|
|
1
|
+
// External deps: async (series/each helpers), AWS SDK v2, Express middleware.
const async = require('async');
// Silence the aws-sdk v2 end-of-maintenance console banner.
require('aws-sdk/lib/maintenance_mode_message').suppress = true;
const AWS = require('aws-sdk');
const body_parser = require('body-parser');
const child_process = require('child_process');
const cookie_parser = require('cookie-parser');
const fs = require('fs');
const { join: pathJoin } = require('path');
const { webRequest, headUrl, fetchFileContents } = require('./request');

exports.init = init;
exports.getGitCommitHash = getGitCommitHash;

// Polling limits while waiting for a peer to come back on the new build:
// up to MAX_WAIT_COUNT attempts spaced SERVER_WAIT_MS apart (~2 minutes).
const MAX_WAIT_COUNT = 12;
const SERVER_WAIT_MS = 10 * 1000;
// Defaults merged under the caller's config in init().
const DEFAULT_CONFIG = {
  route_prefix: '', // URL prefix for the control routes
  secret: 'secret', // shared secret accepted via header, body, or cookie
  sc_update_url_key_name: 'SC_UPDATE_URL', // key written into launch UserData
  restart_function: _defaultRestartFunction, // called after a successful update
  service_port: 80, // port peers are reached on
  http_proto: 'http', // protocol used to reach peers
  auth_middleware: false, // optional fallback auth when the secret is absent
  repo_dir: process.env.PWD, // deploy dir (a symlink swapped on update)
  console_log: console.log,
  error_log: console.error,
  update_launch_default: true, // make the new launch template version default
  remove_old_target: true, // clean the previous deploy dir at startup
};
// Mutable module state.
const g_config = {};
let g_gitCommitHash = false; // cached contents of <repo_dir>/.git_commit_hash
let g_updateHash = ''; // hash we last successfully updated to
|
|
33
|
+
|
|
34
|
+
/**
 * Wire the server-control routes onto an Express app.
 *
 * @param {object} app - Express application.
 * @param {object} config - Overrides merged over DEFAULT_CONFIG; must contain
 *   a string `route_prefix` (may be '') and a `remote_repo_prefix`.
 * @throws {string} When route_prefix / remote_repo_prefix are missing.
 */
function init(app, config) {
  Object.assign(g_config, DEFAULT_CONFIG, config);
  if (typeof g_config.route_prefix !== 'string') {
    throw 'server-control route_prefix required';
  }
  if (!g_config.remote_repo_prefix) {
    throw 'server-control remote_repo_prefix required';
  }
  // Strip a single trailing slash from each prefix. Strings are immutable,
  // so the replace() result must be assigned back — the original code
  // discarded it, making the normalization a silent no-op.
  g_config.route_prefix = g_config.route_prefix.replace(/\/$/, '');
  g_config.remote_repo_prefix = g_config.remote_repo_prefix.replace(/\/$/, '');

  _getAwsRegion();
  getGitCommitHash();
  const { route_prefix } = g_config;
  if (g_config.remove_old_target) {
    _removeOldTarget();
  }

  // Every control route uses the same parse + auth chain; Express accepts an
  // array of handlers, so build it once instead of repeating it per route.
  const common = [
    _parseQuery,
    body_parser.json(),
    body_parser.urlencoded({ extended: false }),
    cookie_parser(),
    _secretOrAuth,
  ];
  app.get(route_prefix + '/server_data', common, _serverData);
  app.get(route_prefix + '/group_data', common, _groupData);
  app.get(route_prefix + '/update_group', common, _updateGroup);
  app.get(route_prefix + '/update_server', common, _updateServer);
}
|
|
89
|
+
/**
 * Express middleware: if req.query is still a raw string (custom/disabled
 * query parser), split it into a key/value object.
 *
 * Fixes over the original: values are percent-decoded (hashes/URLs may be
 * encoded), '+' is treated as a space per urlencoding, and a value containing
 * '=' is no longer truncated at the second '='.
 */
function _parseQuery(req, res, next) {
  if (typeof req.query === 'string') {
    const query = {};
    req.query.split('&').forEach((key_val) => {
      const [raw_key, ...rest] = key_val.split('=');
      query[_decodeQueryPart(raw_key)] = _decodeQueryPart(rest.join('='));
    });
    req.query = query;
  }
  next();
}
// Percent-decode one query component; fall back to the raw text when the
// encoding is malformed rather than throwing.
function _decodeQueryPart(s) {
  const raw = s || '';
  try {
    return decodeURIComponent(raw.replace(/\+/g, ' '));
  } catch (_e) {
    return raw;
  }
}
|
|
100
|
+
/**
 * Auth middleware: accept the shared secret from the x-sc-secret header, the
 * parsed body, or a cookie; otherwise defer to the configured
 * auth_middleware, else respond 403.
 *
 * Security fix: the secret is compared in constant time via
 * crypto.timingSafeEqual instead of `===`, so response timing does not leak
 * how much of the secret matched.
 */
function _secretOrAuth(req, res, next) {
  const header_secret = req.headers && req.headers['x-sc-secret'];
  const body_secret = req.body && req.body.secret;
  const cookie_secret = req.cookies && req.cookies.secret;
  if (
    _secretMatch(header_secret) ||
    _secretMatch(body_secret) ||
    _secretMatch(cookie_secret)
  ) {
    next();
  } else if (g_config.auth_middleware) {
    g_config.auth_middleware(req, res, next);
  } else {
    res.sendStatus(403);
  }
}
// Constant-time string comparison against the configured secret.
function _secretMatch(candidate) {
  if (typeof candidate !== 'string' || candidate.length === 0) {
    return false;
  }
  const crypto = require('crypto');
  const a = Buffer.from(candidate, 'utf8');
  const b = Buffer.from(String(g_config.secret), 'utf8');
  return a.length === b.length && crypto.timingSafeEqual(a, b);
}
|
|
121
|
+
// GET /server_data: report this server's deployed git hash and its process
// uptime. Responds 500 (with the error in the body) if the hash can't be read.
function _serverData(req, res) {
  res.header('Cache-Control', 'no-cache, no-store, must-revalidate');

  getGitCommitHash((err, git_commit_hash) => {
    const body = { git_commit_hash, uptime: process.uptime() };
    if (err) {
      body.err = err;
      res.status(500);
    }
    res.send(body);
  });
}
|
|
136
|
+
|
|
137
|
+
// GET /group_data: summarize the auto-scale group — latest remote build,
// this instance's id, member instances, and the active launch template.
function _groupData(req, res) {
  res.header('Cache-Control', 'no-cache, no-store, must-revalidate');

  _getGroupData((err, result) => {
    const body = {
      LATEST: result.latest ? result.latest : 'unknown',
      InstanceId: result.InstanceId ? result.InstanceId : 'unknown',
      instance_list: result.instance_list,
    };

    const group = result.auto_scale_group;
    if (group) {
      body.auto_scale_group = {
        AutoScalingGroupName: group.AutoScalingGroupName,
        LaunchTemplate: group.LaunchTemplate,
      };
      if (result.launch_template) {
        body.launch_template = result.launch_template;
      }
    }

    if (err) {
      res.status(500).send({ err, body });
      return;
    }
    res.send(body);
  });
}
|
|
164
|
+
|
|
165
|
+
// Gather a snapshot of the deployment group:
//   latest          - build hash from <remote_repo_prefix>/LATEST (best effort)
//   InstanceId      - this instance's id from the metadata service (best effort)
//   auto_scale_group- the ASG found by configured name or by membership
//   instance_list   - per-instance EC2 data plus each server's reported hash
//   launch_template - the ASG's launch template version (UserData decoded)
// Calls done(err, snapshot); the snapshot is returned even on error, with
// whatever fields were populated before the failure.
function _getGroupData(done) {
  const autoscaling = _getAutoscaling();
  const ec2 = _getEC2();
  let latest = false;
  let InstanceId = false;
  let asg = false;
  let instance_list = false;
  let launch_template = false;

  async.series(
    [
      (done) => {
        // Best effort: failing to read LATEST is logged, not fatal.
        _getLatest((err, result) => {
          if (err) {
            _errorLog('_getGroupData: latest err:', err);
          }
          latest = result;
          done();
        });
      },
      (done) => {
        // Also best effort: InstanceId falls back to ''.
        const meta = _getMetadataService();
        meta.request('/latest/meta-data/instance-id', (err, results) => {
          if (err) {
            _errorLog('_getGroupData: Failed to get instance id:', err);
          }
          InstanceId = results || '';
          done();
        });
      },
      (done) => {
        // Find our ASG either by configured asg_name or by this instance's
        // membership. From here on, errors abort the series.
        autoscaling.describeAutoScalingGroups({}, (err, data) => {
          if (err) {
            _errorLog('_getGroupData: find asg err:', err);
          } else {
            asg = data.AutoScalingGroups.find((group) => {
              return (
                group.AutoScalingGroupName === g_config.asg_name ||
                group.Instances.find((i) => i.InstanceId === InstanceId)
              );
            });
            if (!asg) {
              err = 'asg_not_found';
            }
          }
          done(err);
        });
      },
      (done) => {
        // Expand the ASG's member ids into full EC2 instance descriptions.
        const opts = {
          InstanceIds: asg.Instances.map((i) => i.InstanceId),
        };
        ec2.describeInstances(opts, (err, results) => {
          if (err) {
            _errorLog('_getGroupData: describeInstances err:', err);
          } else {
            instance_list = [];
            results.Reservations.forEach((reservation) => {
              reservation.Instances.forEach((i) => {
                instance_list.push({
                  InstanceId: i.InstanceId,
                  PrivateIpAddress: i.PrivateIpAddress,
                  PublicIpAddress: i.PublicIpAddress,
                  LaunchTime: i.LaunchTime,
                  ImageId: i.ImageId,
                  InstanceType: i.InstanceType,
                  State: i.State,
                });
              });
            });
          }
          done(err);
        });
      },
      (done) => {
        // Ask each running instance for its deployed hash and uptime; the
        // results are attached to the instance_list entries in place.
        const list = instance_list.filter((i) => i.State.Name === 'running');
        async.each(
          list,
          (instance, done) => {
            _getServerData(instance, (err, body) => {
              instance.git_commit_hash = body && body.git_commit_hash;
              instance.uptime = body && body.uptime;
              done(err);
            });
          },
          done
        );
      },
      (done) => {
        // Fetch the launch template version the ASG points at, and decode
        // its base64 UserData for display/editing.
        const opts = {
          LaunchTemplateId: asg.LaunchTemplate.LaunchTemplateId,
          Versions: [asg.LaunchTemplate.Version],
        };
        ec2.describeLaunchTemplateVersions(opts, (err, data) => {
          if (err) {
            _errorLog('_getGroupData: launch template fetch error:', err);
          } else if (data && data.LaunchTemplateVersions.length === 0) {
            err = 'launch_template_not_found';
          } else {
            launch_template = data.LaunchTemplateVersions[0];
            const ud = launch_template.LaunchTemplateData.UserData;
            if (ud) {
              const s = Buffer.from(ud, 'base64').toString('utf8');
              launch_template.LaunchTemplateData.UserData = s;
            }
          }
          done(err);
        });
      },
    ],
    (err) => {
      const ret = {
        latest,
        InstanceId,
        auto_scale_group: asg,
        launch_template,
        instance_list,
      };
      done(err, ret);
    }
  );
}
|
|
287
|
+
|
|
288
|
+
// Query a peer instance's /server_data endpoint over its private IP, passing
// the shared secret both as a header and in the JSON body. Calls
// done(err, body) where body carries git_commit_hash and uptime on success.
function _getServerData(instance, done) {
  const { http_proto, service_port, route_prefix, secret } = g_config;
  const url =
    `${http_proto}://${instance.PrivateIpAddress}:${service_port}` +
    `${route_prefix}/server_data`;
  const opts = {
    strictSSL: false, // peers may serve self-signed certs
    url,
    method: 'GET',
    headers: { 'x-sc-secret': secret },
    json: { secret },
  };
  webRequest(opts, (err, body) => {
    if (err) {
      _errorLog('_getServerData: request err:', err);
    }
    done(err, body);
  });
}
|
|
312
|
+
// GET /update_server: update this single server to ?hash=<hash>, then invoke
// the configured restart function. 400 without a hash, 500 on update failure.
function _updateServer(req, res) {
  res.header('Cache-Control', 'no-cache, no-store, must-revalidate');
  const hash = req.body.hash || req.query.hash;
  if (!hash) {
    res.status(400).send('hash is required');
    return;
  }
  _updateSelf(hash, (err) => {
    if (err) {
      res.status(500).send(err);
      return;
    }
    // Respond before restarting so the caller sees the acknowledgement.
    res.send('Restarting server');
    g_config.restart_function();
  });
}
|
|
328
|
+
/**
 * Update this server: run update_to_hash.sh against
 * <remote_repo_prefix>/<hash>.tar.gz from inside repo_dir, recording the hash
 * on success. Calls done(err) with 'update_failed' or 'bad_hash'.
 *
 * Security fix: `hash` originates from an HTTP request and is interpolated
 * into a shell command passed to child_process.exec, so it must be
 * restricted to safe characters to prevent shell injection.
 */
function _updateSelf(hash, done) {
  if (!/^[A-Za-z0-9._-]+$/.test(hash)) {
    _errorLog('_updateSelf: rejecting unsafe hash:', hash);
    done('bad_hash');
    return;
  }
  const dir = g_config.repo_dir;
  const url = `${g_config.remote_repo_prefix}/${hash}.tar.gz`;
  const cmd = `cd ${dir} && ${__dirname}/../scripts/update_to_hash.sh ${url}`;
  child_process.exec(cmd, (err, stdout, stderr) => {
    if (err) {
      _errorLog(
        '_updateSelf: update_to_hash.sh failed with err:',
        err,
        'stdout:',
        stdout,
        'stderr:',
        stderr
      );
      err = 'update_failed';
    } else {
      // Remember the hash for the restart log message.
      g_updateHash = hash;
    }
    done(err);
  });
}
|
|
349
|
+
// Fire-and-forget cleanup of the previous deploy directory via the
// remove_old_target.sh helper; failures are logged but otherwise ignored.
function _removeOldTarget() {
  const script = `${__dirname}/../scripts/remove_old_target.sh`;
  const cmd = `${script} ${g_config.repo_dir}`;
  child_process.exec(cmd, (err, stdout, stderr) => {
    if (!err) {
      return;
    }
    _errorLog(
      '_removeOldTarget: remove_old_target.sh failed with err:',
      err,
      'stdout:',
      stdout,
      'stderr:',
      stderr
    );
  });
}
|
|
365
|
+
|
|
366
|
+
// GET /update_group: roll the whole auto-scale group to ?hash=<hash>.
// Steps, in order (any failure aborts and responds 500):
//   1. snapshot the group + launch template via _getGroupData,
//   2. HEAD the tarball URL to make sure the build exists,
//   3. create a new launch template version whose UserData carries
//      <key_name>=<url> (optionally with a new AMI from ?ami_id=),
//   4. optionally promote it to the default version,
//   5. update every *other* instance in the group (waiting on each),
//   6. finally update this instance and restart it.
function _updateGroup(req, res) {
  res.header('Cache-Control', 'no-cache, no-store, must-revalidate');

  const hash = req.body.hash || req.query.hash;
  if (hash) {
    const url = `${g_config.remote_repo_prefix}/${hash}.tar.gz`;
    const key_name = g_config.sc_update_url_key_name;
    const ami_id = req.body.ami_id || req.query.ami_id || false;

    const ec2 = _getEC2();
    let group_data = false;
    // UserData lines from the current template, minus any old key_name line.
    let old_data = '';
    let new_version;
    // Per-instance update outcome keyed by InstanceId (falsy = success).
    const server_result = {};
    async.series(
      [
        (done) => {
          _getGroupData((err, result) => {
            if (!err) {
              group_data = result;
              // Keep every existing UserData line except a previous
              // SC_UPDATE_URL assignment, which gets replaced below.
              const data = result.launch_template.LaunchTemplateData.UserData;
              data.split('\n').forEach((line) => {
                if (line.length && line.indexOf(key_name) === -1) {
                  old_data += line + '\n';
                }
              });
            }
            done(err);
          });
        },
        (done) => {
          // Refuse to roll the group to a build that doesn't exist.
          headUrl(url, (err) => {
            if (err) {
              _errorLog('_updateGroup: head url:', url, 'err:', err);
              err = 'url_not_found';
            }
            done(err);
          });
        },
        (done) => {
          // New template version: preserved UserData + fresh key_name line.
          const new_data = `${old_data}${key_name}=${url}\n`;
          const opts = {
            LaunchTemplateId: group_data.launch_template.LaunchTemplateId,
            SourceVersion: String(group_data.launch_template.VersionNumber),
            LaunchTemplateData: {
              UserData: Buffer.from(new_data, 'utf8').toString('base64'),
            },
          };
          if (ami_id) {
            opts.LaunchTemplateData.ImageId = ami_id;
          }
          ec2.createLaunchTemplateVersion(opts, (err, data) => {
            if (err) {
              _errorLog('_updateGroup: failed to create version, err:', err);
            } else {
              new_version = data.LaunchTemplateVersion.VersionNumber;
            }
            done(err);
          });
        },
        (done) => {
          // Optionally make the new version the template default so future
          // scale-ups launch with the new build baked into UserData.
          if (g_config.update_launch_default) {
            const opts = {
              DefaultVersion: String(new_version),
              LaunchTemplateId: group_data.launch_template.LaunchTemplateId,
            };
            ec2.modifyLaunchTemplate(opts, function (err) {
              if (err) {
                _errorLog('_updateGroup: failed to update default, err:', err);
              }
              done(err);
            });
          } else {
            done();
          }
        },
        (done) => {
          // Update peers first; this instance is skipped here and updated
          // last (next step) so it can keep serving the group rollout.
          // Individual failures are recorded but don't stop the others.
          let group_err;
          async.each(
            group_data.instance_list,
            (instance, done) => {
              if (instance.InstanceId === group_data.InstanceId) {
                done();
              } else {
                _updateInstance(hash, instance, (err) => {
                  if (err) {
                    _errorLog(
                      '_updateGroup: update instance:',
                      instance.InstanceId,
                      'err:',
                      err
                    );
                    group_err = err;
                  }
                  server_result[instance.InstanceId] = err;
                  done();
                });
              }
            },
            () => done(group_err)
          );
        },
        (done) => {
          _updateSelf(hash, (err) => {
            server_result[group_data.InstanceId] = err;
            done(err);
          });
        },
      ],
      (err) => {
        const body = {
          err,
          server_result,
          launch_template_version: new_version,
        };
        if (err) {
          res.status(500).send(body);
        } else {
          body._msg =
            'Successful updating all servers, restarting this server.';
          // Respond first, then restart this (already updated) process.
          res.send(body);
          g_config.restart_function();
        }
      }
    );
  } else {
    res.status(400).send('hash is required');
  }
}
|
|
495
|
+
// Update one peer: hit its /update_server endpoint with the hash, then poll
// until it reports the new hash (or times out). Calls done(err).
function _updateInstance(hash, instance, done) {
  const kick = (done) => {
    const { http_proto, service_port, route_prefix, secret } = g_config;
    const url =
      `${http_proto}://${instance.PrivateIpAddress}:${service_port}` +
      `${route_prefix}/update_server`;
    const opts = {
      strictSSL: false, // peers may serve self-signed certs
      url,
      method: 'GET',
      headers: { 'x-sc-secret': secret },
      json: { hash, secret },
    };
    webRequest(opts, kick_done);
    function kick_done(err, body) {
      done(err, body);
    }
  };
  const wait = (done) => _waitForServer({ instance, hash }, done);
  async.series([kick, wait], done);
}
|
|
523
|
+
// Poll an instance's server_data until it reports the target hash, making up
// to MAX_WAIT_COUNT attempts spaced SERVER_WAIT_MS apart. Calls done(null) on
// success or done('too_many_tries') on timeout.
function _waitForServer(params, done) {
  const { instance, hash } = params;
  let attempts = 0;

  const poll = (next) => {
    attempts += 1;
    _getServerData(instance, (err, body) => {
      const updated = !err && body && body.git_commit_hash === hash;
      if (updated) {
        // 'stop' is a sentinel that breaks async.forever without an error.
        next('stop');
      } else if (attempts > MAX_WAIT_COUNT) {
        next('too_many_tries');
      } else {
        setTimeout(next, SERVER_WAIT_MS);
      }
    });
  };
  async.forever(poll, (err) => {
    done(err === 'stop' ? null : err);
  });
}
|
|
548
|
+
|
|
549
|
+
// Read the LATEST file from the remote repo; it holds the newest build hash.
// Calls done(err, trimmed_hash).
function _getLatest(done) {
  const url = `${g_config.remote_repo_prefix}/LATEST`;
  fetchFileContents(url, (err, body) =>
    done(err, body && body.trim())
  );
}
|
|
555
|
+
// Read (and cache) the deployed build hash from <repo_dir>/.git_commit_hash.
// `done` is optional; called as done(err, hash). Only successful reads are
// cached, so failures are retried on the next call.
function getGitCommitHash(done) {
  if (g_gitCommitHash) {
    if (done) {
      done(null, g_gitCommitHash);
    }
    return;
  }
  const file = pathJoin(g_config.repo_dir, '.git_commit_hash');
  fs.readFile(file, 'utf8', (err, result) => {
    if (!err && !result) {
      err = 'no_result';
    }
    if (err) {
      _errorLog('getGitCommitHash: err:', err, 'file:', file);
    } else {
      g_gitCommitHash = result.trim();
    }
    if (done) {
      done(err, g_gitCommitHash);
    }
  });
}
|
|
573
|
+
// Best effort: when no region was configured, populate g_config.region from
// the EC2 instance identity document. Fire-and-forget; errors are only logged.
function _getAwsRegion() {
  if (g_config.region) {
    return;
  }
  const meta = _getMetadataService();
  const path = '/latest/dynamic/instance-identity/document';
  meta.request(path, (err, results) => {
    if (err) {
      _errorLog('_getAwsRegion: metadata err:', err);
      return;
    }
    try {
      const json = JSON.parse(results);
      if (json && json.region) {
        g_config.region = json.region;
      }
    } catch (e) {
      _errorLog('_getAwsRegion: threw:', e);
    }
  });
}
|
|
595
|
+
// Metadata client; options may be overridden via config.metadata_opts.
function _getMetadataService() {
  return new AWS.MetadataService(g_config.metadata_opts || {});
}
|
|
599
|
+
// Fresh client per call so a region resolved late by _getAwsRegion is honored.
function _getAutoscaling() {
  const region = g_config.region;
  return new AWS.AutoScaling({ region });
}
|
|
602
|
+
// Fresh client per call so a region resolved late by _getAwsRegion is honored.
function _getEC2() {
  const region = g_config.region;
  return new AWS.EC2({ region });
}
|
|
605
|
+
// Forward variadic args to the configured error logger (console.error by
// default), so library consumers can redirect error output.
function _errorLog(...args) {
  g_config.error_log(...args);
}
|
|
608
|
+
// Default restart: log the update, then exit shortly after so the pending
// HTTP response can flush; the process supervisor is expected to relaunch
// the service from the freshly linked deploy dir.
function _defaultRestartFunction() {
  g_config.console_log(
    'server-control: updated to: ',
    g_updateHash,
    'restarting...'
  );
  setTimeout(() => process.exit(0), 100);
}
|
package/src/request.js
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
// HTTP + S3 fetch helpers shared by index.js.
const AWS = require('aws-sdk');
const request = require('request');

exports.webRequest = webRequest;
exports.headUrl = headUrl;
exports.fetchFileContents = fetchFileContents;
|
|
7
|
+
|
|
8
|
+
// Thin wrapper over request() that converts any non-2xx response into an
// error, passing the numeric status code as the error value.
// Calls done(err, body).
function webRequest(opts, done) {
  request(opts, (err, response, body) => {
    const status = response && response.statusCode;
    const outside_2xx = status < 200 || status > 299;
    done(err || (outside_2xx ? status : err), body);
  });
}
|
|
17
|
+
// Existence check: HEAD an http(s) URL, or headObject an s3://bucket/key URL.
// Calls done(err) — err is falsy when the object exists.
function headUrl(url, done) {
  if (url.indexOf('http') === 0) {
    webRequest({ url, method: 'HEAD' }, done);
    return;
  }
  const match = url.match(/s3:\/\/([^/]*)\/(.*)/);
  const Bucket = match ? match[1] : match;
  const Key = match ? match[2] : match;
  new AWS.S3().headObject({ Bucket, Key }, done);
}
|
|
28
|
+
// Fetch a small text file from an http(s) or s3://bucket/key URL and return
// its contents as a utf8 string via done(err, text).
function fetchFileContents(url, done) {
  if (url.indexOf('http') === 0) {
    webRequest({ url }, done);
    return;
  }
  const match = url.match(/s3:\/\/([^/]*)\/(.*)/);
  const Bucket = match ? match[1] : match;
  const Key = match ? match[2] : match;
  new AWS.S3().getObject({ Bucket, Key }, (err, data) => {
    const raw = data && data.Body;
    done(err, raw && raw.toString('utf8'));
  });
}
|