@bitblit/ratchet-aws-node-only 4.0.80-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/athena/alb-athena-log-ratchet.js +156 -0
- package/dist/cjs/athena/athena-ratchet.js +159 -0
- package/dist/cjs/build/ratchet-aws-node-only-info.js +18 -0
- package/dist/cjs/cli/dynamo-exporter.js +81 -0
- package/dist/cjs/cli/ratchet-cli-handler.js +19 -0
- package/dist/cjs/cli/site-uploader/site-uploader.js +117 -0
- package/dist/cjs/cli/start-instance-and-ssh.js +67 -0
- package/dist/cjs/index.js +11 -0
- package/dist/cjs/mail/inbound/inbound-email-ratchet.js +54 -0
- package/dist/es/athena/alb-athena-log-ratchet.js +153 -0
- package/dist/es/athena/athena-ratchet.js +154 -0
- package/dist/es/build/ratchet-aws-node-only-info.js +14 -0
- package/dist/es/cli/dynamo-exporter.js +76 -0
- package/dist/es/cli/ratchet-cli-handler.js +15 -0
- package/dist/es/cli/site-uploader/site-uploader.js +112 -0
- package/dist/es/cli/start-instance-and-ssh.js +62 -0
- package/dist/es/index.js +8 -0
- package/dist/es/mail/inbound/inbound-email-ratchet.js +49 -0
- package/dist/tsconfig.cjs.tsbuildinfo +1 -0
- package/dist/tsconfig.es.tsbuildinfo +1 -0
- package/dist/tsconfig.types.tsbuildinfo +1 -0
- package/dist/types/athena/alb-athena-log-ratchet.d.ts +57 -0
- package/dist/types/athena/athena-ratchet.d.ts +16 -0
- package/dist/types/build/ratchet-aws-node-only-info.d.ts +5 -0
- package/dist/types/cli/dynamo-exporter.d.ts +13 -0
- package/dist/types/cli/ratchet-cli-handler.d.ts +6 -0
- package/dist/types/cli/site-uploader/site-uploader.d.ts +12 -0
- package/dist/types/cli/start-instance-and-ssh.d.ts +12 -0
- package/dist/types/index.d.ts +11 -0
- package/dist/types/mail/inbound/inbound-email-ratchet.d.ts +18 -0
- package/includes/cli.js +12 -0
- package/package.json +97 -0

package/dist/cjs/athena/alb-athena-log-ratchet.js
@@ -0,0 +1,156 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AlbAthenaLogRatchet = void 0;
+const tslib_1 = require("tslib");
+const fs_1 = require("fs");
+const path_1 = tslib_1.__importDefault(require("path"));
+const ratchet_common_1 = require("@bitblit/ratchet-common");
+const ratchet_common_2 = require("@bitblit/ratchet-common");
+const ratchet_common_3 = require("@bitblit/ratchet-common");
+const ratchet_aws_1 = require("@bitblit/ratchet-aws");
+const ratchet_node_only_1 = require("@bitblit/ratchet-node-only");
+class AlbAthenaLogRatchet {
+    constructor(athena, athenaTableName) {
+        this.athena = athena;
+        this.athenaTableName = athenaTableName;
+        ratchet_common_2.RequireRatchet.notNullOrUndefined(athena, 'athena');
+        ratchet_common_2.RequireRatchet.notNullOrUndefined(ratchet_common_3.StringRatchet.trimToNull(athenaTableName), 'athenaTableName');
+    }
+    async updatePartitions(rootPath, s3, startTimeEpochMS = new Date().getTime() - 1000 * 60 * 60 * 24, endTimeEpochMS = new Date().getTime()) {
+        ratchet_common_2.RequireRatchet.true(ratchet_aws_1.S3Ratchet.checkS3UrlForValidity(rootPath), 'root path not valid');
+        ratchet_common_2.RequireRatchet.notNullOrUndefined(s3, 's3');
+        ratchet_common_1.Logger.info('Updating partitions for %s from %s', this.athenaTableName, rootPath);
+        const bucketName = ratchet_aws_1.S3Ratchet.extractBucketFromURL(rootPath);
+        const rootKey = ratchet_aws_1.S3Ratchet.extractKeyFromURL(rootPath);
+        let current = startTimeEpochMS;
+        const clauses = [];
+        while (current < endTimeEpochMS) {
+            const dateUtcVal = new Date(current).toISOString().substring(0, 10);
+            ratchet_common_1.Logger.info('d:%s', dateUtcVal);
+            const dateParts = dateUtcVal.split('-');
+            clauses.push("PARTITION (date_utc_partition='" +
+                dateUtcVal +
+                "') LOCATION '" +
+                rootPath +
+                '/' +
+                dateParts[0] +
+                '/' +
+                dateParts[1] +
+                '/' +
+                dateParts[2] +
+                "'");
+            current += 1000 * 60 * 60 * 24;
+        }
+        if (clauses.length > 0) {
+            const stmt = 'ALTER TABLE ' + this.athenaTableName + ' ADD IF NOT EXISTS \n' + clauses.join('\n');
+            await this.athena.runQueryToObjects(stmt);
+        }
+        else {
+            ratchet_common_1.Logger.warn('Not updating partitions - no time between time clauses');
+        }
+        return clauses;
+    }
+    async createTable(rootPath, replaceIfPresent = false) {
+        ratchet_common_2.RequireRatchet.true(ratchet_aws_1.S3Ratchet.checkS3UrlForValidity(rootPath), 'root path not valid');
+        let rval = false;
+        ratchet_common_1.Logger.info('Creating ALB table %s', this.athenaTableName);
+        if (replaceIfPresent) {
+            ratchet_common_1.Logger.info('Replace if present specified, removed old table');
+            try {
+                await this.athena.runQueryToObjects('drop table ' + this.athenaTableName);
+            }
+            catch (err) {
+                ratchet_common_1.Logger.info('Drop error : %j', err);
+            }
+        }
+        let tableCreateQry = (0, fs_1.readFileSync)(path_1.default.join(__dirname, '../static/albAthenaTableCreate.txt')).toString();
+        tableCreateQry = tableCreateQry.split('{{TABLE NAME}}').join(this.athenaTableName);
+        tableCreateQry = tableCreateQry.split('{{ALB_LOG_ROOT}}').join(rootPath);
+        ratchet_common_1.Logger.info('Creating table with %s', tableCreateQry);
+        try {
+            await this.athena.runQueryToObjects(tableCreateQry);
+            rval = true;
+        }
+        catch (err) {
+            ratchet_common_1.Logger.error('Error creating table : %s', err);
+        }
+        return rval;
+    }
+    static async readLogObjectsFromCsvStream(readStream) {
+        return ratchet_node_only_1.CsvRatchet.streamParse(readStream, (p) => p);
+    }
+    static async readLogObjectsFromFile(fileName) {
+        return ratchet_node_only_1.CsvRatchet.fileParse(fileName, (p) => p);
+    }
+    async fetchAlbLogRecords(qry) {
+        const tempFile = await this.fetchAlbLogRecordsToFile(qry);
+        return AlbAthenaLogRatchet.readLogObjectsFromFile(tempFile);
+    }
+    async fetchAlbLogRecordsToFile(qry, outputFileName = null) {
+        ratchet_common_1.Logger.info('Querying %s : %j', this.athenaTableName, qry);
+        let qrySt = 'select * from ' + this.athenaTableName + ' where 1=1 ';
+        if (qry.startTimeEpochMS) {
+            qrySt += " AND time >= '" + new Date(qry.startTimeEpochMS).toISOString() + "'";
+            qrySt += " AND date_utc_partition >='" + new Date(qry.startTimeEpochMS).toISOString().substring(0, 10) + "'";
+        }
+        if (qry.endTimeEpochMS) {
+            qrySt += " AND time < '" + new Date(qry.endTimeEpochMS).toISOString() + "'";
+            qrySt += " AND date_utc_partition <='" + new Date(qry.endTimeEpochMS).toISOString().substring(0, 10) + "'";
+        }
+        if (qry.requestUrlFilter) {
+            qrySt += " AND request_url LIKE '" + qry.requestUrlFilter + "'";
+        }
+        if (qry.limit) {
+            qrySt += ' LIMIT ' + qry.limit;
+        }
+        const result = await this.athena.runQueryToFile(qrySt, null, outputFileName);
+        return result;
+    }
+}
+exports.AlbAthenaLogRatchet = AlbAthenaLogRatchet;
+AlbAthenaLogRatchet.CREATE_TABLE_STATEMENT = 'CREATE EXTERNAL TABLE IF NOT EXISTS `{{TABLE NAME}}`(\n' +
+    " `type` string COMMENT '',\n" +
+    " `time` string COMMENT '',\n" +
+    " `elb` string COMMENT '',\n" +
+    " `client_ip` string COMMENT '',\n" +
+    " `client_port` int COMMENT '',\n" +
+    " `target_ip` string COMMENT '',\n" +
+    " `target_port` int COMMENT '',\n" +
+    " `request_processing_time` double COMMENT '',\n" +
+    " `target_processing_time` double COMMENT '',\n" +
+    " `response_processing_time` double COMMENT '',\n" +
+    " `elb_status_code` string COMMENT '',\n" +
+    " `target_status_code` string COMMENT '',\n" +
+    " `received_bytes` bigint COMMENT '',\n" +
+    " `sent_bytes` bigint COMMENT '',\n" +
+    " `request_verb` string COMMENT '',\n" +
+    " `request_url` string COMMENT '',\n" +
+    " `request_proto` string COMMENT '',\n" +
+    " `user_agent` string COMMENT '',\n" +
+    " `ssl_cipher` string COMMENT '',\n" +
+    " `ssl_protocol` string COMMENT '',\n" +
+    " `target_group_arn` string COMMENT '',\n" +
+    " `trace_id` string COMMENT '',\n" +
+    " `domain_name` string COMMENT '',\n" +
+    " `chosen_cert_arn` string COMMENT '',\n" +
+    " `matched_rule_priority` string COMMENT '',\n" +
+    " `request_creation_time` string COMMENT '',\n" +
+    " `actions_executed` string COMMENT '',\n" +
+    " `redirect_url` string COMMENT '',\n" +
+    " `lambda_error_reason` string COMMENT '',\n" +
+    " `target_port_list` string COMMENT '',\n" +
+    " `target_status_code_list` string COMMENT '',\n" +
+    " `new_field` string COMMENT '')\n" +
+    'PARTITIONED BY (\n' +
+    ' `date_utc_partition` string\n' +
+    ')\n' +
+    'ROW FORMAT SERDE\n' +
+    " 'org.apache.hadoop.hive.serde2.RegexSerDe'\n" +
+    'WITH SERDEPROPERTIES (\n' +
+    ' \'input.regex\'=\'([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*):([0-9]*) ([^ ]*)[:-]([0-9]*) ([-.0-9]*) ([-.0-9]*) ([-.0-9]*) (|[-0-9]*) (-|[-0-9]*) ([-0-9]*) ([-0-9]*) \\"([^ ]*) ([^ ]*) (- |[^ ]*)\\" \\"([^\\"]*)\\" ([A-Z0-9-]+) ([A-Za-z0-9.-]*) ([^ ]*) \\"([^\\"]*)\\" \\"([^\\"]*)\\" \\"([^\\"]*)\\" ([-.0-9]*) ([^ ]*) \\"([^\\"]*)\\" \\"([^\\"]*)\\" \\"([^ ]*)\\" \\"([^s]+)\\" \\"([^s]+)\\"(.*)\')\n' +
+    'STORED AS INPUTFORMAT\n' +
+    " 'org.apache.hadoop.mapred.TextInputFormat'\n" +
+    'OUTPUTFORMAT\n' +
+    " 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" +
+    'LOCATION\n' +
+    " '{{ALB_LOG_ROOT}}'\n";
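
For orientation, a minimal usage sketch of the class above. The bucket, account, table, and output-location names are placeholders (not part of the package), it assumes AWS credentials are present in the environment, and it relies on AthenaRatchet (the next file in this diff) for the query plumbing.

// Hypothetical usage sketch - all names below are placeholders
const { AthenaClient } = require('@aws-sdk/client-athena');
const { S3Client } = require('@aws-sdk/client-s3');
const { AthenaRatchet, AlbAthenaLogRatchet } = require('@bitblit/ratchet-aws-node-only');

async function dumpRecentAlbTraffic() {
  const athenaClient = new AthenaClient({ region: 'us-east-1' });
  const s3Client = new S3Client({ region: 'us-east-1' });
  const athena = new AthenaRatchet(athenaClient, s3Client, 's3://my-athena-results/output');
  const alb = new AlbAthenaLogRatchet(athena, 'my_alb_logs');

  const logRoot = 's3://my-alb-log-bucket/AWSLogs/123456789012/elasticloadbalancing/us-east-1';
  // Create the table if needed, then register partitions for the last 24 hours (the default window)
  await alb.createTable(logRoot);
  await alb.updatePartitions(logRoot, s3Client);

  // Pull up to 100 matching rows as parsed objects
  const rows = await alb.fetchAlbLogRecords({
    startTimeEpochMS: Date.now() - 1000 * 60 * 60 * 24,
    endTimeEpochMS: Date.now(),
    limit: 100,
  });
  console.log('Fetched %d rows', rows.length);
}

dumpRecentAlbTraffic().catch((err) => console.error(err));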

package/dist/cjs/athena/athena-ratchet.js
@@ -0,0 +1,159 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AthenaRatchet = void 0;
+const tslib_1 = require("tslib");
+const client_athena_1 = require("@aws-sdk/client-athena");
+const ratchet_common_1 = require("@bitblit/ratchet-common");
+const ratchet_common_2 = require("@bitblit/ratchet-common");
+const ratchet_common_3 = require("@bitblit/ratchet-common");
+const ratchet_common_4 = require("@bitblit/ratchet-common");
+const client_s3_1 = require("@aws-sdk/client-s3");
+const tmp_1 = tslib_1.__importDefault(require("tmp"));
+const fs_1 = tslib_1.__importDefault(require("fs"));
+const ratchet_node_only_1 = require("@bitblit/ratchet-node-only");
+const ratchet_common_5 = require("@bitblit/ratchet-common");
+class AthenaRatchet {
+    constructor(athena, s3, outputLocation) {
+        this.athena = athena;
+        this.s3 = s3;
+        this.outputLocation = outputLocation;
+        ratchet_common_5.RequireRatchet.notNullOrUndefined(athena);
+        ratchet_common_5.RequireRatchet.notNullOrUndefined(s3);
+        ratchet_common_5.RequireRatchet.notNullOrUndefined(outputLocation);
+        ratchet_common_5.RequireRatchet.true(outputLocation.startsWith('s3://'));
+    }
+    static athenaRowsToObject(input) {
+        const colNames = input[0].Data.map((d) => d.VarCharValue);
+        const temp = input.slice(1);
+        const rval = temp.map((t) => {
+            const newItem = {};
+            for (let i = 0; i < t.Data.length; i++) {
+                newItem[colNames[i]] = t.Data[i].VarCharValue;
+            }
+            return newItem;
+        });
+        return rval;
+    }
+    static applyParamsToQuery(query, queryParams) {
+        let rval = query;
+        if (!!rval && !!queryParams) {
+            Object.keys(queryParams).forEach((k) => {
+                const val = ratchet_common_1.StringRatchet.safeString(queryParams[k]);
+                const kk = '{' + k + '}';
+                rval = rval.split(kk).join(val);
+            });
+        }
+        return rval;
+    }
+    async fetchQueryIds() {
+        const params = {
+            NextToken: null,
+        };
+        let rval = [];
+        let next = null;
+        do {
+            next = await this.athena.send(new client_athena_1.ListNamedQueriesCommand(params));
+            rval = rval.concat(next.NamedQueryIds);
+            params.NextToken = next.NextToken;
+        } while (!!params.NextToken);
+        return rval;
+    }
+    async listQueries() {
+        const rval = [];
+        const ids = await this.fetchQueryIds();
+        ratchet_common_2.Logger.debug('Finding %d items', ids.length);
+        for (let i = 0; i < ids.length; i++) {
+            const params = {
+                NamedQueryId: ids[i],
+            };
+            const val = await this.athena.send(new client_athena_1.GetNamedQueryCommand(params));
+            rval.push(val.NamedQuery);
+        }
+        return rval;
+    }
+    async findQueryByName(name) {
+        const all = await this.listQueries();
+        const rval = all.find((a) => a.Name.toLowerCase() == name.toLowerCase());
+        return rval;
+    }
+    async runQueryToObjects(queryIn, queryParams = {}, pingTimeMS = 2000) {
+        ratchet_common_2.Logger.info('Running query to objects');
+        const outputLoc = await this.runQueryToOutputLocation(queryIn, queryParams, pingTimeMS);
+        ratchet_common_2.Logger.info('Query succeeded, processing file from %s', outputLoc);
+        const bucketName = outputLoc.substring(5, outputLoc.indexOf('/', 5));
+        const obKey = outputLoc.substring(outputLoc.indexOf('/', 5) + 1);
+        const req = {
+            Bucket: bucketName,
+            Key: obKey,
+        };
+        const getFileOut = await this.s3.send(new client_s3_1.GetObjectCommand(req));
+        const rval = await ratchet_node_only_1.CsvRatchet.stringParse(getFileOut.Body.toString(), (p) => {
+            return p;
+        }, { columns: true, skip_empty_lines: true });
+        return rval;
+    }
+    async runQueryToFile(queryIn, queryParams = {}, targetDataFileIn = null, pingTimeMS = 2000) {
+        ratchet_common_2.Logger.info('Running query to file');
+        const outputLoc = await this.runQueryToOutputLocation(queryIn, queryParams, pingTimeMS);
+        ratchet_common_2.Logger.info('Query succeeded, pulling file from %s', outputLoc);
+        const bucketName = outputLoc.substring(5, outputLoc.indexOf('/', 5));
+        const obKey = outputLoc.substring(outputLoc.indexOf('/', 5) + 1);
+        const req = {
+            Bucket: bucketName,
+            Key: obKey,
+        };
+        const targetDataFile = targetDataFileIn || tmp_1.default.fileSync({ postfix: '.csv', keep: false }).name;
+        const fileStream = fs_1.default.createWriteStream(targetDataFile);
+        const output = await this.s3.send(new client_s3_1.GetObjectCommand(req));
+        const readStream = output.Body;
+        readStream.pipe(fileStream);
+        const rval = await ratchet_common_4.PromiseRatchet.resolveOnEvent(readStream, ['finish', 'close'], ['error'], targetDataFile);
+        ratchet_common_2.Logger.silly('Response: %s', rval);
+        return targetDataFile;
+    }
+    async runQueryToOutputLocation(queryIn, queryParams = {}, pingTimeMS = 2000) {
+        let rval = null;
+        const timer = new ratchet_common_3.StopWatch();
+        const query = AthenaRatchet.applyParamsToQuery(queryIn, queryParams);
+        try {
+            ratchet_common_2.Logger.info('Starting query : %s', query);
+            const token = ratchet_common_1.StringRatchet.createType4Guid();
+            const params = {
+                QueryString: query,
+                ResultConfiguration: {
+                    OutputLocation: this.outputLocation,
+                    EncryptionConfiguration: {
+                        EncryptionOption: 'SSE_S3',
+                    },
+                },
+                ClientRequestToken: token,
+                QueryExecutionContext: {
+                    Database: 'default',
+                },
+            };
+            const startToken = await this.athena.send(new client_athena_1.StartQueryExecutionCommand(params));
+            const getExecParams = {
+                QueryExecutionId: startToken.QueryExecutionId,
+            };
+            const finalStates = ['FAILED', 'CANCELLED', 'SUCCEEDED'];
+            let curState = await this.athena.send(new client_athena_1.GetQueryExecutionCommand(getExecParams));
+            while (finalStates.indexOf(curState.QueryExecution.Status.State) === -1) {
+                await ratchet_common_4.PromiseRatchet.createTimeoutPromise('wait', pingTimeMS);
+                ratchet_common_2.Logger.debug('%s : %s : %s', curState.QueryExecution.Status.State, timer.dump(), query);
+                curState = await this.athena.send(new client_athena_1.GetQueryExecutionCommand(getExecParams));
+            }
+            if (curState.QueryExecution.Status.State === 'FAILED') {
+                ratchet_common_2.Logger.warn('Query failed : %s', curState.QueryExecution.Status.StateChangeReason);
+            }
+            else if (curState.QueryExecution.Status.State === 'SUCCEEDED') {
+                rval = curState.QueryExecution.ResultConfiguration.OutputLocation;
+            }
+        }
+        catch (err) {
+            ratchet_common_2.Logger.warn('Failure : %s', err, err);
+        }
+        ratchet_common_2.Logger.info('Query took %s : %s', timer.dump(), query);
+        return rval;
+    }
+}
+exports.AthenaRatchet = AthenaRatchet;
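
Worth noting: applyParamsToQuery above implements plain `{name}` templating via split/join, i.e. literal text substitution with no SQL escaping, so only trusted values should be templated. A small sketch with made-up table and parameter names:

const { AthenaRatchet } = require('@bitblit/ratchet-aws-node-only');

// Each '{day}' token is replaced by plain-text substitution (split/join)
const qry = AthenaRatchet.applyParamsToQuery(
  "select count(*) as cnt from my_table where date_utc_partition = '{day}'",
  { day: '2023-03-12' }
);
console.log(qry);
// select count(*) as cnt from my_table where date_utc_partition = '2023-03-12'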

package/dist/cjs/build/ratchet-aws-node-only-info.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RatchetAwsNodeOnlyInfo = void 0;
+class RatchetAwsNodeOnlyInfo {
+    constructor() { }
+    static buildInformation() {
+        const val = {
+            version: '80',
+            hash: '10fcb761c2fa4186df89e527e948f5780a4e14ac',
+            branch: 'alpha-2023-03-12-2',
+            tag: 'alpha-2023-03-12-2',
+            timeBuiltISO: '2023-03-12T20:00:51-0700',
+            notes: '',
+        };
+        return val;
+    }
+}
+exports.RatchetAwsNodeOnlyInfo = RatchetAwsNodeOnlyInfo;

package/dist/cjs/cli/dynamo-exporter.js
@@ -0,0 +1,81 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DynamoExporter = void 0;
+const tslib_1 = require("tslib");
+const ratchet_common_1 = require("@bitblit/ratchet-common");
+const ratchet_common_2 = require("@bitblit/ratchet-common");
+const ratchet_common_3 = require("@bitblit/ratchet-common");
+const ratchet_common_4 = require("@bitblit/ratchet-common");
+const fs_1 = tslib_1.__importDefault(require("fs"));
+const readline_1 = tslib_1.__importDefault(require("readline"));
+class DynamoExporter {
+    constructor() { }
+    static async importJsonLFileToTable(dynamo, tableName, filename) {
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(tableName, 'tableName');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(filename, 'filename');
+        const fileStream = fs_1.default.createReadStream(filename);
+        const rl = readline_1.default.createInterface({
+            input: fileStream,
+            crlfDelay: Infinity,
+        });
+        let rval = 0;
+        for await (const line of rl) {
+            if (rval % 100 === 0) {
+                ratchet_common_2.Logger.info('Importing line %d', rval);
+            }
+            if (ratchet_common_1.StringRatchet.trimToNull(line)) {
+                const parsed = JSON.parse(line);
+                await dynamo.simplePut(tableName, parsed);
+                rval++;
+            }
+        }
+        return rval;
+    }
+    static async exportScanToJsonLFile(dynamo, scan, filename) {
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(scan, 'scan');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(filename, 'filename');
+        const ws = fs_1.default.createWriteStream(filename);
+        ws.on('end', () => {
+            ratchet_common_2.Logger.debug('Write complete');
+        });
+        const rval = await DynamoExporter.exportScanToJsonLWriteStream(dynamo, scan, ws);
+        await ratchet_common_3.PromiseRatchet.resolveOnEvent(ws, ['finish', 'close'], ['error']);
+        ws.close();
+        return rval;
+    }
+    static async exportQueryToJsonLFile(dynamo, qry, filename) {
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(qry, 'qry');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(filename, 'filename');
+        const ws = fs_1.default.createWriteStream(filename);
+        ws.on('end', () => {
+            ratchet_common_2.Logger.debug('Write complete');
+        });
+        const rval = await DynamoExporter.exportQueryToJsonLWriteStream(dynamo, qry, ws);
+        await ratchet_common_3.PromiseRatchet.resolveOnEvent(ws, ['finish', 'close'], ['error']);
+        ws.close();
+        return rval;
+    }
+    static async exportScanToJsonLWriteStream(dynamo, scan, target) {
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(scan, 'scan');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(target, 'target');
+        const rval = await dynamo.fullyExecuteProcessOverScan(scan, async (row) => DynamoExporter.writeItemToJsonLStream(row, target, false));
+        return rval;
+    }
+    static async exportQueryToJsonLWriteStream(dynamo, qry, target) {
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(qry, 'qry');
+        ratchet_common_4.RequireRatchet.notNullOrUndefined(target, 'target');
+        const rval = await dynamo.fullyExecuteProcessOverQuery(qry, async (row) => DynamoExporter.writeItemToJsonLStream(row, target, false));
+        return rval;
+    }
+    static writeItemToJsonLStream(item, target, includeNulls = false) {
+        if (!!item || includeNulls) {
+            target.write(JSON.stringify(item) + '\n');
+        }
+    }
+}
+exports.DynamoExporter = DynamoExporter;
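
A sketch of the round trip above: exportScanToJsonLFile writes one JSON document per line while fullyExecuteProcessOverScan pages through the table, and importJsonLFileToTable replays such a file with one simplePut per non-blank line. The dynamo argument is whatever object supplies those methods; in the ratchet ecosystem that is typically a DynamoRatchet from @bitblit/ratchet-aws (an assumption here, since this diff only shows the calls). Table and file names are placeholders.

const { DynamoExporter } = require('@bitblit/ratchet-aws-node-only');

// 'dynamo' must provide fullyExecuteProcessOverScan / simplePut;
// a DynamoRatchet-style instance is assumed here (not shown in this diff)
async function backupAndRestore(dynamo) {
  // Dump every row of source-table to newline-delimited JSON
  const exported = await DynamoExporter.exportScanToJsonLFile(dynamo, { TableName: 'source-table' }, '/tmp/source-table.jsonl');
  console.log('Export result:', exported);

  // Replay the file into another table; returns the count of imported rows
  const imported = await DynamoExporter.importJsonLFileToTable(dynamo, 'dest-table', '/tmp/source-table.jsonl');
  console.log('Imported %d rows', imported);
}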

package/dist/cjs/cli/ratchet-cli-handler.js
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RatchetCliHandler = void 0;
+const ratchet_node_only_1 = require("@bitblit/ratchet-node-only");
+const site_uploader_1 = require("./site-uploader/site-uploader");
+const start_instance_and_ssh_1 = require("./start-instance-and-ssh");
+const ratchet_aws_node_only_info_1 = require("../build/ratchet-aws-node-only-info");
+class RatchetCliHandler extends ratchet_node_only_1.AbstractRatchetCliHandler {
+    fetchHandlerMap() {
+        return {
+            'site-uploader': site_uploader_1.SiteUploader.runFromCliArgs,
+            'start-instance-and-ssh': start_instance_and_ssh_1.StartInstanceAndSsh.runFromCliArgs,
+        };
+    }
+    fetchVersionInfo() {
+        return ratchet_aws_node_only_info_1.RatchetAwsNodeOnlyInfo.buildInformation();
+    }
+}
+exports.RatchetCliHandler = RatchetCliHandler;
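
The handler map above is the whole CLI surface: each key is a command name and each value a static (args) => Promise entry point. A project could expose its own commands the same way; the sketch below assumes AbstractRatchetCliHandler requires only the two overrides shown in this diff, and its version object mirrors the fields from ratchet-aws-node-only-info.js.

const { AbstractRatchetCliHandler } = require('@bitblit/ratchet-node-only');

// Hypothetical handler following the pattern above
class MyCliHandler extends AbstractRatchetCliHandler {
  fetchHandlerMap() {
    return {
      // Command name -> async entry point taking the remaining CLI args
      'say-hello': async (args) => console.log('Hello, %s', args[0]),
    };
  }
  fetchVersionInfo() {
    return { version: '1', hash: '', branch: '', tag: '', timeBuiltISO: '', notes: '' };
  }
}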

package/dist/cjs/cli/site-uploader/site-uploader.js
@@ -0,0 +1,117 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SiteUploader = void 0;
+const tslib_1 = require("tslib");
+const fs_1 = tslib_1.__importDefault(require("fs"));
+const walk_1 = tslib_1.__importDefault(require("walk"));
+const client_s3_1 = require("@aws-sdk/client-s3");
+const path_1 = tslib_1.__importDefault(require("path"));
+const mime_types_1 = tslib_1.__importDefault(require("mime-types"));
+const ratchet_common_1 = require("@bitblit/ratchet-common");
+const lib_storage_1 = require("@aws-sdk/lib-storage");
+class SiteUploader {
+    constructor(srcDir, bucketName, configFile) {
+        this.s3 = new client_s3_1.S3Client({ region: 'us-east-1' });
+        this.srcDir = srcDir;
+        this.bucketName = bucketName;
+        this.config = JSON.parse(fs_1.default.readFileSync(configFile).toString('ascii'));
+    }
+    static createFromArgs(args) {
+        if (args && args.length === 3) {
+            const src = args[0];
+            const bucket = args[1];
+            const configFile = args[2];
+            return new SiteUploader(src, bucket, configFile);
+        }
+        else {
+            console.log('Usage : node ratchet-site-uploader {srcDir} {bucket} {configFile} (Found ' + args + ' arguments, need 3)');
+            return null;
+        }
+    }
+    static async runFromCliArgs(args) {
+        const inst = SiteUploader.createFromArgs(args);
+        return inst.runPump();
+    }
+    findMatch(prefix, fileName, config) {
+        let found = null;
+        if (prefix != null && fileName != null && config != null && config.mapping != null) {
+            config.mapping.forEach((entry) => {
+                if (found == null) {
+                    if (entry.prefixMatch == null || prefix.match(entry.prefixMatch)) {
+                        if (entry.fileMatch == null || fileName.match(entry.fileMatch)) {
+                            found = entry;
+                        }
+                    }
+                }
+            });
+        }
+        return found;
+    }
+    findMime(fileName, config) {
+        let found = null;
+        if (config != null && config.customMimeTypeMapping != null) {
+            Object.keys(config.customMimeTypeMapping).forEach((k) => {
+                if (found == null && fileName.endsWith(k)) {
+                    found = config.customMimeTypeMapping[k];
+                }
+            });
+        }
+        if (found == null) {
+            found = mime_types_1.default.lookup(fileName);
+        }
+        if (found == null) {
+            found = 'binary/octet-stream';
+        }
+        return found;
+    }
+    runPump() {
+        return new Promise((resolve, reject) => {
+            ratchet_common_1.Logger.info('Uploading contents of %s to %s using %j as config', this.srcDir, this.bucketName, this.config);
+            const options = {};
+            const walker = walk_1.default.walk(this.srcDir, options);
+            walker.on('file', function (root, fileStats, next) {
+                ratchet_common_1.Logger.info('Processing %j', fileStats.name);
+                const prefix = root == this.srcDir ? '' : root.substring(this.srcDir.length + 1) + '/';
+                const proc = this.findMatch(prefix, fileStats.name, this.config);
+                const key = prefix + fileStats.name;
+                ratchet_common_1.Logger.info('Uploading file : %s/%s to key %s with %j', root, fileStats.name, key, proc);
+                const params = proc && proc.putParams ? JSON.parse(JSON.stringify(proc.putParams)) : {};
+                params.Bucket = this.bucketName;
+                params.Key = key;
+                params.Body = fs_1.default.readFileSync(path_1.default.join(root, fileStats.name));
+                if (!params.ContentType) {
+                    params.ContentType = this.findMime(fileStats.name, this.config);
+                }
+                const upload = new lib_storage_1.Upload({
+                    client: this.s3,
+                    params: params,
+                    tags: [],
+                    queueSize: 4,
+                    partSize: 1024 * 1024 * 5,
+                    leavePartsOnError: false,
+                });
+                upload.on('httpUploadProgress', (progress) => {
+                    ratchet_common_1.Logger.info('Uploading : %s', progress);
+                });
+                upload
+                    .done()
+                    .then((result) => {
+                    ratchet_common_1.Logger.info('Finished upload of %s: %j', key, result);
+                    next();
+                })
+                    .catch((err) => {
+                    ratchet_common_1.Logger.warn('%s failed to upload : %s : Continuing', key, err);
+                    next();
+                });
+            }.bind(this));
+            walker.on('errors', function (root, nodeStatsArray, next) {
+                next();
+            });
+            walker.on('end', function () {
+                ratchet_common_1.Logger.info('All done');
+                resolve(true);
+            });
+        });
+    }
+}
+exports.SiteUploader = SiteUploader;
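
findMatch and findMime above imply the shape of the JSON config file: an optional mapping array of { prefixMatch, fileMatch, putParams } entries (first match wins; putParams is copied onto the S3 upload verbatim) and an optional customMimeTypeMapping from filename suffix to content type. A plausible config for a static site, with placeholder bucket and path names, might look like this:

const fs = require('fs');
const { SiteUploader } = require('@bitblit/ratchet-aws-node-only');

// Shape inferred from findMatch/findMime above; values are placeholders
const siteConfig = {
  mapping: [
    // Long-cache fingerprinted assets under static/
    { prefixMatch: '^static/', fileMatch: null, putParams: { CacheControl: 'max-age=31536000' } },
    // HTML anywhere: short cache so updates show up quickly
    { prefixMatch: null, fileMatch: '\\.html$', putParams: { CacheControl: 'max-age=60' } },
  ],
  customMimeTypeMapping: {
    '.wasm': 'application/wasm',
  },
};
fs.writeFileSync('/tmp/site-config.json', JSON.stringify(siteConfig));

// Equivalent to: node ratchet-site-uploader ./build my-site-bucket /tmp/site-config.json
SiteUploader.runFromCliArgs(['./build', 'my-site-bucket', '/tmp/site-config.json'])
  .catch((err) => console.error(err));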

package/dist/cjs/cli/start-instance-and-ssh.js
@@ -0,0 +1,67 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.StartInstanceAndSsh = void 0;
+const tslib_1 = require("tslib");
+const ratchet_common_1 = require("@bitblit/ratchet-common");
+const ratchet_aws_1 = require("@bitblit/ratchet-aws");
+const child_process_1 = require("child_process");
+const fs_1 = tslib_1.__importDefault(require("fs"));
+const os_1 = tslib_1.__importDefault(require("os"));
+const path_1 = tslib_1.__importDefault(require("path"));
+class StartInstanceAndSsh {
+    constructor(instanceId, publicKeyFile = path_1.default.join(os_1.default.homedir(), '.ssh', 'id_rsa.pub'), instanceOsUser = 'ec2-user', region = 'us-east-1', availabilityZone = 'us-east-1a') {
+        this.instanceId = instanceId;
+        this.publicKeyFile = publicKeyFile;
+        this.instanceOsUser = instanceOsUser;
+        this.region = region;
+        this.availabilityZone = availabilityZone;
+        this.ec2Ratchet = new ratchet_aws_1.Ec2Ratchet(this.region, this.availabilityZone);
+    }
+    static createFromArgs(args) {
+        if ((args === null || args === void 0 ? void 0 : args.length) === 1 || (args === null || args === void 0 ? void 0 : args.length) === 2) {
+            const instanceId = args[0];
+            return new StartInstanceAndSsh(instanceId);
+        }
+        else {
+            ratchet_common_1.Logger.info('Usage : ratchet-start-instance-and-ssh {instanceId} {publicKeyFile} (Found %s arguments, need 1 or 2)', args);
+            return null;
+        }
+    }
+    static async runFromCliArgs(args) {
+        const inst = StartInstanceAndSsh.createFromArgs(args);
+        return inst.run();
+    }
+    async run() {
+        let instance = await this.ec2Ratchet.describeInstance(this.instanceId);
+        if (!!instance) {
+            let launched = false;
+            if (instance.State.Code == 16) {
+                ratchet_common_1.Logger.info('Instance is already running...');
+                launched = true;
+            }
+            else {
+                ratchet_common_1.Logger.info('Instance is not running... starting up : %s', this.instanceId);
+                launched = await this.ec2Ratchet.launchInstance(this.instanceId, 1000 * 30);
+            }
+            if (launched) {
+                ratchet_common_1.Logger.info('Uploading public key...');
+                const publicKeyText = fs_1.default.readFileSync(this.publicKeyFile).toString();
+                const publicKeyResponse = await this.ec2Ratchet.sendPublicKeyToEc2Instance(this.instanceId, publicKeyText, this.instanceOsUser);
+                ratchet_common_1.Logger.info('Key response : %j', publicKeyResponse);
+                instance = instance && instance.PublicIpAddress ? instance : await this.ec2Ratchet.describeInstance(this.instanceId);
+                ratchet_common_1.Logger.info('Instance IP address is %s', instance.PublicIpAddress);
+                const ret = (0, child_process_1.spawnSync)('ssh', [this.instanceOsUser + '@' + instance.PublicIpAddress], {
+                    stdio: 'inherit',
+                });
+                ratchet_common_1.Logger.info('%j', ret);
+            }
+            else {
+                ratchet_common_1.Logger.info('Instance could not start - check logs');
+            }
+        }
+        else {
+            ratchet_common_1.Logger.info('No such instance found - check your AWS keys? : %s', this.instanceId);
+        }
+    }
+}
+exports.StartInstanceAndSsh = StartInstanceAndSsh;
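
End to end, the class above starts a stopped instance if needed, pushes your public key through Ec2Ratchet.sendPublicKeyToEc2Instance (presumably EC2 Instance Connect under the hood), and then execs ssh against the instance's public IP. Invocation is one line; the instance id below is a placeholder, and region, OS user, and key file fall back to the constructor defaults (us-east-1, ec2-user, ~/.ssh/id_rsa.pub):

const { StartInstanceAndSsh } = require('@bitblit/ratchet-aws-node-only');

// 'i-0123456789abcdef0' is a placeholder instance id
StartInstanceAndSsh.runFromCliArgs(['i-0123456789abcdef0'])
  .catch((err) => console.error(err));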

package/dist/cjs/index.js
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const tslib_1 = require("tslib");
+tslib_1.__exportStar(require("./athena/alb-athena-log-ratchet"), exports);
+tslib_1.__exportStar(require("./athena/athena-ratchet"), exports);
+tslib_1.__exportStar(require("./build/ratchet-aws-node-only-info"), exports);
+tslib_1.__exportStar(require("./cli/dynamo-exporter"), exports);
+tslib_1.__exportStar(require("./cli/ratchet-cli-handler"), exports);
+tslib_1.__exportStar(require("./cli/start-instance-and-ssh"), exports);
+tslib_1.__exportStar(require("./cli/site-uploader/site-uploader"), exports);
+tslib_1.__exportStar(require("./mail/inbound/inbound-email-ratchet"), exports);