@bitblit/ratchet-aws-node-only 4.0.80-alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dist/cjs/athena/alb-athena-log-ratchet.js +158 -0
  2. package/dist/cjs/athena/athena-ratchet.js +159 -0
  3. package/dist/cjs/build/ratchet-aws-node-only-info.js +18 -0
  4. package/dist/cjs/cli/dynamo-exporter.js +81 -0
  5. package/dist/cjs/cli/ratchet-cli-handler.js +19 -0
  6. package/dist/cjs/cli/site-uploader/site-uploader.js +117 -0
  7. package/dist/cjs/cli/start-instance-and-ssh.js +67 -0
  8. package/dist/cjs/index.js +11 -0
  9. package/dist/cjs/mail/inbound/inbound-email-ratchet.js +54 -0
  10. package/dist/es/athena/alb-athena-log-ratchet.js +153 -0
  11. package/dist/es/athena/athena-ratchet.js +154 -0
  12. package/dist/es/build/ratchet-aws-node-only-info.js +14 -0
  13. package/dist/es/cli/dynamo-exporter.js +76 -0
  14. package/dist/es/cli/ratchet-cli-handler.js +15 -0
  15. package/dist/es/cli/site-uploader/site-uploader.js +112 -0
  16. package/dist/es/cli/start-instance-and-ssh.js +62 -0
  17. package/dist/es/index.js +8 -0
  18. package/dist/es/mail/inbound/inbound-email-ratchet.js +49 -0
  19. package/dist/tsconfig.cjs.tsbuildinfo +1 -0
  20. package/dist/tsconfig.es.tsbuildinfo +1 -0
  21. package/dist/tsconfig.types.tsbuildinfo +1 -0
  22. package/dist/types/athena/alb-athena-log-ratchet.d.ts +57 -0
  23. package/dist/types/athena/athena-ratchet.d.ts +16 -0
  24. package/dist/types/build/ratchet-aws-node-only-info.d.ts +5 -0
  25. package/dist/types/cli/dynamo-exporter.d.ts +13 -0
  26. package/dist/types/cli/ratchet-cli-handler.d.ts +6 -0
  27. package/dist/types/cli/site-uploader/site-uploader.d.ts +12 -0
  28. package/dist/types/cli/start-instance-and-ssh.d.ts +12 -0
  29. package/dist/types/index.d.ts +11 -0
  30. package/dist/types/mail/inbound/inbound-email-ratchet.d.ts +18 -0
  31. package/includes/cli.js +12 -0
  32. package/package.json +97 -0
@@ -0,0 +1,54 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.MultiStream = exports.InboundEmailRatchet = void 0;
4
+ const mailparser_1 = require("mailparser");
5
+ const stream_1 = require("stream");
6
+ const ratchet_common_1 = require("@bitblit/ratchet-common");
7
/**
 * Processes inbound email messages stored in S3, parsing them with mailparser
 * and recognizing specific message shapes (currently "reach inventory" zips).
 */
class InboundEmailRatchet {
    /**
     * @param cache S3 cache ratchet used to read raw messages; must have a default bucket configured.
     */
    constructor(cache) {
        this.cache = cache;
        ratchet_common_1.RequireRatchet.notNullOrUndefined(this.cache, 'cache');
        ratchet_common_1.RequireRatchet.notNullOrUndefined(this.cache.getDefaultBucket(), 'cache.defaultBucket');
    }
    /**
     * Fetches the raw email at the given S3 key and processes it.
     * Returns false when the key is missing, otherwise delegates to processEmailFromBuffer.
     */
    async processEmailFromS3(key) {
        if (await this.cache.fileExists(key)) {
            const data = await this.cache.fetchCacheFileAsString(key);
            // Buffer.from replaces the deprecated and unsafe `new Buffer(...)` constructor
            return this.processEmailFromBuffer(Buffer.from(data));
        }
        ratchet_common_1.Logger.warn('Cannot process inbound email - no such key : %s', key);
        return false;
    }
    /**
     * Parses a raw email buffer and checks whether it matches the recognized
     * shape (subject starting 'reach inventory' with a single zip attachment).
     * Currently always returns false - the recognized branch is not implemented yet.
     */
    async processEmailFromBuffer(buf) {
        const rval = false;
        ratchet_common_1.RequireRatchet.notNullOrUndefined(buf, 'buf');
        ratchet_common_1.Logger.info('Processing inbound email - size %d bytes', buf.length);
        const message = await (0, mailparser_1.simpleParser)(buf);
        // Optional chaining: messages without a From header would otherwise throw here
        ratchet_common_1.Logger.info('Found mail from "%s" subject "%s" with %d attachments', message?.from?.text, message?.subject, message?.attachments?.length);
        if (!!message &&
            !!message.subject &&
            message.subject.toLowerCase().startsWith('reach inventory') &&
            !!message.attachments &&
            message.attachments.length === 1 &&
            message.attachments[0].contentType.toLowerCase() === 'application/zip') {
            // Recognized message shape - processing intentionally not implemented here yet
        }
        else {
            ratchet_common_1.Logger.info('Unrecognized email - not processing');
        }
        return rval;
    }
}
exports.InboundEmailRatchet = InboundEmailRatchet;
44
/**
 * Minimal Readable stream that emits a single payload once and then ends.
 * Buffers and strings are streamed in binary mode; anything else is streamed
 * in object mode.
 */
class MultiStream extends stream_1.Readable {
    constructor(object, options = {}) {
        const isRawPayload = object instanceof Buffer || typeof object === 'string';
        super(isRawPayload ? options : { objectMode: true });
        this._object = object;
    }
    _read() {
        // First call pushes the payload; the second pushes null, ending the stream.
        const pending = this._object;
        this._object = null;
        this.push(pending);
    }
}
54
+ exports.MultiStream = MultiStream;
@@ -0,0 +1,153 @@
1
+ import { readFileSync } from 'fs';
2
+ import path from 'path';
3
+ import { Logger } from '@bitblit/ratchet-common';
4
+ import { RequireRatchet } from '@bitblit/ratchet-common';
5
+ import { StringRatchet } from '@bitblit/ratchet-common';
6
+ import { S3Ratchet } from '@bitblit/ratchet-aws';
7
+ import { CsvRatchet } from '@bitblit/ratchet-node-only';
8
/**
 * Manages an Athena table over ALB access logs stored in S3: creating the
 * table, adding date partitions, and querying log records to files/objects.
 */
export class AlbAthenaLogRatchet {
    /**
     * @param athena AthenaRatchet used to execute queries
     * @param athenaTableName Name of the ALB log table in Athena
     */
    constructor(athena, athenaTableName) {
        this.athena = athena;
        this.athenaTableName = athenaTableName;
        RequireRatchet.notNullOrUndefined(athena, 'athena');
        RequireRatchet.notNullOrUndefined(StringRatchet.trimToNull(athenaTableName), 'athenaTableName');
    }
    /**
     * Adds one date_utc_partition per UTC day in [startTimeEpochMS, endTimeEpochMS)
     * pointing at rootPath/yyyy/MM/dd. Defaults to the last 24 hours.
     * Returns the list of partition clauses that were generated.
     */
    async updatePartitions(rootPath, s3, startTimeEpochMS = new Date().getTime() - 1000 * 60 * 60 * 24, endTimeEpochMS = new Date().getTime()) {
        RequireRatchet.true(S3Ratchet.checkS3UrlForValidity(rootPath), 'root path not valid');
        RequireRatchet.notNullOrUndefined(s3, 's3');
        Logger.info('Updating partitions for %s from %s', this.athenaTableName, rootPath);
        let current = startTimeEpochMS;
        const clauses = [];
        while (current < endTimeEpochMS) {
            // Day-granularity UTC date, e.g. 2023-03-12
            const dateUtcVal = new Date(current).toISOString().substring(0, 10);
            Logger.info('d:%s', dateUtcVal);
            const dateParts = dateUtcVal.split('-');
            clauses.push(`PARTITION (date_utc_partition='${dateUtcVal}') LOCATION '${rootPath}/${dateParts[0]}/${dateParts[1]}/${dateParts[2]}'`);
            current += 1000 * 60 * 60 * 24;
        }
        if (clauses.length > 0) {
            const stmt = 'ALTER TABLE ' + this.athenaTableName + ' ADD IF NOT EXISTS \n' + clauses.join('\n');
            await this.athena.runQueryToObjects(stmt);
        }
        else {
            Logger.warn('Not updating partitions - no time between time clauses');
        }
        return clauses;
    }
    /**
     * Creates the ALB log table from the bundled DDL template, optionally
     * dropping any existing table first. Returns true on success.
     */
    async createTable(rootPath, replaceIfPresent = false) {
        RequireRatchet.true(S3Ratchet.checkS3UrlForValidity(rootPath), 'root path not valid');
        let rval = false;
        Logger.info('Creating ALB table %s', this.athenaTableName);
        if (replaceIfPresent) {
            Logger.info('Replace if present specified, removed old table');
            try {
                await this.athena.runQueryToObjects('drop table ' + this.athenaTableName);
            }
            catch (err) {
                // Best-effort drop: the table may simply not exist yet
                Logger.info('Drop error : %j', err);
            }
        }
        let tableCreateQry = readFileSync(path.join(__dirname, '../static/albAthenaTableCreate.txt')).toString();
        tableCreateQry = tableCreateQry.split('{{TABLE NAME}}').join(this.athenaTableName);
        tableCreateQry = tableCreateQry.split('{{ALB_LOG_ROOT}}').join(rootPath);
        Logger.info('Creating table with %s', tableCreateQry);
        try {
            await this.athena.runQueryToObjects(tableCreateQry);
            rval = true;
        }
        catch (err) {
            Logger.error('Error creating table : %s', err);
        }
        return rval;
    }
    /** Parses ALB log records from an open CSV read stream. */
    static async readLogObjectsFromCsvStream(readStream) {
        return CsvRatchet.streamParse(readStream, (p) => p);
    }
    /** Parses ALB log records from a CSV file on disk. */
    static async readLogObjectsFromFile(fileName) {
        return CsvRatchet.fileParse(fileName, (p) => p);
    }
    /** Runs the query to a temp file and parses the results into objects. */
    async fetchAlbLogRecords(qry) {
        const tempFile = await this.fetchAlbLogRecordsToFile(qry);
        return AlbAthenaLogRatchet.readLogObjectsFromFile(tempFile);
    }
    /**
     * Builds and runs a SELECT over the ALB table, writing results to a file.
     * All filters (start/end time, URL filter, limit) are independently optional.
     * Returns the output file name.
     */
    async fetchAlbLogRecordsToFile(qry, outputFileName = null) {
        Logger.info('Querying %s : %j', this.athenaTableName, qry);
        let qrySt = 'select * from ' + this.athenaTableName + ' where 1=1 ';
        // Bug fix: these clauses were previously nested inside a duplicated
        // `if (qry.startTimeEpochMS)` check, so end-time / URL / limit filters
        // were silently dropped whenever no start time was supplied.
        if (qry.startTimeEpochMS) {
            qrySt += " AND time >= '" + new Date(qry.startTimeEpochMS).toISOString() + "'";
            qrySt += " AND date_utc_partition >='" + new Date(qry.startTimeEpochMS).toISOString().substring(0, 10) + "'";
        }
        if (qry.endTimeEpochMS) {
            qrySt += " AND time < '" + new Date(qry.endTimeEpochMS).toISOString() + "'";
            qrySt += " AND date_utc_partition <='" + new Date(qry.endTimeEpochMS).toISOString().substring(0, 10) + "'";
        }
        if (qry.requestUrlFilter) {
            qrySt += " AND request_url LIKE '" + qry.requestUrlFilter + "'";
        }
        if (qry.limit) {
            qrySt += ' LIMIT ' + qry.limit;
        }
        const result = await this.athena.runQueryToFile(qrySt, null, outputFileName);
        return result;
    }
}
108
// Athena DDL template for an ALB access-log table. {{TABLE NAME}} and
// {{ALB_LOG_ROOT}} are substituted before execution; the RegexSerDe
// input.regex splits each raw ALB log line into the declared columns.
// NOTE(review): createTable() loads its DDL from ../static/albAthenaTableCreate.txt
// rather than this constant - confirm which copy is authoritative.
AlbAthenaLogRatchet.CREATE_TABLE_STATEMENT = 'CREATE EXTERNAL TABLE IF NOT EXISTS `{{TABLE NAME}}`(\n' +
    " `type` string COMMENT '',\n" +
    " `time` string COMMENT '',\n" +
    " `elb` string COMMENT '',\n" +
    " `client_ip` string COMMENT '',\n" +
    " `client_port` int COMMENT '',\n" +
    " `target_ip` string COMMENT '',\n" +
    " `target_port` int COMMENT '',\n" +
    " `request_processing_time` double COMMENT '',\n" +
    " `target_processing_time` double COMMENT '',\n" +
    " `response_processing_time` double COMMENT '',\n" +
    " `elb_status_code` string COMMENT '',\n" +
    " `target_status_code` string COMMENT '',\n" +
    " `received_bytes` bigint COMMENT '',\n" +
    " `sent_bytes` bigint COMMENT '',\n" +
    " `request_verb` string COMMENT '',\n" +
    " `request_url` string COMMENT '',\n" +
    " `request_proto` string COMMENT '',\n" +
    " `user_agent` string COMMENT '',\n" +
    " `ssl_cipher` string COMMENT '',\n" +
    " `ssl_protocol` string COMMENT '',\n" +
    " `target_group_arn` string COMMENT '',\n" +
    " `trace_id` string COMMENT '',\n" +
    " `domain_name` string COMMENT '',\n" +
    " `chosen_cert_arn` string COMMENT '',\n" +
    " `matched_rule_priority` string COMMENT '',\n" +
    " `request_creation_time` string COMMENT '',\n" +
    " `actions_executed` string COMMENT '',\n" +
    " `redirect_url` string COMMENT '',\n" +
    " `lambda_error_reason` string COMMENT '',\n" +
    " `target_port_list` string COMMENT '',\n" +
    " `target_status_code_list` string COMMENT '',\n" +
    " `new_field` string COMMENT '')\n" +
    'PARTITIONED BY (\n' +
    ' `date_utc_partition` string\n' +
    ')\n' +
    'ROW FORMAT SERDE\n' +
    " 'org.apache.hadoop.hive.serde2.RegexSerDe'\n" +
    'WITH SERDEPROPERTIES (\n' +
    ' \'input.regex\'=\'([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*):([0-9]*) ([^ ]*)[:-]([0-9]*) ([-.0-9]*) ([-.0-9]*) ([-.0-9]*) (|[-0-9]*) (-|[-0-9]*) ([-0-9]*) ([-0-9]*) \\"([^ ]*) ([^ ]*) (- |[^ ]*)\\" \\"([^\\"]*)\\" ([A-Z0-9-]+) ([A-Za-z0-9.-]*) ([^ ]*) \\"([^\\"]*)\\" \\"([^\\"]*)\\" \\"([^\\"]*)\\" ([-.0-9]*) ([^ ]*) \\"([^\\"]*)\\" \\"([^\\"]*)\\" \\"([^ ]*)\\" \\"([^s]+)\\" \\"([^s]+)\\"(.*)\')\n' +
    'STORED AS INPUTFORMAT\n' +
    " 'org.apache.hadoop.mapred.TextInputFormat'\n" +
    'OUTPUTFORMAT\n' +
    " 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" +
    'LOCATION\n' +
    " '{{ALB_LOG_ROOT}}'\n";
@@ -0,0 +1,154 @@
1
+ import { GetNamedQueryCommand, GetQueryExecutionCommand, ListNamedQueriesCommand, StartQueryExecutionCommand, } from '@aws-sdk/client-athena';
2
+ import { StringRatchet } from '@bitblit/ratchet-common';
3
+ import { Logger } from '@bitblit/ratchet-common';
4
+ import { StopWatch } from '@bitblit/ratchet-common';
5
+ import { PromiseRatchet } from '@bitblit/ratchet-common';
6
+ import { GetObjectCommand } from '@aws-sdk/client-s3';
7
+ import tmp from 'tmp';
8
+ import fs from 'fs';
9
+ import { CsvRatchet } from '@bitblit/ratchet-node-only';
10
+ import { RequireRatchet } from '@bitblit/ratchet-common';
11
/**
 * Helper for running Athena queries: starts a query, polls it to completion,
 * and retrieves the result CSV from S3 as objects or as a local file.
 */
export class AthenaRatchet {
    /**
     * @param athena Athena client used to start/poll queries
     * @param s3 S3 client used to fetch query output files
     * @param outputLocation s3:// URL where Athena writes query results
     */
    constructor(athena, s3, outputLocation) {
        this.athena = athena;
        this.s3 = s3;
        this.outputLocation = outputLocation;
        RequireRatchet.notNullOrUndefined(athena);
        RequireRatchet.notNullOrUndefined(s3);
        RequireRatchet.notNullOrUndefined(outputLocation);
        RequireRatchet.true(outputLocation.startsWith('s3://'));
    }
    /**
     * Converts Athena result rows (first row holds the column names) into
     * plain objects keyed by column name. Returns [] for empty/missing input
     * instead of throwing on input[0].
     */
    static athenaRowsToObject(input) {
        if (!input || input.length === 0) {
            return [];
        }
        const colNames = input[0].Data.map((d) => d.VarCharValue);
        const temp = input.slice(1);
        const rval = temp.map((t) => {
            const newItem = {};
            for (let i = 0; i < t.Data.length; i++) {
                newItem[colNames[i]] = t.Data[i].VarCharValue;
            }
            return newItem;
        });
        return rval;
    }
    /** Replaces every {key} placeholder in the query with its value from queryParams. */
    static applyParamsToQuery(query, queryParams) {
        let rval = query;
        if (!!rval && !!queryParams) {
            Object.keys(queryParams).forEach((k) => {
                const val = StringRatchet.safeString(queryParams[k]);
                const kk = '{' + k + '}';
                rval = rval.split(kk).join(val);
            });
        }
        return rval;
    }
    /** Pages through ListNamedQueries and returns every named query id. */
    async fetchQueryIds() {
        const params = {
            NextToken: null,
        };
        let rval = [];
        let next = null;
        do {
            next = await this.athena.send(new ListNamedQueriesCommand(params));
            // The final page may omit NamedQueryIds entirely
            rval = rval.concat(next.NamedQueryIds || []);
            params.NextToken = next.NextToken;
        } while (!!params.NextToken);
        return rval;
    }
    /** Fetches the full NamedQuery definition for every stored query id. */
    async listQueries() {
        const rval = [];
        const ids = await this.fetchQueryIds();
        Logger.debug('Finding %d items', ids.length);
        for (let i = 0; i < ids.length; i++) {
            const params = {
                NamedQueryId: ids[i],
            };
            const val = await this.athena.send(new GetNamedQueryCommand(params));
            rval.push(val.NamedQuery);
        }
        return rval;
    }
    /** Case-insensitive lookup of a named query by name; undefined when absent. */
    async findQueryByName(name) {
        const all = await this.listQueries();
        const rval = all.find((a) => a.Name.toLowerCase() == name.toLowerCase());
        return rval;
    }
    /**
     * Runs a query and parses the CSV result into an array of objects
     * (one per row, keyed by the header line).
     */
    async runQueryToObjects(queryIn, queryParams = {}, pingTimeMS = 2000) {
        Logger.info('Running query to objects');
        const outputLoc = await this.runQueryToOutputLocation(queryIn, queryParams, pingTimeMS);
        Logger.info('Query succeeded, processing file from %s', outputLoc);
        const bucketName = outputLoc.substring(5, outputLoc.indexOf('/', 5));
        const obKey = outputLoc.substring(outputLoc.indexOf('/', 5) + 1);
        const req = {
            Bucket: bucketName,
            Key: obKey,
        };
        const getFileOut = await this.s3.send(new GetObjectCommand(req));
        // Bug fix: in AWS SDK v3 Body is a stream, so Body.toString() would
        // yield '[object Object]'; read it fully with transformToString().
        const bodyText = await getFileOut.Body.transformToString();
        const rval = await CsvRatchet.stringParse(bodyText, (p) => {
            return p;
        }, { columns: true, skip_empty_lines: true });
        return rval;
    }
    /**
     * Runs a query and streams the result CSV to a local file (a temp file
     * unless targetDataFileIn is given). Returns the file name.
     */
    async runQueryToFile(queryIn, queryParams = {}, targetDataFileIn = null, pingTimeMS = 2000) {
        Logger.info('Running query to file');
        const outputLoc = await this.runQueryToOutputLocation(queryIn, queryParams, pingTimeMS);
        Logger.info('Query succeeded, pulling file from %s', outputLoc);
        const bucketName = outputLoc.substring(5, outputLoc.indexOf('/', 5));
        const obKey = outputLoc.substring(outputLoc.indexOf('/', 5) + 1);
        const req = {
            Bucket: bucketName,
            Key: obKey,
        };
        const targetDataFile = targetDataFileIn || tmp.fileSync({ postfix: '.csv', keep: false }).name;
        const fileStream = fs.createWriteStream(targetDataFile);
        const output = await this.s3.send(new GetObjectCommand(req));
        const readStream = output.Body;
        readStream.pipe(fileStream);
        // Bug fix: wait on the *write* stream - 'finish'/'close' fire there once
        // all data has been flushed to disk (the read side emits 'end' instead).
        const rval = await PromiseRatchet.resolveOnEvent(fileStream, ['finish', 'close'], ['error'], targetDataFile);
        Logger.silly('Response: %s', rval);
        return targetDataFile;
    }
    /**
     * Starts the query, polls until it reaches a terminal state, and returns
     * the S3 output location on success or null on failure/error.
     */
    async runQueryToOutputLocation(queryIn, queryParams = {}, pingTimeMS = 2000) {
        let rval = null;
        const timer = new StopWatch();
        const query = AthenaRatchet.applyParamsToQuery(queryIn, queryParams);
        try {
            Logger.info('Starting query : %s', query);
            // Idempotency token so retries cannot start the query twice
            const token = StringRatchet.createType4Guid();
            const params = {
                QueryString: query,
                ResultConfiguration: {
                    OutputLocation: this.outputLocation,
                    EncryptionConfiguration: {
                        EncryptionOption: 'SSE_S3',
                    },
                },
                ClientRequestToken: token,
                QueryExecutionContext: {
                    Database: 'default',
                },
            };
            const startToken = await this.athena.send(new StartQueryExecutionCommand(params));
            const getExecParams = {
                QueryExecutionId: startToken.QueryExecutionId,
            };
            const finalStates = ['FAILED', 'CANCELLED', 'SUCCEEDED'];
            let curState = await this.athena.send(new GetQueryExecutionCommand(getExecParams));
            while (finalStates.indexOf(curState.QueryExecution.Status.State) === -1) {
                await PromiseRatchet.createTimeoutPromise('wait', pingTimeMS);
                Logger.debug('%s : %s : %s', curState.QueryExecution.Status.State, timer.dump(), query);
                curState = await this.athena.send(new GetQueryExecutionCommand(getExecParams));
            }
            if (curState.QueryExecution.Status.State === 'FAILED') {
                Logger.warn('Query failed : %s', curState.QueryExecution.Status.StateChangeReason);
            }
            else if (curState.QueryExecution.Status.State === 'SUCCEEDED') {
                rval = curState.QueryExecution.ResultConfiguration.OutputLocation;
            }
        }
        catch (err) {
            Logger.warn('Failure : %s', err);
        }
        Logger.info('Query took %s : %s', timer.dump(), query);
        return rval;
    }
}
@@ -0,0 +1,14 @@
1
/**
 * Static build metadata for the ratchet-aws-node-only package,
 * captured at package publish time.
 */
export class RatchetAwsNodeOnlyInfo {
    constructor() { }
    /** Returns the build-time metadata (version, git hash/branch/tag, build timestamp). */
    static buildInformation() {
        return {
            version: '80',
            hash: '10fcb761c2fa4186df89e527e948f5780a4e14ac',
            branch: 'alpha-2023-03-12-2',
            tag: 'alpha-2023-03-12-2',
            timeBuiltISO: '2023-03-12T20:00:51-0700',
            notes: '',
        };
    }
}
@@ -0,0 +1,76 @@
1
+ import { StringRatchet } from '@bitblit/ratchet-common';
2
+ import { Logger } from '@bitblit/ratchet-common';
3
+ import { PromiseRatchet } from '@bitblit/ratchet-common';
4
+ import { RequireRatchet } from '@bitblit/ratchet-common';
5
+ import fs from 'fs';
6
+ import readline from 'readline';
7
/**
 * Utilities for moving DynamoDB data to/from JSON-Lines files
 * (one JSON document per line).
 */
export class DynamoExporter {
    constructor() { }
    /**
     * Reads a JSON-Lines file and puts each non-blank line into the table.
     * Returns the number of rows imported.
     */
    static async importJsonLFileToTable(dynamo, tableName, filename) {
        RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
        RequireRatchet.notNullOrUndefined(tableName, 'tableName');
        RequireRatchet.notNullOrUndefined(filename, 'filename');
        const fileStream = fs.createReadStream(filename);
        const rl = readline.createInterface({
            input: fileStream,
            crlfDelay: Infinity, // treat \r\n as a single line break
        });
        let rval = 0;
        for await (const line of rl) {
            if (rval % 100 === 0) {
                Logger.info('Importing line %d', rval);
            }
            if (StringRatchet.trimToNull(line)) {
                const parsed = JSON.parse(line);
                await dynamo.simplePut(tableName, parsed);
                rval++;
            }
        }
        return rval;
    }
    /**
     * Runs a scan and streams results to a JSON-Lines file; returns the row count.
     */
    static async exportScanToJsonLFile(dynamo, scan, filename) {
        RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
        RequireRatchet.notNullOrUndefined(scan, 'scan');
        RequireRatchet.notNullOrUndefined(filename, 'filename');
        const ws = fs.createWriteStream(filename);
        const rval = await DynamoExporter.exportScanToJsonLWriteStream(dynamo, scan, ws);
        // Bug fix: end() the stream BEFORE waiting for completion. Writable
        // streams emit 'finish' only after end() is called, so the old code
        // (await first, then close) could hang forever; it also registered a
        // dead listener for 'end', which Writable streams never emit.
        ws.end();
        await PromiseRatchet.resolveOnEvent(ws, ['finish', 'close'], ['error']);
        return rval;
    }
    /**
     * Runs a query and streams results to a JSON-Lines file; returns the row count.
     */
    static async exportQueryToJsonLFile(dynamo, qry, filename) {
        RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
        RequireRatchet.notNullOrUndefined(qry, 'qry');
        RequireRatchet.notNullOrUndefined(filename, 'filename');
        const ws = fs.createWriteStream(filename);
        const rval = await DynamoExporter.exportQueryToJsonLWriteStream(dynamo, qry, ws);
        // Same fix as exportScanToJsonLFile: flush+close before awaiting 'finish'
        ws.end();
        await PromiseRatchet.resolveOnEvent(ws, ['finish', 'close'], ['error']);
        return rval;
    }
    /** Streams every scan result row to the target as JSON lines. */
    static async exportScanToJsonLWriteStream(dynamo, scan, target) {
        RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
        RequireRatchet.notNullOrUndefined(scan, 'scan');
        RequireRatchet.notNullOrUndefined(target, 'target');
        const rval = await dynamo.fullyExecuteProcessOverScan(scan, async (row) => DynamoExporter.writeItemToJsonLStream(row, target, false));
        return rval;
    }
    /** Streams every query result row to the target as JSON lines. */
    static async exportQueryToJsonLWriteStream(dynamo, qry, target) {
        RequireRatchet.notNullOrUndefined(dynamo, 'dynamo');
        RequireRatchet.notNullOrUndefined(qry, 'qry');
        RequireRatchet.notNullOrUndefined(target, 'target');
        const rval = await dynamo.fullyExecuteProcessOverQuery(qry, async (row) => DynamoExporter.writeItemToJsonLStream(row, target, false));
        return rval;
    }
    /**
     * Writes a single item to the stream as one JSON line.
     * Null/undefined items are skipped unless includeNulls is true.
     */
    static writeItemToJsonLStream(item, target, includeNulls = false) {
        if (!!item || includeNulls) {
            target.write(JSON.stringify(item) + '\n');
        }
    }
}
@@ -0,0 +1,15 @@
1
+ import { AbstractRatchetCliHandler } from '@bitblit/ratchet-node-only';
2
+ import { SiteUploader } from './site-uploader/site-uploader';
3
+ import { StartInstanceAndSsh } from './start-instance-and-ssh';
4
+ import { RatchetAwsNodeOnlyInfo } from '../build/ratchet-aws-node-only-info';
5
/**
 * CLI dispatcher for this package: maps command names to their entry points
 * and reports the package build info.
 */
export class RatchetCliHandler extends AbstractRatchetCliHandler {
    /** Maps each CLI command name to the static handler that runs it. */
    fetchHandlerMap() {
        const handlers = {};
        handlers['site-uploader'] = SiteUploader.runFromCliArgs;
        handlers['start-instance-and-ssh'] = StartInstanceAndSsh.runFromCliArgs;
        return handlers;
    }
    /** Returns the build metadata baked into this package at publish time. */
    fetchVersionInfo() {
        return RatchetAwsNodeOnlyInfo.buildInformation();
    }
}
@@ -0,0 +1,112 @@
1
+ import fs from 'fs';
2
+ import walk from 'walk';
3
+ import { S3Client } from '@aws-sdk/client-s3';
4
+ import path from 'path';
5
+ import mime from 'mime-types';
6
+ import { Logger } from '@bitblit/ratchet-common';
7
+ import { Upload } from '@aws-sdk/lib-storage';
8
/**
 * Uploads the contents of a local directory tree to an S3 bucket, applying
 * per-file S3 put parameters and content types from a JSON config file.
 *
 * NOTE(review): the S3 region is hard-coded to us-east-1 - confirm intended.
 */
export class SiteUploader {
    /**
     * @param srcDir Local directory whose contents will be uploaded
     * @param bucketName Target S3 bucket name
     * @param configFile Path to a JSON config file ({ mapping, customMimeTypeMapping })
     */
    constructor(srcDir, bucketName, configFile) {
        this.s3 = new S3Client({ region: 'us-east-1' });
        this.srcDir = srcDir;
        this.bucketName = bucketName;
        this.config = JSON.parse(fs.readFileSync(configFile).toString('ascii'));
    }
    // Builds an uploader from CLI args [srcDir, bucket, configFile];
    // prints usage and returns null for any other argument count.
    static createFromArgs(args) {
        if (args && args.length === 3) {
            const src = args[0];
            const bucket = args[1];
            const configFile = args[2];
            return new SiteUploader(src, bucket, configFile);
        }
        else {
            console.log('Usage : node ratchet-site-uploader {srcDir} {bucket} {configFile} (Found ' + args + ' arguments, need 3)');
            return null;
        }
    }
    // CLI entry point.
    // NOTE(review): throws a TypeError when createFromArgs returns null (bad args).
    static async runFromCliArgs(args) {
        const inst = SiteUploader.createFromArgs(args);
        return inst.runPump();
    }
    // Returns the first config.mapping entry whose prefixMatch and fileMatch
    // patterns both match (a null pattern matches anything); null if none match.
    findMatch(prefix, fileName, config) {
        let found = null;
        if (prefix != null && fileName != null && config != null && config.mapping != null) {
            config.mapping.forEach((entry) => {
                // forEach cannot break early, so later entries are skipped via this guard
                if (found == null) {
                    if (entry.prefixMatch == null || prefix.match(entry.prefixMatch)) {
                        if (entry.fileMatch == null || fileName.match(entry.fileMatch)) {
                            found = entry;
                        }
                    }
                }
            });
        }
        return found;
    }
    // Resolves a content type: custom config mapping first, then mime lookup,
    // then a generic binary fallback.
    // NOTE(review): mime.lookup returns false (not null) on a miss, so the
    // 'binary/octet-stream' fallback looks unreachable - confirm intended.
    findMime(fileName, config) {
        let found = null;
        if (config != null && config.customMimeTypeMapping != null) {
            Object.keys(config.customMimeTypeMapping).forEach((k) => {
                if (found == null && fileName.endsWith(k)) {
                    found = config.customMimeTypeMapping[k];
                }
            });
        }
        if (found == null) {
            found = mime.lookup(fileName);
        }
        if (found == null) {
            found = 'binary/octet-stream';
        }
        return found;
    }
    // Walks srcDir and uploads every file found; resolves true when the walk
    // ends. Individual upload failures are logged and skipped, not fatal.
    runPump() {
        return new Promise((resolve, reject) => {
            Logger.info('Uploading contents of %s to %s using %j as config', this.srcDir, this.bucketName, this.config);
            const options = {};
            const walker = walk.walk(this.srcDir, options);
            // Classic function + bind(this) so walk's callback args and the
            // uploader's own state are both available
            walker.on('file', function (root, fileStats, next) {
                Logger.info('Processing %j', fileStats.name);
                // S3 key prefix is the path relative to srcDir ('' at the root)
                const prefix = root == this.srcDir ? '' : root.substring(this.srcDir.length + 1) + '/';
                const proc = this.findMatch(prefix, fileStats.name, this.config);
                const key = prefix + fileStats.name;
                Logger.info('Uploading file : %s/%s to key %s with %j', root, fileStats.name, key, proc);
                // Deep-copy configured putParams so per-file mutation is safe
                const params = proc && proc.putParams ? JSON.parse(JSON.stringify(proc.putParams)) : {};
                params.Bucket = this.bucketName;
                params.Key = key;
                params.Body = fs.readFileSync(path.join(root, fileStats.name));
                if (!params.ContentType) {
                    params.ContentType = this.findMime(fileStats.name, this.config);
                }
                const upload = new Upload({
                    client: this.s3,
                    params: params,
                    tags: [],
                    queueSize: 4, // concurrent multipart uploads
                    partSize: 1024 * 1024 * 5, // 5 MB parts (S3 multipart minimum)
                    leavePartsOnError: false,
                });
                upload.on('httpUploadProgress', (progress) => {
                    Logger.info('Uploading : %s', progress);
                });
                // next() is called on both success and failure so one bad file
                // does not stall the walk
                upload
                    .done()
                    .then((result) => {
                    Logger.info('Finished upload of %s: %j', key, result);
                    next();
                })
                    .catch((err) => {
                    Logger.warn('%s failed to upload : %s : Continuing', key, err);
                    next();
                });
            }.bind(this));
            // Walk errors are skipped; the promise only resolves, never rejects
            walker.on('errors', function (root, nodeStatsArray, next) {
                next();
            });
            walker.on('end', function () {
                Logger.info('All done');
                resolve(true);
            });
        });
    }
}
@@ -0,0 +1,62 @@
1
+ import { Logger } from '@bitblit/ratchet-common';
2
+ import { Ec2Ratchet } from '@bitblit/ratchet-aws';
3
+ import { spawnSync } from 'child_process';
4
+ import fs from 'fs';
5
+ import os from 'os';
6
+ import path from 'path';
7
/**
 * Starts an EC2 instance if it is not already running, pushes the caller's
 * public SSH key to it via EC2 Instance Connect, and opens an interactive
 * ssh session to its public IP.
 */
export class StartInstanceAndSsh {
    /**
     * @param instanceId EC2 instance id to start/connect to
     * @param publicKeyFile Public key pushed to the instance (defaults to ~/.ssh/id_rsa.pub)
     * @param instanceOsUser OS user for the ssh session
     * @param region AWS region of the instance
     * @param availabilityZone AZ used when sending the public key
     */
    constructor(instanceId, publicKeyFile = path.join(os.homedir(), '.ssh', 'id_rsa.pub'), instanceOsUser = 'ec2-user', region = 'us-east-1', availabilityZone = 'us-east-1a') {
        this.instanceId = instanceId;
        this.publicKeyFile = publicKeyFile;
        this.instanceOsUser = instanceOsUser;
        this.region = region;
        this.availabilityZone = availabilityZone;
        this.ec2Ratchet = new Ec2Ratchet(this.region, this.availabilityZone);
    }
    /**
     * Builds an instance from CLI args [instanceId, publicKeyFile?];
     * prints usage and returns null for any other argument count.
     */
    static createFromArgs(args) {
        if (args?.length === 1 || args?.length === 2) {
            const instanceId = args[0];
            // Bug fix: the optional {publicKeyFile} argument was previously
            // accepted by the length check but silently ignored.
            return args.length === 2 ? new StartInstanceAndSsh(instanceId, args[1]) : new StartInstanceAndSsh(instanceId);
        }
        else {
            Logger.info('Usage : ratchet-start-instance-and-ssh {instanceId} {publicKeyFile} (Found %s arguments, need 1 or 2)', args);
            return null;
        }
    }
    /** CLI entry point; returns null (after usage output) when args are invalid. */
    static async runFromCliArgs(args) {
        const inst = StartInstanceAndSsh.createFromArgs(args);
        // Bug fix: guard against null so bad args print usage instead of a TypeError
        return inst ? inst.run() : null;
    }
    /**
     * Ensures the instance is running (launching it with a 30s wait if needed),
     * uploads the public key, and spawns an interactive ssh session.
     */
    async run() {
        let instance = await this.ec2Ratchet.describeInstance(this.instanceId);
        if (!!instance) {
            let launched = false;
            if (instance.State.Code == 16) { // 16 == 'running' per EC2 instance state codes
                Logger.info('Instance is already running...');
                launched = true;
            }
            else {
                Logger.info('Instance is not running... starting up : %s', this.instanceId);
                launched = await this.ec2Ratchet.launchInstance(this.instanceId, 1000 * 30);
            }
            if (launched) {
                Logger.info('Uploading public key...');
                const publicKeyText = fs.readFileSync(this.publicKeyFile).toString();
                const publicKeyResponse = await this.ec2Ratchet.sendPublicKeyToEc2Instance(this.instanceId, publicKeyText, this.instanceOsUser);
                Logger.info('Key response : %j', publicKeyResponse);
                // Re-describe if we just started it: the public IP is assigned on start
                instance = instance && instance.PublicIpAddress ? instance : await this.ec2Ratchet.describeInstance(this.instanceId);
                Logger.info('Instance IP address is %s', instance.PublicIpAddress);
                // Inherit stdio so the user gets an interactive terminal
                const ret = spawnSync('ssh', [this.instanceOsUser + '@' + instance.PublicIpAddress], {
                    stdio: 'inherit',
                });
                Logger.info('%j', ret);
            }
            else {
                Logger.info('Instance could not start - check logs');
            }
        }
        else {
            Logger.info('No such instance found - check your AWS keys? : %s', this.instanceId);
        }
    }
}
@@ -0,0 +1,8 @@
1
+ export * from './athena/alb-athena-log-ratchet';
2
+ export * from './athena/athena-ratchet';
3
+ export * from './build/ratchet-aws-node-only-info';
4
+ export * from './cli/dynamo-exporter';
5
+ export * from './cli/ratchet-cli-handler';
6
+ export * from './cli/start-instance-and-ssh';
7
+ export * from './cli/site-uploader/site-uploader';
8
+ export * from './mail/inbound/inbound-email-ratchet';