@bitblit/ratchet-aws-node-only 6.0.146-alpha → 6.0.148-alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,150 @@
+ import { DaemonProcessState } from './daemon-process-state.js';
+ import { DaemonProcessCreateOptions } from './daemon-process-create-options.js';
+ import { DaemonUtil } from './daemon-util.js';
+ import { DaemonLike } from './daemon-like.js';
+ import { DaemonProcessStatePublicToken } from './daemon-process-state-public-token.js';
+ import { S3Client } from '@aws-sdk/client-s3';
+ import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+ import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
+ import { Logger } from '@bitblit/ratchet-common/logger/logger';
+ import { JwtRatchetLike } from '@bitblit/ratchet-node-only/jwt/jwt-ratchet-like';
+ import { S3CacheRatchetLike } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet-like';
+ import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';
+
+ export class Daemon implements DaemonLike {
+   public static DEFAULT_DEFAULT_GROUP: string = 'DEFAULT';
+
+   private cache: S3CacheRatchetLike;
+
+   constructor(
+     private s3: S3Client,
+     private bucket: string,
+     private prefix: string = '',
+     private _defaultGroup: string = Daemon.DEFAULT_DEFAULT_GROUP,
+     private jwtRatchet?: JwtRatchetLike,
+   ) {
+     this.cache = new S3CacheRatchet(this.s3, this.bucket);
+   }
+
+   public get defaultGroup(): string {
+     return this._defaultGroup;
+   }
+
+   public async keyToPublicToken(key: string, expirationSeconds: number): Promise<string> {
+     RequireRatchet.notNullOrUndefined(this.jwtRatchet, 'You must set jwtRatchet if you wish to use public tokens');
+     RequireRatchet.notNullOrUndefined(key, 'key');
+     RequireRatchet.true(expirationSeconds > 0, 'Expiration seconds must be larger than 0');
+
+     const token: DaemonProcessStatePublicToken = { daemonKey: key };
+     const publicToken: string = await this.jwtRatchet.createTokenString(token, expirationSeconds);
+     return publicToken;
+   }
+
+   // Keys are the base64-encoded form of the underlying S3 paths
+   private keyToPath(key: string): string {
+     return Buffer.from(key, 'base64').toString();
+   }
+
+   private pathToKey(path: string): string {
+     return Buffer.from(path).toString('base64');
+   }
+
+   private generatePath(group: string = this._defaultGroup): string {
+     return this.generatePrefix(group) + StringRatchet.createType4Guid();
+   }
+
+   private generatePrefix(group: string = this._defaultGroup): string {
+     return this.prefix + group + '/';
+   }
+
+   public async start(options: DaemonProcessCreateOptions): Promise<DaemonProcessState> {
+     options.group ??= this._defaultGroup;
+     const path: string = this.generatePath(options.group);
+     const key: string = this.pathToKey(path);
+     return DaemonUtil.start(this.cache, key, path, options);
+   }
+
+   private async writeState(newState: DaemonProcessState, contents: Buffer): Promise<DaemonProcessState> {
+     const path: string = this.keyToPath(newState.id);
+     return DaemonUtil.writeState(this.cache, path, newState, contents);
+   }
+
+   public async clean(group: string = this._defaultGroup, olderThanSeconds: number = 60 * 60 * 24 * 7): Promise<DaemonProcessState[]> {
+     try {
+       Logger.info('Daemon removing items older than %d seconds from group %s', olderThanSeconds, group);
+       const original: DaemonProcessState[] = await this.list(group);
+       const now: number = new Date().getTime();
+       const removed: DaemonProcessState[] = [];
+       for (const test of original) {
+         const ageSeconds: number = (now - test.startedEpochMS) / 1000;
+         if (ageSeconds > olderThanSeconds) {
+           await this.cache.removeCacheFile(this.keyToPath(test.id));
+           removed.push(test);
+         }
+       }
+       Logger.debug('Removed %d of %d items', removed.length, original.length);
+       return removed;
+     } catch (err) {
+       Logger.error('Error while trying to clean a daemon: %j %s', group, err);
+       throw err;
+     }
+   }
+
+   public async listKeys(group: string = this._defaultGroup): Promise<string[]> {
+     try {
+       const prefix: string = this.generatePrefix(group);
+       Logger.info('Fetching children of %s', prefix);
+       const rval: string[] = await this.cache.directChildrenOfPrefix(prefix);
+       Logger.debug('Found : %j', rval);
+       return rval;
+     } catch (err) {
+       Logger.error('Error while trying to list daemon keys: %j %s', group, err);
+       throw err;
+     }
+   }
+
+   public async list(group: string = this._defaultGroup): Promise<DaemonProcessState[]> {
+     try {
+       const prefix: string = this.generatePrefix(group);
+       const keys: string[] = await this.listKeys(group);
+       const proms: Promise<DaemonProcessState>[] = keys.map((k) => this.stat(this.pathToKey(prefix + k)));
+       const rval: DaemonProcessState[] = await Promise.all(proms);
+       return rval;
+     } catch (err) {
+       Logger.error('Error while trying to list daemon states: %j %s', group, err);
+       throw err;
+     }
+   }
+
+   public async updateMessage(id: string, newMessage: string): Promise<DaemonProcessState> {
+     const itemPath: string = this.keyToPath(id);
+     return DaemonUtil.updateMessage(this.cache, itemPath, newMessage);
+   }
+
+   public async stat(key: string): Promise<DaemonProcessState> {
+     const itemPath: string = this.keyToPath(key);
+     return DaemonUtil.stat(this.cache, itemPath);
+   }
+
+   public async statFromPublicToken(publicToken: string): Promise<DaemonProcessState> {
+     RequireRatchet.notNullOrUndefined(this.jwtRatchet, 'You must set jwtRatchet if you wish to use public tokens');
+     RequireRatchet.notNullOrUndefined(publicToken, 'publicToken');
+     const parsed: DaemonProcessStatePublicToken = await this.jwtRatchet.decodeToken<DaemonProcessStatePublicToken>(publicToken);
+     const key: string = parsed?.daemonKey;
+     return key ? this.stat(key) : null;
+   }
+
+   public async abort(id: string): Promise<DaemonProcessState> {
+     return DaemonUtil.abort(this.cache, this.keyToPath(id));
+   }
+
+   public async error(id: string, error: string): Promise<DaemonProcessState> {
+     return DaemonUtil.error(this.cache, this.keyToPath(id), error);
+   }
+
+   public async finalize(id: string, contents: Buffer): Promise<DaemonProcessState> {
+     return DaemonUtil.finalize(this.cache, this.keyToPath(id), contents);
+   }
+ }
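For orientation, a minimal usage sketch of the Daemon class above. The bucket, region, and payload are illustrative; `group` is the only DaemonProcessCreateOptions field this diff shows, so any other required fields are elided behind a cast:

import { S3Client } from '@aws-sdk/client-s3';
import { Daemon } from './daemon.js';
import { DaemonProcessCreateOptions } from './daemon-process-create-options.js';
import { DaemonProcessState } from './daemon-process-state.js';

// Illustrative bucket/region; state objects land under s3://my-daemon-bucket/DEFAULT/
const daemon: Daemon = new Daemon(new S3Client({ region: 'us-east-1' }), 'my-daemon-bucket');
const opts: DaemonProcessCreateOptions = { group: daemon.defaultGroup } as DaemonProcessCreateOptions;
const started: DaemonProcessState = await daemon.start(opts);
await daemon.updateMessage(started.id, 'Halfway done');
await daemon.finalize(started.id, Buffer.from('col1,col2\n1,2\n'));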
@@ -0,0 +1,60 @@
+ import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+ import { Logger } from '@bitblit/ratchet-common/logger/logger';
+ import { Ec2Ratchet } from '@bitblit/ratchet-aws/ec2/ec2-ratchet';
+ import fs from 'fs';
+ import { Instance } from '@aws-sdk/client-ec2';
+ import { SendSSHPublicKeyResponse } from '@aws-sdk/client-ec2-instance-connect';
+
+ export class Ec2InstanceUtil {
+   constructor(private ec2Ratchet: Ec2Ratchet) {}
+
+   public async startInstanceAndUploadPublicKeyFile(
+     instanceId: string,
+     filePath: string,
+     instanceOsUser: string = 'ec2-user',
+   ): Promise<Instance> {
+     RequireRatchet.notNullUndefinedOrOnlyWhitespaceString(instanceId, 'instanceId');
+     RequireRatchet.notNullUndefinedOrOnlyWhitespaceString(filePath, 'filePath');
+     RequireRatchet.true(fs.existsSync(filePath), 'File does not exist');
+     Logger.info('Starting instance %s and uploading contents of public key file %s', instanceId, filePath);
+     const publicKeyText: string = fs.readFileSync(filePath).toString();
+     return this.startInstanceAndUploadPublicKey(instanceId, publicKeyText, instanceOsUser);
+   }
+
+   public async startInstanceAndUploadPublicKey(
+     instanceId: string,
+     publicKeyText: string,
+     instanceOsUser: string = 'ec2-user',
+   ): Promise<Instance> {
+     Logger.info('Starting instance %s, public key length %d, user %s', instanceId, publicKeyText.length, instanceOsUser);
+     let instance: Instance = await this.ec2Ratchet.describeInstance(instanceId);
+     if (instance) {
+       let launched: boolean = false;
+       if (instance.State.Code === 16) {
+         // EC2 instance state code 16 means 'running'
+         Logger.info('Instance is already running...');
+         launched = true;
+       } else {
+         Logger.info('Instance is not running... starting up : %s', instanceId);
+         launched = await this.ec2Ratchet.launchInstance(instanceId, 1000 * 30); // Wait up to 30 seconds
+       }
+
+       if (launched) {
+         Logger.info('Uploading public key...');
+         const publicKeyResponse: SendSSHPublicKeyResponse = await this.ec2Ratchet.sendPublicKeyToEc2Instance(
+           instanceId,
+           publicKeyText,
+           instanceOsUser,
+         );
+         Logger.info('Key response : %j', publicKeyResponse);
+
+         instance = instance && instance.PublicIpAddress ? instance : await this.ec2Ratchet.describeInstance(instanceId);
+         Logger.info('Instance IP address is %s', instance.PublicIpAddress);
+       } else {
+         Logger.info('Instance could not start - check logs');
+       }
+     } else {
+       Logger.info('No such instance found - check your AWS keys? : %s', instanceId);
+     }
+     return instance;
+   }
+ }
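A hedged sketch of driving Ec2InstanceUtil; the instance id and key path are placeholders, and Ec2Ratchet construction is not shown in this diff, so it is declared rather than instantiated:

import { Instance } from '@aws-sdk/client-ec2';
import { Ec2Ratchet } from '@bitblit/ratchet-aws/ec2/ec2-ratchet';
import { Ec2InstanceUtil } from './ec2-instance-util.js';

declare const ec2Ratchet: Ec2Ratchet; // Construct per your environment; its signature is not part of this diff

const util: Ec2InstanceUtil = new Ec2InstanceUtil(ec2Ratchet);
const inst: Instance = await util.startInstanceAndUploadPublicKeyFile('i-0123456789abcdef0', '/home/me/.ssh/id_rsa.pub');
// EC2 Instance Connect keys expire after roughly 60 seconds, so connect promptly:
// ssh ec2-user@<inst.PublicIpAddress>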
@@ -0,0 +1,105 @@
+ import { ParsedEmailProcessor } from './parsed-email-processor.js';
+ import { ParsedMail } from 'mailparser';
+ import unzipper from 'unzipper';
+ import { DateTime } from 'luxon';
+ import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+ import { Logger } from '@bitblit/ratchet-common/logger/logger';
+ import { CsvRatchet } from '@bitblit/ratchet-node-only/csv/csv-ratchet';
+ import { MultiStream } from '@bitblit/ratchet-node-only/stream/multi-stream';
+
+ export class EmailToDbInsertProcessor implements ParsedEmailProcessor<EmailToDbStatement[]> {
+   public canProcess(_mail: ParsedMail): boolean {
+     return true;
+   }
+
+   public async processEmail(msg: ParsedMail): Promise<EmailToDbStatement[]> {
+     const rval: EmailToDbStatement[] = [];
+     try {
+       RequireRatchet.notNullOrUndefined(msg, 'msg');
+       Logger.info('Processing Broadsign Reach inbound inventory email');
+       const data: Buffer = msg.attachments[0].content;
+
+       Logger.info('Unzipping attachment');
+       const rs: MultiStream = new MultiStream(data);
+       let wBuf: Buffer = null;
+       const prom: Promise<any> = rs
+         .pipe(unzipper.Parse())
+         .on('entry', async (entry) => {
+           if (entry.path.toLowerCase().endsWith('csv')) {
+             wBuf = await entry.buffer();
+           } else {
+             Logger.info('Pass: %s', entry.path);
+             entry.autodrain();
+           }
+         })
+         .promise();
+       await prom;
+       const csvParsed: any[] = await CsvRatchet.stringParse(wBuf.toString(), (o) => o, {
+         columns: false,
+         skip_empty_lines: true,
+       });
+
+       if (csvParsed.length > 1) {
+         const dropTable: string = 'drop table if exists sample';
+         let createTable: string = 'create table sample (pump_date varchar(255),';
+         const colNames: string[] = csvParsed[0];
+
+         let insertPrefix: string = 'insert into sample (pump_date,';
+         let insertQ: string = '?,';
+
+         for (let i = 0; i < colNames.length; i++) {
+           if (i > 0) {
+             createTable += ', ';
+             insertPrefix += ', ';
+             insertQ += ', ';
+           }
+           const kOut: string = colNames[i].toLowerCase().split(' ').join('_');
+           insertPrefix += kOut;
+           insertQ += '?';
+           createTable += kOut + ' varchar(255)';
+           if (kOut === 'id') {
+             createTable += ' primary key';
+           } else if (kOut === 'device_id') {
+             createTable += ' unique';
+           }
+         }
+         createTable += ')';
+         insertPrefix += ') values ';
+
+         Logger.info('Recreating table');
+         rval.push({ statement: dropTable });
+         rval.push({ statement: createTable });
+         const pumpDate: string = DateTime.utc().toISO();
+         let insertStmt: string = insertPrefix;
+         let insertParams: any[] = [];
+
+         // Batch the data rows (csvParsed[0] is the header) into multi-row inserts of 25
+         for (let i = 1; i < csvParsed.length; i++) {
+           if (insertStmt.length > insertPrefix.length) {
+             insertStmt += ',';
+           }
+           insertStmt += '(' + insertQ + ')';
+           insertParams = insertParams.concat(pumpDate, csvParsed[i]);
+
+           if (i % 25 === 0 || i === csvParsed.length - 1) {
+             rval.push({ statement: insertStmt, params: insertParams });
+             insertStmt = insertPrefix;
+             insertParams = [];
+             Logger.info('Queued %d of %d data rows', i, csvParsed.length - 1);
+           }
+         }
+
+         Logger.info('Finished building inserts for %d data rows', csvParsed.length - 1);
+       }
+     } catch (err) {
+       Logger.error('Failure: %s : %j', err, rval);
+     }
+
+     return rval;
+   }
+ }
+
+ export interface EmailToDbStatement {
+   statement: string;
+   params?: any[];
+ }
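The processor above only builds SQL statements; executing them is left to the caller. A sketch of draining the result into a database, where `execute` stands in for whatever driver is in use (both declarations are hypothetical, not part of this package):

import { ParsedMail, simpleParser } from 'mailparser';
import { EmailToDbInsertProcessor, EmailToDbStatement } from './email-to-db-insert-processor.js';

declare const rawEmail: Buffer; // The inbound MIME message, e.g. as read from S3
declare function execute(statement: string, params?: any[]): Promise<void>; // Hypothetical driver call

const mail: ParsedMail = await simpleParser(rawEmail);
const stmts: EmailToDbStatement[] = await new EmailToDbInsertProcessor().processEmail(mail);
for (const s of stmts) {
  await execute(s.statement, s.params);
}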
@@ -0,0 +1,28 @@
+ import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';
+ import { InboundEmailRatchet } from './inbound-email-ratchet.js';
+ import { SampleEmailProcessor } from './sample-email-processor.js';
+ import { beforeEach, describe, expect, test } from 'vitest';
+ import { mock, MockProxy } from 'vitest-mock-extended';
+
+ let mockS3CR: MockProxy<S3CacheRatchet>;
+
+ describe('#inboundEmailService', () => {
+   beforeEach(() => {
+     mockS3CR = mock<S3CacheRatchet>();
+   });
+
+   test('should process an email from S3', async () => {
+     mockS3CR.getDefaultBucket.mockReturnValueOnce('TEST-BUCKET');
+     mockS3CR.fileExists.mockResolvedValueOnce(true);
+     mockS3CR.fetchCacheFileAsString.mockResolvedValue('TEST');
+
+     const svc: InboundEmailRatchet = new InboundEmailRatchet(mockS3CR, [new SampleEmailProcessor()]);
+
+     //const buf: Buffer = fs.readFileSync('testemail.txt');
+     //const res: boolean = await svc.processEmailFromBuffer(buf);
+     const res: boolean = await svc.processEmailFromS3('some-key');
+
+     expect(res).not.toBeUndefined();
+   });
+ });
@@ -0,0 +1,55 @@
+ import { ParsedMail, simpleParser } from 'mailparser';
+ import { Logger } from '@bitblit/ratchet-common/logger/logger';
+ import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+ import { ParsedEmailProcessor } from './parsed-email-processor.js';
+ import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';
+
+ /**
+  * Service for handling inbound emails
+  */
+ export class InboundEmailRatchet {
+   constructor(
+     private cache: S3CacheRatchet,
+     private processors: ParsedEmailProcessor<any>[],
+   ) {
+     RequireRatchet.notNullOrUndefined(this.cache, 'cache');
+     RequireRatchet.notNullOrUndefined(this.cache.getDefaultBucket(), 'cache.defaultBucket');
+   }
+
+   public async processEmailFromS3(key: string): Promise<boolean> {
+     if (await this.cache.fileExists(key)) {
+       const data: string = await this.cache.fetchCacheFileAsString(key);
+       return this.processEmailFromBuffer(Buffer.from(data));
+     }
+     Logger.warn('Cannot process inbound email - no such key : %s', key);
+     return false;
+   }
+
+   public async processEmailFromBuffer(buf: Buffer): Promise<boolean> {
+     let rval: boolean = false;
+     RequireRatchet.notNullOrUndefined(buf, 'buf');
+     Logger.info('Processing inbound email - size %d bytes', buf.length);
+
+     const message: ParsedMail = await simpleParser(buf);
+     Logger.info(
+       'Found mail from "%s" subject "%s" with %d attachments',
+       message?.from?.text,
+       message?.subject,
+       message?.attachments?.length,
+     );
+
+     // First matching processor wins; remaining processors are skipped
+     for (let i = 0; i < this.processors.length && !rval; i++) {
+       if (this.processors[i].canProcess(message)) {
+         Logger.info('Processing message with processor %d', i);
+         const result: any = await this.processors[i].processEmail(message);
+         Logger.info('Result was : %j', result);
+         rval = true;
+       }
+     }
+
+     return rval;
+   }
+ }
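A minimal wiring sketch for the service above, assuming SES (or similar) drops raw MIME messages into an S3 bucket; the bucket and key names are illustrative:

import { S3Client } from '@aws-sdk/client-s3';
import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';
import { InboundEmailRatchet } from './inbound-email-ratchet.js';
import { SampleEmailProcessor } from './sample-email-processor.js';

const cache: S3CacheRatchet = new S3CacheRatchet(new S3Client({}), 'inbound-email-bucket');
const svc: InboundEmailRatchet = new InboundEmailRatchet(cache, [new SampleEmailProcessor()]);
const handled: boolean = await svc.processEmailFromS3('ses/raw/some-message-id');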
@@ -0,0 +1,6 @@
+ import { ParsedMail } from 'mailparser';
+
+ export interface ParsedEmailProcessor<T> {
+   canProcess(mail: ParsedMail): boolean;
+   processEmail(msg: ParsedMail): Promise<T>;
+ }
@@ -0,0 +1,12 @@
+ import { ParsedEmailProcessor } from './parsed-email-processor.js';
+ import { ParsedMail } from 'mailparser';
+
+ export class SampleEmailProcessor implements ParsedEmailProcessor<string> {
+   public canProcess(_mail: ParsedMail): boolean {
+     return true;
+   }
+
+   public async processEmail(msg: ParsedMail): Promise<string> {
+     // ParsedMail exposes the plain-text body as 'text'; it has no 'body' property
+     return msg.text;
+   }
+ }
@@ -0,0 +1,118 @@
+ import crypto from 'crypto';
+ import fs from 'fs';
+ import path from 'path';
+ import { S3CacheRatchetLike } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet-like';
+ import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+ import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
+ import { Logger } from '@bitblit/ratchet-common/logger/logger';
+
+ /**
+  * Use this when you want a lambda to cache a remote S3 bucket locally on disk for faster access
+  */
+ export class S3CacheToLocalDiskRatchet {
+   private static readonly DEFAULT_CACHE_TIMEOUT_SEC = 7 * 24 * 3600;
+
+   // Tracks in-flight downloads so concurrent requests for the same key share one S3 fetch
+   private currentlyLoading: Map<string, Promise<Buffer>> = new Map<string, Promise<Buffer>>();
+
+   constructor(
+     private s3: S3CacheRatchetLike,
+     private tmpFolder: string,
+     private cacheTimeoutSeconds: number = S3CacheToLocalDiskRatchet.DEFAULT_CACHE_TIMEOUT_SEC,
+   ) {
+     RequireRatchet.notNullOrUndefined(s3, 's3');
+     RequireRatchet.notNullOrUndefined(StringRatchet.trimToNull(tmpFolder), 'tmpFolder');
+     RequireRatchet.true(fs.existsSync(tmpFolder), 'folder must exist : ' + tmpFolder);
+   }
+
+   public async getFileString(key: string): Promise<string> {
+     const buf: Buffer = await this.getFileBuffer(key);
+     return buf ? buf.toString() : null;
+   }
+
+   private keyToLocalCachePath(key: string): string {
+     const cachedHash: string = this.generateCacheHash(this.s3.getDefaultBucket() + '/' + key);
+     return path.join(this.tmpFolder, cachedHash);
+   }
+
+   public removeCacheFileForKey(key: string): void {
+     const localCachePath: string = this.keyToLocalCachePath(key);
+     Logger.info('Removing cache file for %s : %s', key, localCachePath);
+     if (fs.existsSync(localCachePath)) {
+       fs.unlinkSync(localCachePath);
+     } else {
+       Logger.debug('Skipping delete for %s - does not exist', localCachePath);
+     }
+   }
+
+   public async getFileBuffer(key: string): Promise<Buffer> {
+     const localCachePath: string = this.keyToLocalCachePath(key);
+     let rval: Buffer = this.getCacheFileAsBuffer(localCachePath);
+
+     if (!rval) {
+       Logger.info('No cache. Downloading File s3://%s/%s to %s', this.s3.getDefaultBucket(), key, localCachePath);
+       try {
+         let prom: Promise<Buffer> = this.currentlyLoading.get(key);
+         if (prom) {
+           Logger.info('Already running - wait for that');
+         } else {
+           Logger.info('Not running - start');
+           prom = this.updateLocalCacheFile(key, localCachePath);
+           this.currentlyLoading.set(key, prom);
+         }
+         rval = await prom;
+       } catch (err) {
+         Logger.warn('File %s/%s does not exist. Err code: %s', this.s3.getDefaultBucket(), key, err);
+       } finally {
+         this.currentlyLoading.delete(key); // Clear the in-flight marker whether the fetch succeeded or failed
+       }
+     } else {
+       Logger.info('Found cache file for s3://%s/%s. Local path %s', this.s3.getDefaultBucket(), key, localCachePath);
+     }
+     return rval;
+   }
+
+   private async updateLocalCacheFile(key: string, localCachePath: string): Promise<Buffer> {
+     const rval: Buffer = await this.s3.fetchCacheFileAsBuffer(key);
+     if (rval && rval.length > 0) {
+       Logger.info('Saving %d bytes to disk for cache', rval.length);
+       fs.writeFileSync(localCachePath, rval);
+     }
+     return rval;
+   }
+
+   public getCacheFileAsString(filePath: string): string {
+     const buf: Buffer = this.getCacheFileAsBuffer(filePath);
+     return buf ? buf.toString() : null;
+   }
+
+   public getCacheFileAsBuffer(filePath: string): Buffer {
+     if (!fs.existsSync(filePath)) {
+       return null;
+     }
+
+     try {
+       const stats = fs.statSync(filePath);
+       const now = new Date().getTime();
+       const ageSeconds: number = (now - stats.ctimeMs) / 1000;
+       if (ageSeconds >= this.cacheTimeoutSeconds) {
+         return null; // Local copy is stale; force a re-download
+       }
+       return fs.readFileSync(filePath);
+     } catch (err) {
+       Logger.warn('Error getting s3 cache file %s', err);
+     }
+
+     return null;
+   }
+
+   private generateCacheHash(hashVal: string): string {
+     return crypto.createHash('md5').update(hashVal).digest('hex');
+   }
+ }
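A short sketch of the intended warm-Lambda usage, assuming the wrapped ratchet has a default bucket configured; the bucket and key are illustrative:

import { tmpdir } from 'os';
import { S3Client } from '@aws-sdk/client-s3';
import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';
import { S3CacheToLocalDiskRatchet } from './s3-cache-to-local-disk-ratchet.js';

const disk: S3CacheToLocalDiskRatchet = new S3CacheToLocalDiskRatchet(new S3CacheRatchet(new S3Client({}), 'config-bucket'), tmpdir());
// First call downloads to local disk; later calls in the same container read the disk copy until the timeout elapses
const cfg: string = await disk.getFileString('config.json');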
@@ -0,0 +1,31 @@
+ import { S3CacheToLocalDiskRatchet } from './s3-cache-to-local-disk-ratchet.js';
+ import { S3CacheRatchetLike } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet-like';
+ import { tmpdir } from 'os';
+ import { beforeEach, describe, expect, test } from 'vitest';
+ import { mock, MockProxy } from 'vitest-mock-extended';
+
+ let mockS3CR: MockProxy<S3CacheRatchetLike>;
+
+ describe('#S3CacheToLocalDiskRatchet', () => {
+   beforeEach(() => {
+     mockS3CR = mock<S3CacheRatchetLike>();
+   });
+
+   test('should download file and store in tmp', async () => {
+     mockS3CR.fetchCacheFileAsBuffer.mockResolvedValue(Buffer.from(JSON.stringify({ a: 'b' })));
+
+     const pth: string = 'test-path';
+
+     const svc: S3CacheToLocalDiskRatchet = new S3CacheToLocalDiskRatchet(mockS3CR, tmpdir());
+     svc.removeCacheFileForKey(pth);
+
+     // Fire several concurrent reads; they should share a single S3 fetch
+     const proms: Promise<Buffer>[] = [];
+     for (let i = 0; i < 5; i++) {
+       proms.push(svc.getFileBuffer(pth));
+     }
+
+     const all: Buffer[] = await Promise.all(proms);
+     expect(all.length).toEqual(5);
+   });
+ });
@@ -0,0 +1,46 @@
+ CREATE EXTERNAL TABLE IF NOT EXISTS `${TABLE_NAME}`(
+   `type` string COMMENT '',
+   `time` string COMMENT '',
+   `elb` string COMMENT '',
+   `client_ip` string COMMENT '',
+   `client_port` int COMMENT '',
+   `target_ip` string COMMENT '',
+   `target_port` int COMMENT '',
+   `request_processing_time` double COMMENT '',
+   `target_processing_time` double COMMENT '',
+   `response_processing_time` double COMMENT '',
+   `elb_status_code` string COMMENT '',
+   `target_status_code` string COMMENT '',
+   `received_bytes` bigint COMMENT '',
+   `sent_bytes` bigint COMMENT '',
+   `request_verb` string COMMENT '',
+   `request_url` string COMMENT '',
+   `request_proto` string COMMENT '',
+   `user_agent` string COMMENT '',
+   `ssl_cipher` string COMMENT '',
+   `ssl_protocol` string COMMENT '',
+   `target_group_arn` string COMMENT '',
+   `trace_id` string COMMENT '',
+   `domain_name` string COMMENT '',
+   `chosen_cert_arn` string COMMENT '',
+   `matched_rule_priority` string COMMENT '',
+   `request_creation_time` string COMMENT '',
+   `actions_executed` string COMMENT '',
+   `redirect_url` string COMMENT '',
+   `lambda_error_reason` string COMMENT '',
+   `target_port_list` string COMMENT '',
+   `target_status_code_list` string COMMENT '',
+   `new_field` string COMMENT '')
+ PARTITIONED BY (
+   `date_utc_partition` string
+ )
+ ROW FORMAT SERDE
+   'org.apache.hadoop.hive.serde2.RegexSerDe'
+ WITH SERDEPROPERTIES (
+   'input.regex'='([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*):([0-9]*) ([^ ]*)[:-]([0-9]*) ([-.0-9]*) ([-.0-9]*) ([-.0-9]*) (-|[-0-9]*) (-|[-0-9]*) ([-0-9]*) ([-0-9]*) \"([^ ]*) ([^ ]*) (- |[^ ]*)\" \"([^\"]*)\" ([A-Z0-9-]+) ([A-Za-z0-9.-]*) ([^ ]*) \"([^\"]*)\" \"([^\"]*)\" \"([^\"]*)\" ([-.0-9]*) ([^ ]*) \"([^\"]*)\" \"([^\"]*)\" \"([^ ]*)\" \"([^\s]+)\" \"([^\s]+)\"(.*)')
+ STORED AS INPUTFORMAT
+   'org.apache.hadoop.mapred.TextInputFormat'
+ OUTPUTFORMAT
+   'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+ LOCATION
+   '${ALB_LOG_ROOT}'