@bitblit/ratchet-aws-node-only 6.0.145-alpha → 6.0.147-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -5
- package/src/athena/alb-athena-log-ratchet.spec.ts +36 -0
- package/src/athena/alb-athena-log-ratchet.ts +223 -0
- package/src/athena/athena-ratchet.spec.ts +43 -0
- package/src/athena/athena-ratchet.ts +194 -0
- package/src/build/ratchet-aws-node-only-info.ts +19 -0
- package/src/cli/dynamo-exporter.ts +104 -0
- package/src/cli/ratchet-cli-handler.ts +18 -0
- package/src/cli/site-uploader/site-uploader.ts +143 -0
- package/src/cli/start-instance-and-ssh.ts +70 -0
- package/src/daemon/daemon-like.ts +34 -0
- package/src/daemon/daemon-process-create-options.ts +7 -0
- package/src/daemon/daemon-process-state-public-token.ts +5 -0
- package/src/daemon/daemon-process-state.ts +16 -0
- package/src/daemon/daemon-stream-data-options.ts +6 -0
- package/src/daemon/daemon-util.spec.ts +133 -0
- package/src/daemon/daemon-util.ts +204 -0
- package/src/daemon/daemon.ts +150 -0
- package/src/ec2/ec2-instance-util.ts +60 -0
- package/src/mail/inbound/email-to-db-insert-processor.ts +105 -0
- package/src/mail/inbound/inbound-email-ratchet.spec.ts +28 -0
- package/src/mail/inbound/inbound-email-ratchet.ts +55 -0
- package/src/mail/inbound/parsed-email-processor.ts +6 -0
- package/src/mail/inbound/sample-email-processor.ts +12 -0
- package/src/s3/s3-cache-to-local-disk-ratchet.ts +118 -0
- package/src/s3/s3-cache-to-local-dist-ratchet.spec.ts +31 -0
- package/src/static/albAthenaTableCreate.txt +46 -0
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import walk from 'walk';
|
|
3
|
+
import { S3Client } from '@aws-sdk/client-s3';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
import mime from 'mime-types';
|
|
6
|
+
import { Logger } from '@bitblit/ratchet-common/logger/logger';
|
|
7
|
+
import { Upload } from '@aws-sdk/lib-storage';
|
|
8
|
+
|
|
9
|
+
export class SiteUploader {
|
|
10
|
+
private srcDir: string;
|
|
11
|
+
private bucketName: string;
|
|
12
|
+
private config: any;
|
|
13
|
+
private readonly s3: S3Client = new S3Client({ region: 'us-east-1' });
|
|
14
|
+
|
|
15
|
+
constructor(srcDir: string, bucketName: string, configFile: string) {
|
|
16
|
+
this.srcDir = srcDir;
|
|
17
|
+
this.bucketName = bucketName;
|
|
18
|
+
this.config = JSON.parse(fs.readFileSync(configFile).toString('ascii'));
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
public static createFromArgs(args: string[]): SiteUploader {
|
|
22
|
+
if (args && args.length === 3) {
|
|
23
|
+
const src = args[0];
|
|
24
|
+
const bucket = args[1];
|
|
25
|
+
const configFile = args[2];
|
|
26
|
+
|
|
27
|
+
return new SiteUploader(src, bucket, configFile);
|
|
28
|
+
} else {
|
|
29
|
+
console.log('Usage : node ratchet-site-uploader {srcDir} {bucket} {configFile} (Found ' + args + ' arguments, need 3)');
|
|
30
|
+
return null;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
public static async runFromCliArgs(args: string[]): Promise<void> {
|
|
35
|
+
const inst: SiteUploader = SiteUploader.createFromArgs(args);
|
|
36
|
+
return inst.runPump();
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
findMatch(prefix: string, fileName: string, config: any): any {
|
|
40
|
+
let found = null;
|
|
41
|
+
|
|
42
|
+
if (prefix != null && fileName != null && config != null && config.mapping != null) {
|
|
43
|
+
config.mapping.forEach((entry) => {
|
|
44
|
+
if (found == null) {
|
|
45
|
+
if (entry.prefixMatch == null || prefix.match(entry.prefixMatch)) {
|
|
46
|
+
if (entry.fileMatch == null || fileName.match(entry.fileMatch)) {
|
|
47
|
+
found = entry;
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
});
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
return found;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
findMime(fileName: string, config: any): string {
|
|
58
|
+
let found = null;
|
|
59
|
+
|
|
60
|
+
if (config != null && config.customMimeTypeMapping != null) {
|
|
61
|
+
Object.keys(config.customMimeTypeMapping).forEach((k) => {
|
|
62
|
+
if (found == null && fileName.endsWith(k)) {
|
|
63
|
+
found = config.customMimeTypeMapping[k];
|
|
64
|
+
}
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
if (found == null) {
|
|
69
|
+
found = mime.lookup(fileName);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
if (found == null) {
|
|
73
|
+
found = 'binary/octet-stream';
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
return found;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
runPump(): Promise<any> {
|
|
80
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
81
|
+
return new Promise<any>((resolve, reject) => {
|
|
82
|
+
Logger.info('Uploading contents of %s to %s using %j as config', this.srcDir, this.bucketName, this.config);
|
|
83
|
+
// bucket = boto3.resource("s3").Bucket(bucket)
|
|
84
|
+
|
|
85
|
+
const options = {};
|
|
86
|
+
const walker = walk.walk(this.srcDir, options);
|
|
87
|
+
|
|
88
|
+
walker.on(
|
|
89
|
+
'file',
|
|
90
|
+
function (root, fileStats, next) {
|
|
91
|
+
Logger.info('Processing %j', fileStats.name);
|
|
92
|
+
const prefix: string = root == this.srcDir ? '' : root.substring(this.srcDir.length + 1) + '/';
|
|
93
|
+
|
|
94
|
+
const proc: any = this.findMatch(prefix, fileStats.name, this.config);
|
|
95
|
+
const key: string = prefix + fileStats.name;
|
|
96
|
+
Logger.info('Uploading file : %s/%s to key %s with %j', root, fileStats.name, key, proc);
|
|
97
|
+
|
|
98
|
+
const params: any = proc && proc.putParams ? JSON.parse(JSON.stringify(proc.putParams)) : {};
|
|
99
|
+
|
|
100
|
+
params.Bucket = this.bucketName;
|
|
101
|
+
params.Key = key;
|
|
102
|
+
params.Body = fs.readFileSync(path.join(root, fileStats.name));
|
|
103
|
+
|
|
104
|
+
if (!params.ContentType) {
|
|
105
|
+
params.ContentType = this.findMime(fileStats.name, this.config);
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
const upload: Upload = new Upload({
|
|
109
|
+
client: this.s3,
|
|
110
|
+
params: params,
|
|
111
|
+
tags: [],
|
|
112
|
+
queueSize: 4,
|
|
113
|
+
partSize: 1024 * 1024 * 5,
|
|
114
|
+
leavePartsOnError: false,
|
|
115
|
+
});
|
|
116
|
+
|
|
117
|
+
upload.on('httpUploadProgress', (progress) => {
|
|
118
|
+
Logger.debug('Uploading : %s', progress);
|
|
119
|
+
});
|
|
120
|
+
upload
|
|
121
|
+
.done()
|
|
122
|
+
.then((result) => {
|
|
123
|
+
Logger.info('Finished upload of %s: %j', key, result);
|
|
124
|
+
next();
|
|
125
|
+
})
|
|
126
|
+
.catch((err) => {
|
|
127
|
+
Logger.warn('%s failed to upload : %s : Continuing', key, err);
|
|
128
|
+
next();
|
|
129
|
+
});
|
|
130
|
+
}.bind(this),
|
|
131
|
+
);
|
|
132
|
+
|
|
133
|
+
walker.on('errors', function (root, nodeStatsArray, next) {
|
|
134
|
+
next();
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
walker.on('end', function () {
|
|
138
|
+
Logger.info('All done');
|
|
139
|
+
resolve(true);
|
|
140
|
+
});
|
|
141
|
+
});
|
|
142
|
+
}
|
|
143
|
+
}
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import { Logger } from '@bitblit/ratchet-common/logger/logger';
|
|
2
|
+
import { Ec2Ratchet } from '@bitblit/ratchet-aws/ec2/ec2-ratchet';
|
|
3
|
+
import { spawnSync, SpawnSyncReturns } from 'child_process';
|
|
4
|
+
import os from 'os';
|
|
5
|
+
import path from 'path';
|
|
6
|
+
import { Instance } from '@aws-sdk/client-ec2';
|
|
7
|
+
import { Ec2InstanceUtil } from '../ec2/ec2-instance-util.js';
|
|
8
|
+
|
|
9
|
+
export class StartInstanceAndSsh {
|
|
10
|
+
private instanceId: string;
|
|
11
|
+
private publicKeyFile: string;
|
|
12
|
+
private instanceOsUser: string;
|
|
13
|
+
private region: string;
|
|
14
|
+
private availabilityZone: string;
|
|
15
|
+
private ec2Ratchet: Ec2Ratchet;
|
|
16
|
+
private instanceUtil: Ec2InstanceUtil;
|
|
17
|
+
|
|
18
|
+
constructor(
|
|
19
|
+
instanceId: string,
|
|
20
|
+
publicKeyFile: string = path.join(os.homedir(), '.ssh', 'id_rsa.pub'),
|
|
21
|
+
instanceOsUser: string = 'ec2-user',
|
|
22
|
+
region: string = 'us-east-1',
|
|
23
|
+
availabilityZone: string = 'us-east-1a',
|
|
24
|
+
) {
|
|
25
|
+
this.instanceId = instanceId;
|
|
26
|
+
this.publicKeyFile = publicKeyFile;
|
|
27
|
+
this.instanceOsUser = instanceOsUser;
|
|
28
|
+
this.region = region;
|
|
29
|
+
this.availabilityZone = availabilityZone;
|
|
30
|
+
|
|
31
|
+
this.ec2Ratchet = new Ec2Ratchet(this.region, this.availabilityZone);
|
|
32
|
+
this.instanceUtil = new Ec2InstanceUtil(this.ec2Ratchet);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
public static createFromArgs(args: string[]): StartInstanceAndSsh {
|
|
36
|
+
if (args?.length === 1 || args?.length === 2) {
|
|
37
|
+
const instanceId = args[0];
|
|
38
|
+
//const publicKeyFile = args[1];
|
|
39
|
+
|
|
40
|
+
return new StartInstanceAndSsh(instanceId); // , publicKeyFile);
|
|
41
|
+
} else {
|
|
42
|
+
Logger.info('Usage : ratchet-start-instance-and-ssh {instanceId} {publicKeyFile} (Found %s arguments, need 1 or 2)', args);
|
|
43
|
+
return null;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
public static async runFromCliArgs(args: string[]): Promise<void> {
|
|
48
|
+
const inst: StartInstanceAndSsh = StartInstanceAndSsh.createFromArgs(args);
|
|
49
|
+
return inst.run();
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
public async run(): Promise<any> {
|
|
53
|
+
//return new Promise<any>(async (res, rej) => {
|
|
54
|
+
const instance: Instance = await this.instanceUtil.startInstanceAndUploadPublicKeyFile(
|
|
55
|
+
this.instanceId,
|
|
56
|
+
this.publicKeyFile,
|
|
57
|
+
this.instanceOsUser,
|
|
58
|
+
);
|
|
59
|
+
if (instance) {
|
|
60
|
+
Logger.info('Instance IP address is %s', instance.PublicIpAddress);
|
|
61
|
+
const ret: SpawnSyncReturns<Buffer> = spawnSync('ssh', [this.instanceOsUser + '@' + instance.PublicIpAddress], {
|
|
62
|
+
stdio: 'inherit',
|
|
63
|
+
});
|
|
64
|
+
Logger.info('%j', ret);
|
|
65
|
+
} else {
|
|
66
|
+
Logger.info('No such instance found - check your AWS keys? : %s', this.instanceId);
|
|
67
|
+
}
|
|
68
|
+
//});
|
|
69
|
+
}
|
|
70
|
+
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { DaemonProcessState } from './daemon-process-state.js';
|
|
2
|
+
import { DaemonProcessCreateOptions } from './daemon-process-create-options.js';
|
|
3
|
+
|
|
4
|
+
/**
 * Classes implementing this interface provide the ability to monitor and update
 * long-running processes in S3.
 *
 * For all functions, if group is not provided the default group will be used.
 */
export interface DaemonLike {
  /** Group name used when an optional group argument is omitted. */
  get defaultGroup(): string;

  /** Converts an S3 key into an opaque public token valid for the given number of seconds. */
  keyToPublicToken(key: string, expirationSeconds: number): Promise<string>;

  /** Creates a new daemon process entry and returns its initial state. */
  start(options: DaemonProcessCreateOptions): Promise<DaemonProcessState>;

  /**
   * Removes stale process entries and returns the states that were removed.
   * NOTE(review): the exact age cutoff semantics of olderThanSeconds are not
   * visible here — confirm against the implementing class.
   */
  clean(group?: string, olderThanSeconds?: number): Promise<DaemonProcessState[]>;

  /** Lists the S3 keys of process entries in the group. */
  listKeys(group?: string): Promise<string[]>;

  /** Lists the states of process entries in the group. */
  list(group?: string): Promise<DaemonProcessState[]>;

  /** Updates the status message of the process with the given id. */
  updateMessage(id: string, newMessage: string): Promise<DaemonProcessState>;

  /** Fetches the state of a process given its public token. */
  statFromPublicToken(publicToken: string): Promise<DaemonProcessState>;

  /** Fetches the state of a process given its S3 key. */
  stat(key: string): Promise<DaemonProcessState>;

  /** Marks the process with the given id as aborted. */
  abort(id: string): Promise<DaemonProcessState>;

  /** Marks the process with the given id as failed with the given error text. */
  error(id: string, error: string): Promise<DaemonProcessState>;

  /** Writes the final output contents and marks the process complete. */
  finalize(id: string, contents: Buffer): Promise<DaemonProcessState>;
}
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Serializable snapshot of a long-running daemon process. DaemonUtil stores this
 * JSON-encoded in the metadata of the S3 object that carries the process output.
 */
export interface DaemonProcessState {
  // Unique identifier of the process
  id: string;

  // Human-readable title of the process
  title: string;
  // Filename offered on download (used for the Content-Disposition attachment name)
  targetFileName: string;

  // Epoch millis of the most recent state write
  lastUpdatedEpochMS: number;
  // Most recent status message (e.g. 'Created', 'Complete')
  lastUpdatedMessage: string;

  // Epoch millis when the process was started
  startedEpochMS: number;
  // Epoch millis when the process finished; null while still running
  completedEpochMS: number;
  // Caller-supplied metadata bag (shape is caller-defined)
  meta: any;
  // Error text when the process failed or was aborted; null otherwise
  error: string;
  // Pre-signed download URL, populated by stat() once complete without error
  link: string;
  // MIME type of the output object
  contentType: string;
}
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { DaemonProcessState } from './daemon-process-state.js';
|
|
2
|
+
import { DaemonUtil } from './daemon-util.js';
|
|
3
|
+
import fs, { ReadStream } from 'fs';
|
|
4
|
+
import { DaemonProcessCreateOptions } from './daemon-process-create-options.js';
|
|
5
|
+
|
|
6
|
+
import { Subject } from 'rxjs';
|
|
7
|
+
import { PassThrough } from 'stream';
|
|
8
|
+
import { S3Client } from '@aws-sdk/client-s3';
|
|
9
|
+
import { beforeEach, describe, expect, test } from 'vitest';
|
|
10
|
+
import { mock, MockProxy } from 'vitest-mock-extended';
|
|
11
|
+
|
|
12
|
+
import { Logger } from '@bitblit/ratchet-common/logger/logger';
|
|
13
|
+
import { CsvRatchet } from '@bitblit/ratchet-node-only/csv/csv-ratchet';
|
|
14
|
+
import { PromiseRatchet } from '@bitblit/ratchet-common/lang/promise-ratchet';
|
|
15
|
+
import { LoggerLevelName } from '@bitblit/ratchet-common/logger/logger-level-name';
|
|
16
|
+
import { S3CacheRatchetLike } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet-like';
|
|
17
|
+
import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';
|
|
18
|
+
|
|
19
|
+
// Shared mock, rebuilt before each test so stubbed expectations don't leak between tests
let mockS3CR: MockProxy<S3CacheRatchetLike>;

describe('#DaemonUtil', function () {
  beforeEach(() => {
    mockS3CR = mock<S3CacheRatchetLike>();
  });

  test('should test the daemon util', async () => {
    // Stub the cache so stat() sees a completed process (completedEpochMS set, no error)
    // and therefore attaches a pre-signed download link
    mockS3CR.getDefaultBucket.mockReturnValueOnce('TEST-BUCKET');
    mockS3CR.fetchMetaForCacheFile.mockResolvedValue({
      Metadata: { daemon_meta: '{"id":"testid", "completedEpochMS":123456}' },
      $metadata: null,
    });
    mockS3CR.preSignedDownloadUrlForCacheFile.mockResolvedValue('https://test-link');

    const t1: DaemonProcessState = await DaemonUtil.stat(mockS3CR, 'test1.csv');
    Logger.info('Got : %j', t1);
    expect(t1).not.toBeNull();
    // Link must be set because the stubbed state is complete and error-free
    expect(t1.link).not.toBeNull();

    /*
    let id = 'test';
    const newDaemonOptions: DaemonProcessCreateOptions = {
      title: 'test',
      contentType: 'text/csv',
      group: 'NA',
      meta: {},
      targetFileName: 'test.csv'
    };

    const t2: DaemonProcessState = await DaemonUtil.start(cache, id,'test1.csv', newDaemonOptions);
    Logger.info('Got : %j', t2);


    const t2: DaemonProcessState = await DaemonUtil.updateMessage(mockS3CR, 'test1.csv', 'msg : ' + new Date());
    Logger.info('Got : %j', t2);

    const result: DaemonProcessState = await DaemonUtil.stat(mockS3CR, 'test1.csv');

    Logger.info('Got : %j', result);

    expect(result).toBeTruthy();
    Logger.info('Got objects : %j', result);
    */
  });

  // Skipped: builds a real S3Client against the 'test-bucket' bucket, so it needs live AWS access
  test.skip('should test the daemon util streaming', async () => {
    const s3: S3Client = new S3Client({ region: 'us-east-1' });
    const cache: S3CacheRatchetLike = new S3CacheRatchet(s3, 'test-bucket');
    const key: string = 's3-cache-ratchet.spec.ts';

    const newDaemonOptions: DaemonProcessCreateOptions = {
      title: 'test',
      contentType: 'text/plain',
      group: 'NA',
      meta: {},
      targetFileName: 's3-cache-ratchet.spec.ts',
    };

    const _t2: DaemonProcessState = await DaemonUtil.start(cache, key, 's3-cache-ratchet.spec.ts', newDaemonOptions);

    const t1: DaemonProcessState = await DaemonUtil.stat(cache, key);
    Logger.info('Got : %j', t1);

    // Stream a local file through the daemon and finish it in one call
    const stream: ReadStream = fs.createReadStream('test/aws/s3-cache-ratchet.spec.ts');
    const result: DaemonProcessState = await DaemonUtil.streamDataAndFinish(cache, key, stream);

    expect(result).toBeTruthy();
    Logger.info('Got objects : %j', result);
  });

  // Skipped: builds a real S3Client against the 'test-bucket' bucket, so it needs live AWS access
  test.skip('should stream objects to a csv', async () => {
    Logger.setLevel(LoggerLevelName.debug);
    const sub: Subject<TestItem> = new Subject<TestItem>();
    const out: PassThrough = new PassThrough();
    const s3: S3Client = new S3Client({ region: 'us-east-1' });
    const cache: S3CacheRatchet = new S3CacheRatchet(s3, 'test-bucket');
    const key: string = 'test.csv';

    const newDaemonOptions: DaemonProcessCreateOptions = {
      title: 'test',
      contentType: 'text/csv',
      group: 'NA',
      meta: {},
      targetFileName: 'test.csv',
    };
    const _t2: DaemonProcessState = await DaemonUtil.start(cache, key, key, newDaemonOptions);

    // CsvRatchet writes csv rows into the PassThrough, which the daemon streams to S3
    const dProm: Promise<DaemonProcessState> = DaemonUtil.streamDataAndFinish(cache, key, out);

    const prom: Promise<number> = CsvRatchet.streamObjectsToCsv<TestItem>(sub, out); //, opts);

    // Emit 5 items; the b values contain commas to exercise csv quoting
    for (let i = 1; i < 6; i++) {
      Logger.debug('Proc : %d', i);
      sub.next({ a: i, b: 'test ' + i + ' ,,' });
      await PromiseRatchet.wait(10);
    }
    sub.complete();

    Logger.debug('Waiting on write');

    const result: number = await prom;
    Logger.debug('Write complete');

    const val: DaemonProcessState = await dProm;

    expect(result).toEqual(5);
    Logger.debug('Have res : %d and val : \n%j', result, val);
  });
});

// Simple row shape used by the csv streaming test above
export interface TestItem {
  a: number;
  b: string;
}
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
import { DaemonProcessState } from './daemon-process-state.js';
|
|
2
|
+
import { DaemonProcessCreateOptions } from './daemon-process-create-options.js';
|
|
3
|
+
import {
|
|
4
|
+
CompleteMultipartUploadCommandOutput,
|
|
5
|
+
HeadObjectOutput,
|
|
6
|
+
PutObjectCommand,
|
|
7
|
+
PutObjectCommandInput,
|
|
8
|
+
PutObjectOutput,
|
|
9
|
+
PutObjectRequest,
|
|
10
|
+
} from '@aws-sdk/client-s3';
|
|
11
|
+
import { Readable } from 'stream';
|
|
12
|
+
import { Upload } from '@aws-sdk/lib-storage';
|
|
13
|
+
import { DaemonStreamDataOptions } from './daemon-stream-data-options.js';
|
|
14
|
+
import { Logger } from '@bitblit/ratchet-common/logger/logger';
|
|
15
|
+
import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
|
|
16
|
+
import { S3CacheRatchetLike } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet-like';
|
|
17
|
+
|
|
18
|
+
/**
 * Internal utilities which are here for the USE OF THE DAEMON OBJECT ONLY - if you are trying to use this
 * class outside of Ratchet, you are doing it wrong. Instantiate a Daemon object and use that instead.
 *
 * The details of storage and retrieval of a DaemonProcessState is meant to be transparent to the user
 */
export class DaemonUtil {
  // Placeholder body for the S3 object while the process is still running
  public static DEFAULT_CONTENT: Buffer = Buffer.from('DAEMON_PLACEHOLDER');
  public static DAEMON_METADATA_KEY: string = 'daemon_meta'; // Must be lowercase for s3

  /**
   * Creates a new daemon process: writes a placeholder object at s3Key whose S3
   * metadata carries the freshly built DaemonProcessState.
   * @param cache S3 cache wrapper providing bucket/client access
   * @param id Unique identifier stored in the new state
   * @param s3Key Key of the object that will track/hold the process output
   * @param options Title, content type, target file name and caller meta
   * @returns the state as re-read via stat() after the write
   * @throws rethrows any underlying S3 error after logging
   */
  public static async start(
    cache: S3CacheRatchetLike,
    id: string,
    s3Key: string,
    options: DaemonProcessCreateOptions,
  ): Promise<DaemonProcessState> {
    try {
      // Guarantee a meta object so downstream consumers never see undefined
      options.meta ??= {};

      Logger.info('Starting daemon, key: %s, options: %j', s3Key, options);
      const now: number = new Date().getTime();

      const newState: DaemonProcessState = {
        id: id,
        title: options.title,
        lastUpdatedEpochMS: now,
        lastUpdatedMessage: 'Created',
        targetFileName: options.targetFileName,

        startedEpochMS: now,
        completedEpochMS: null, // null marks the process as still running
        meta: options.meta,
        error: null,
        link: null,
        contentType: options.contentType,
      };

      const rval: DaemonProcessState = await DaemonUtil.writeState(cache, s3Key, newState, DaemonUtil.DEFAULT_CONTENT);
      return rval;
    } catch (err) {
      Logger.error('Error while trying to start a daemon: %j %s', options, err);
      throw err;
    }
  }

  /**
   * Persists a state + contents pair: the state is JSON-serialized into the S3
   * object's metadata (under DAEMON_METADATA_KEY) and the contents become the body.
   * Side effect: bumps newState.lastUpdatedEpochMS to now before serializing.
   * @returns the state as re-read via stat() (so link etc. are resolved)
   * @throws rethrows any underlying S3 error after logging
   */
  public static async writeState(
    cache: S3CacheRatchetLike,
    s3Key: string,
    newState: DaemonProcessState,
    contents: Uint8Array, // This was Buffer before, moving to Uint8array to be node/browser ok. Need a streaming version
  ): Promise<DaemonProcessState> {
    try {
      const s3meta: any = {};
      newState.lastUpdatedEpochMS = new Date().getTime();
      s3meta[DaemonUtil.DAEMON_METADATA_KEY] = JSON.stringify(newState);

      const params: PutObjectCommandInput = {
        Bucket: cache.getDefaultBucket(),
        Key: s3Key,
        ContentType: newState.contentType,
        Metadata: s3meta,
        Body: contents,
      };
      if (newState.targetFileName) {
        // Make browsers download with the intended file name
        params.ContentDisposition = 'attachment;filename="' + newState.targetFileName + '"';
      }

      const written: PutObjectOutput = await cache.getS3Client().send(new PutObjectCommand(params));
      Logger.silly('Daemon wrote : %s', written);

      return DaemonUtil.stat(cache, s3Key);
    } catch (err) {
      Logger.error('Error while trying to write a daemon stat: %j %s', newState, err);
      throw err;
    }
  }

  /**
   * Streams data as the process's final output via a multipart upload.
   * Note the ordering: the state is marked Complete BEFORE the upload starts,
   * because the serialized state rides along as metadata on the uploaded object
   * itself — the stored state only becomes visible once the upload lands.
   * NOTE(review): unlike the other methods here, upload errors are not caught/logged
   * locally — they propagate directly to the caller; confirm this is intended.
   * @param options optional overrideTargetFileName and progress callback
   * @returns the state as re-read via stat() after the upload completes
   */
  public static async streamDataAndFinish(
    cache: S3CacheRatchetLike,
    s3Key: string,
    data: Readable,
    options?: DaemonStreamDataOptions,
  ): Promise<DaemonProcessState> {
    Logger.debug('Streaming data to %s', s3Key);
    const inStat: DaemonProcessState = await DaemonUtil.updateMessage(cache, s3Key, 'Streaming data');
    inStat.completedEpochMS = new Date().getTime();
    inStat.lastUpdatedMessage = 'Complete';

    const s3meta: any = {};
    s3meta[DaemonUtil.DAEMON_METADATA_KEY] = JSON.stringify(inStat);

    const params: PutObjectRequest = {
      Bucket: cache.getDefaultBucket(),
      Key: s3Key,
      ContentType: inStat.contentType,
      Metadata: s3meta,
      Body: data,
    };
    // Caller-supplied override wins; fall back to the file name stored in the state
    const targetFileName: string =
      StringRatchet.trimToNull(options?.overrideTargetFileName) || StringRatchet.trimToNull(inStat?.targetFileName);
    if (targetFileName) {
      params.ContentDisposition = 'attachment;filename="' + targetFileName + '"';
    }

    const upload: Upload = new Upload({
      client: cache.getS3Client(),
      params: params,
      tags: [],
      queueSize: 4,
      partSize: 1024 * 1024 * 5, // 5 MB multipart chunks
      leavePartsOnError: false,
    });

    if (options?.progressFn) {
      upload.on('httpUploadProgress', options.progressFn);
    }
    const written: CompleteMultipartUploadCommandOutput = await upload.done();

    Logger.silly('Daemon wrote : %s', written);

    return DaemonUtil.stat(cache, s3Key);
  }

  /**
   * Replaces the process's status message (read-modify-write via stat/writeState;
   * the object body is reset to DEFAULT_CONTENT placeholder).
   * @throws rethrows any underlying S3 error after logging
   */
  public static async updateMessage(cache: S3CacheRatchetLike, s3Key: string, newMessage: string): Promise<DaemonProcessState> {
    try {
      const inStat: DaemonProcessState = await DaemonUtil.stat(cache, s3Key);
      inStat.lastUpdatedMessage = newMessage;
      return DaemonUtil.writeState(cache, s3Key, inStat, DaemonUtil.DEFAULT_CONTENT);
    } catch (err) {
      Logger.error('Error while trying to update a daemon message: %j %s', s3Key, err);
      throw err;
    }
  }

  /**
   * Reads the process state from the S3 object's metadata. When the process is
   * complete and error-free, also attaches a pre-signed download URL as .link.
   * @returns the parsed state, or null when the object has no daemon metadata
   * @throws rethrows any underlying S3 error after logging
   */
  public static async stat(s3Cache: S3CacheRatchetLike, path: string): Promise<DaemonProcessState> {
    try {
      Logger.debug('Daemon stat for path %s / %s', s3Cache.getDefaultBucket(), path);
      let stat: DaemonProcessState = null;

      const meta: HeadObjectOutput = await s3Cache.fetchMetaForCacheFile(path);
      Logger.debug('Daemon: Meta is %j', meta);
      const metaString: string = meta && meta.Metadata ? meta.Metadata[DaemonUtil.DAEMON_METADATA_KEY] : null;
      if (metaString) {
        stat = JSON.parse(metaString) as DaemonProcessState;

        if (stat.completedEpochMS && !stat.error) {
          stat.link = await s3Cache.preSignedDownloadUrlForCacheFile(path);
        }
      } else {
        Logger.warn('No metadata found! (Head was %j)', meta);
      }
      return stat;
    } catch (err) {
      Logger.error('Error while trying to fetch a daemon state: %j %s', path, err);
      throw err;
    }
  }

  /** Marks the process aborted (implemented as error() with the text 'Aborted'). */
  public static async abort(s3Cache: S3CacheRatchetLike, path: string): Promise<DaemonProcessState> {
    return DaemonUtil.error(s3Cache, path, 'Aborted');
  }
  /**
   * Marks the process failed: sets the error text and completion time, writing
   * the placeholder body back in place of any output.
   * @throws rethrows any underlying S3 error after logging
   */
  public static async error(s3Cache: S3CacheRatchetLike, path: string, error: string): Promise<DaemonProcessState> {
    try {
      const inStat: DaemonProcessState = await DaemonUtil.stat(s3Cache, path);
      inStat.error = error;
      inStat.completedEpochMS = new Date().getTime();
      return DaemonUtil.writeState(s3Cache, path, inStat, DaemonUtil.DEFAULT_CONTENT);
    } catch (err) {
      Logger.error('Error while trying to write a daemon error: %j %s', path, err);
      throw err;
    }
  }

  /**
   * Marks the process complete and writes the real output contents as the body
   * (non-streaming counterpart to streamDataAndFinish).
   * @throws rethrows any underlying S3 error after logging
   */
  public static async finalize(s3Cache: S3CacheRatchetLike, path: string, contents: Buffer): Promise<DaemonProcessState> {
    try {
      Logger.info('Finalizing daemon %s with %d bytes', path, contents.length);
      const inStat: DaemonProcessState = await DaemonUtil.stat(s3Cache, path);
      inStat.completedEpochMS = new Date().getTime();
      inStat.lastUpdatedMessage = 'Complete';

      return DaemonUtil.writeState(s3Cache, path, inStat, contents);
    } catch (err) {
      Logger.error('Error while trying to finalize a daemon: %j %s', path, err);
      throw err;
    }
  }
}