@bitblit/ratchet-aws 6.0.146-alpha → 6.0.148-alpha
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- package/package.json +5 -4
- package/src/batch/aws-batch-background-processor.spec.ts +22 -0
- package/src/batch/aws-batch-background-processor.ts +71 -0
- package/src/batch/aws-batch-ratchet.spec.ts +42 -0
- package/src/batch/aws-batch-ratchet.ts +70 -0
- package/src/build/ratchet-aws-info.ts +19 -0
- package/src/cache/memory-storage-provider.ts +39 -0
- package/src/cache/simple-cache-object-wrapper.ts +11 -0
- package/src/cache/simple-cache-read-options.ts +9 -0
- package/src/cache/simple-cache-storage-provider.ts +15 -0
- package/src/cache/simple-cache.spec.ts +42 -0
- package/src/cache/simple-cache.ts +81 -0
- package/src/cloudwatch/cloud-watch-log-group-ratchet.spec.ts +26 -0
- package/src/cloudwatch/cloud-watch-log-group-ratchet.ts +105 -0
- package/src/cloudwatch/cloud-watch-logs-ratchet.spec.ts +123 -0
- package/src/cloudwatch/cloud-watch-logs-ratchet.ts +232 -0
- package/src/cloudwatch/cloud-watch-metrics-ratchet.spec.ts +30 -0
- package/src/cloudwatch/cloud-watch-metrics-ratchet.ts +98 -0
- package/src/dao/example-prototype-dao-item.ts +8 -0
- package/src/dao/memory-prototype-dao-provider.ts +16 -0
- package/src/dao/prototype-dao-config.ts +8 -0
- package/src/dao/prototype-dao-db.ts +4 -0
- package/src/dao/prototype-dao-provider.ts +6 -0
- package/src/dao/prototype-dao.spec.ts +33 -0
- package/src/dao/prototype-dao.ts +110 -0
- package/src/dao/s3-simple-dao.ts +96 -0
- package/src/dao/simple-dao-item.ts +13 -0
- package/src/dynamodb/dynamo-ratchet-like.ts +61 -0
- package/src/dynamodb/dynamo-ratchet.spec.ts +206 -0
- package/src/dynamodb/dynamo-ratchet.ts +850 -0
- package/src/dynamodb/dynamo-table-ratchet.spec.ts +23 -0
- package/src/dynamodb/dynamo-table-ratchet.ts +189 -0
- package/src/dynamodb/hash-spreader.spec.ts +22 -0
- package/src/dynamodb/hash-spreader.ts +89 -0
- package/src/dynamodb/impl/dynamo-db-storage-provider.spec.ts +60 -0
- package/src/dynamodb/impl/dynamo-db-storage-provider.ts +140 -0
- package/src/dynamodb/impl/dynamo-db-sync-lock.spec.ts +41 -0
- package/src/dynamodb/impl/dynamo-db-sync-lock.ts +78 -0
- package/src/dynamodb/impl/dynamo-expiring-code-provider.ts +31 -0
- package/src/dynamodb/impl/dynamo-runtime-parameter-provider.spec.ts +65 -0
- package/src/dynamodb/impl/dynamo-runtime-parameter-provider.ts +44 -0
- package/src/ec2/ec2-ratchet.spec.ts +45 -0
- package/src/ec2/ec2-ratchet.ts +169 -0
- package/src/ecr/ecr-unused-image-cleaner-options.ts +9 -0
- package/src/ecr/ecr-unused-image-cleaner-output.ts +8 -0
- package/src/ecr/ecr-unused-image-cleaner-repository-output.ts +10 -0
- package/src/ecr/ecr-unused-image-cleaner.spec.ts +40 -0
- package/src/ecr/ecr-unused-image-cleaner.ts +183 -0
- package/src/ecr/retained-image-descriptor.ts +7 -0
- package/src/ecr/retained-image-reason.ts +4 -0
- package/src/ecr/used-image-finder.ts +6 -0
- package/src/ecr/used-image-finders/aws-batch-used-image-finder.ts +40 -0
- package/src/ecr/used-image-finders/lambda-used-image-finder.ts +51 -0
- package/src/environment/cascade-environment-service-provider.ts +28 -0
- package/src/environment/env-var-environment-service-provider.ts +36 -0
- package/src/environment/environment-service-config.ts +7 -0
- package/src/environment/environment-service-provider.ts +7 -0
- package/src/environment/environment-service.spec.ts +41 -0
- package/src/environment/environment-service.ts +89 -0
- package/src/environment/fixed-environment-service-provider.ts +26 -0
- package/src/environment/ssm-environment-service-provider.spec.ts +18 -0
- package/src/environment/ssm-environment-service-provider.ts +71 -0
- package/src/expiring-code/expiring-code-params.ts +7 -0
- package/src/expiring-code/expiring-code-provider.ts +6 -0
- package/src/expiring-code/expiring-code-ratchet.spec.ts +10 -0
- package/src/expiring-code/expiring-code-ratchet.ts +44 -0
- package/src/expiring-code/expiring-code.ts +6 -0
- package/src/iam/aws-credentials-ratchet.ts +25 -0
- package/src/lambda/lambda-event-detector.ts +55 -0
- package/src/lambda/lambda-event-type-guards.ts +38 -0
- package/src/model/cloud-watch-metrics-minute-level-dynamo-count-request.ts +18 -0
- package/src/model/dynamo-count-result.ts +8 -0
- package/src/route53/route-53-ratchet.ts +77 -0
- package/src/runtime-parameter/cached-stored-runtime-parameter.ts +5 -0
- package/src/runtime-parameter/global-variable-override-runtime-parameter-provider.spec.ts +41 -0
- package/src/runtime-parameter/global-variable-override-runtime-parameter-provider.ts +82 -0
- package/src/runtime-parameter/memory-runtime-parameter-provider.ts +42 -0
- package/src/runtime-parameter/runtime-parameter-provider.ts +12 -0
- package/src/runtime-parameter/runtime-parameter-ratchet.spec.ts +53 -0
- package/src/runtime-parameter/runtime-parameter-ratchet.ts +84 -0
- package/src/runtime-parameter/stored-runtime-parameter.ts +6 -0
- package/src/s3/expanded-file-children.ts +5 -0
- package/src/s3/impl/s3-environment-service-provider.ts +41 -0
- package/src/s3/impl/s3-expiring-code-provider.spec.ts +63 -0
- package/src/s3/impl/s3-expiring-code-provider.ts +71 -0
- package/src/s3/impl/s3-prototype-dao-provider.spec.ts +45 -0
- package/src/s3/impl/s3-prototype-dao-provider.ts +37 -0
- package/src/s3/impl/s3-remote-file-tracking-provider-options.ts +6 -0
- package/src/s3/impl/s3-remote-file-tracking-provider.spec.ts +67 -0
- package/src/s3/impl/s3-remote-file-tracking-provider.ts +157 -0
- package/src/s3/impl/s3-storage-provider.spec.ts +32 -0
- package/src/s3/impl/s3-storage-provider.ts +60 -0
- package/src/s3/s3-cache-ratchet-like.ts +64 -0
- package/src/s3/s3-cache-ratchet.spec.ts +150 -0
- package/src/s3/s3-cache-ratchet.ts +476 -0
- package/src/s3/s3-location-sync-ratchet.ts +207 -0
- package/src/s3/s3-ratchet.spec.ts +26 -0
- package/src/s3/s3-ratchet.ts +26 -0
- package/src/ses/ses-mail-sending-provider.ts +85 -0
- package/src/sns/sns-ratchet.spec.ts +24 -0
- package/src/sns/sns-ratchet.ts +52 -0
- package/src/sync-lock/memory-sync-lock.ts +48 -0
- package/src/sync-lock/sync-lock-provider.ts +5 -0
package/src/s3/impl/s3-prototype-dao-provider.ts (+37 -0):

```diff
@@ -0,0 +1,37 @@
+import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+import { PutObjectOutput } from '@aws-sdk/client-s3';
+import { PrototypeDaoProvider } from '../../dao/prototype-dao-provider.js';
+import { PrototypeDaoDb } from '../../dao/prototype-dao-db.js';
+import { S3CacheRatchetLike } from '../s3-cache-ratchet-like.js';
+
+/* An implementation that puts all the values in a single JSON file in S3
+This won't scale well at all for any kind of serious load, but is the easiest
+solution for a very low traffic website since it doesn't require setting up tables,
+provisioning, etc
+*/
+export class S3PrototypeDaoProvider<T> implements PrototypeDaoProvider<T> {
+  constructor(
+    private s3CacheRatchet: S3CacheRatchetLike,
+    private keyName: string,
+  ) {
+    RequireRatchet.notNullOrUndefined(s3CacheRatchet, 's3CacheRatchet');
+    RequireRatchet.notNullUndefinedOrOnlyWhitespaceString(s3CacheRatchet.getDefaultBucket(), 's3CacheRatchet.defaultBucket');
+    RequireRatchet.notNullUndefinedOrOnlyWhitespaceString(keyName, 'keyName');
+  }
+
+  public async storeDatabase(inDb: PrototypeDaoDb<T>): Promise<boolean> {
+    const toSave: PrototypeDaoDb<T> = inDb || { items: [], lastModifiedEpochMS: null };
+    toSave.lastModifiedEpochMS = Date.now();
+    const put: PutObjectOutput = await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName, toSave);
+    const rval: boolean = !!put;
+    return rval;
+  }
+
+  public async loadDatabase(): Promise<PrototypeDaoDb<T>> {
+    const rval: PrototypeDaoDb<T> = (await this.s3CacheRatchet.fetchCacheFileAsObject<PrototypeDaoDb<any>>(this.keyName)) || {
+      items: [],
+      lastModifiedEpochMS: Date.now(),
+    };
+    return rval;
+  }
+}
```
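For orientation, here is a minimal usage sketch of the provider added above. It is not part of the diff: the bucket name, key name, and item shape are placeholders, and the import specifiers mirror the relative in-package paths used by the spec files in this diff (a consumer would import from the published package entry points instead).

```typescript
import { S3Client } from '@aws-sdk/client-s3';
import { S3CacheRatchet } from '../s3-cache-ratchet.js';
import { S3PrototypeDaoProvider } from './s3-prototype-dao-provider.js';
import { PrototypeDaoDb } from '../../dao/prototype-dao-db.js';

// Placeholder item type for the sketch
interface Widget {
  id: string;
  name: string;
}

export async function widgetExample(): Promise<void> {
  // The constructor requires a ratchet with a default bucket configured
  const ratchet: S3CacheRatchet = new S3CacheRatchet(new S3Client({ region: 'us-east-1' }), 'example-bucket');
  // All items live in a single JSON document stored under this key
  const provider: S3PrototypeDaoProvider<Widget> = new S3PrototypeDaoProvider<Widget>(ratchet, 'dao/widgets.json');

  // loadDatabase falls back to an empty database if the key has never been written
  const db: PrototypeDaoDb<Widget> = await provider.loadDatabase();
  db.items.push({ id: 'w-1', name: 'First widget' });

  // storeDatabase stamps lastModifiedEpochMS and writes the whole document back to S3
  const stored: boolean = await provider.storeDatabase(db);
  console.log('Stored:', stored);
}
```

In the package this provider would presumably be wired into the PrototypeDao class listed above rather than called directly; the direct calls here just illustrate its two methods.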
package/src/s3/impl/s3-remote-file-tracking-provider.spec.ts (+67 -0):

```diff
@@ -0,0 +1,67 @@
+import { beforeEach, describe, expect, test } from 'vitest';
+import { S3RemoteFileTrackingProvider } from './s3-remote-file-tracking-provider.js';
+import { mock, MockProxy } from 'vitest-mock-extended';
+import { S3CacheRatchetLike } from '../s3-cache-ratchet-like.js';
+import { S3CacheRatchet } from '../s3-cache-ratchet.js';
+import { S3Client } from '@aws-sdk/client-s3';
+import { S3RemoteFileTrackingProviderOptions } from './s3-remote-file-tracking-provider-options.js';
+import { AwsCredentialsRatchet } from '../../iam/aws-credentials-ratchet.js';
+import { RemoteFileTracker } from '@bitblit/ratchet-common/network/remote-file-tracker/remote-file-tracker';
+import {
+  RemoteStatusDataAndContent
+} from "@bitblit/ratchet-common/lib/network/remote-file-tracker/remote-status-data-and-content";
+
+let mockS3Ratchet: MockProxy<S3CacheRatchetLike>;
+
+describe('#S3RemoteFileTrackingProvider', () => {
+  beforeEach(() => {
+    mockS3Ratchet = mock<S3CacheRatchetLike>();
+    mockS3Ratchet.getDefaultBucket.mockReturnValue('TEST-BUCKET');
+  });
+
+  test.skip('Should save/load files', async () => {
+    AwsCredentialsRatchet.applySetProfileEnvironmentalVariable('erigir');
+    const ratchet: S3CacheRatchetLike = new S3CacheRatchet(new S3Client({ region: 'us-east-1' }), 'erigir-backup');
+    // setup initial state
+    await ratchet.writeStringToCacheFile('test.txt', 'This is a test ' + new Date().toISOString());
+
+    const testOpts: S3RemoteFileTrackingProviderOptions = {
+      //s3CacheRatchet: mockS3Ratchet
+      s3CacheRatchet: ratchet,
+    };
+
+    const svc: S3RemoteFileTrackingProvider = new S3RemoteFileTrackingProvider(testOpts);
+    const obj = new RemoteFileTracker<string>({
+      key: 'test.txt',
+      provider: svc,
+    });
+
+    expect(obj.remoteStatusData).toBeNull;
+    await obj.sync();
+    expect(obj.remoteStatusData).not.toBeNull;
+
+    const changed1: boolean = await obj.modifiedSinceLastSync();
+    expect(changed1).toEqual(false);
+
+    await ratchet.writeStringToCacheFile('test.txt', 'This is a test ' + new Date().toISOString());
+
+    const changed2: boolean = await obj.modifiedSinceLastSync();
+    expect(changed2).toEqual(true);
+
+    const raw: RemoteStatusDataAndContent<string> = await obj.pullRemoteData();
+    expect(raw).not.toBeNull;
+    const data: string = await RemoteFileTracker.dataAsString(raw);
+    expect(data).not.toBeNull;
+
+    expect(data.startsWith('This is a test')).toBeTruthy;
+
+    const pushRes: any = await obj.pushStringToRemote('Local-Test', { force: false, backup: true });
+    expect(pushRes).not.toBeNull;
+
+    const raw2: RemoteStatusDataAndContent<string> = await obj.pullRemoteData();
+    expect(raw2).not.toBeNull;
+    const data2: string = await RemoteFileTracker.dataAsString(raw2);
+    expect(data2).not.toBeNull;
+    expect(data2).toEqual('Local-Test');
+  }, 300_000);
+});
```
package/src/s3/impl/s3-remote-file-tracking-provider.ts (+157 -0):

```diff
@@ -0,0 +1,157 @@
+import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+import { Logger } from '@bitblit/ratchet-common/logger/logger';
+import { StopWatch } from '@bitblit/ratchet-common/lang/stop-watch';
+import {
+  CompleteMultipartUploadCommandOutput,
+  GetObjectCommandInput,
+  GetObjectCommandOutput,
+  HeadObjectCommandOutput,
+} from '@aws-sdk/client-s3';
+import { DateTime } from 'luxon';
+import { RemoteFileTrackingProvider } from '@bitblit/ratchet-common/network/remote-file-tracker/remote-file-tracking-provider';
+import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
+import { RemoteStatusData } from '@bitblit/ratchet-common/network/remote-file-tracker/remote-status-data';
+import { S3RemoteFileTrackingProviderOptions } from './s3-remote-file-tracking-provider-options.js';
+import { RemoteStatusDataAndContent } from '@bitblit/ratchet-common/network/remote-file-tracker/remote-status-data-and-content';
+import { FileTransferResult } from '@bitblit/ratchet-common/network/remote-file-tracker/file-transfer-result';
+import { BackupResult } from '@bitblit/ratchet-common/network/remote-file-tracker/backup-result';
+import { RemoteFileTrackerPushOptions } from '@bitblit/ratchet-common/network/remote-file-tracker/remote-file-tracker-push-options';
+import { RemoteFileTracker } from '@bitblit/ratchet-common/network/remote-file-tracker/remote-file-tracker';
+import { ErrorRatchet } from '@bitblit/ratchet-common/lang/error-ratchet';
+import { FileTransferResultType } from '@bitblit/ratchet-common/network/remote-file-tracker/file-transfer-result-type';
+
+// Keeps a local file up-to-date with a file on S3
+export class S3RemoteFileTrackingProvider implements RemoteFileTrackingProvider<string> {
+  constructor(private opts: S3RemoteFileTrackingProviderOptions) {
+    RequireRatchet.notNullOrUndefined(opts, 'opts');
+    RequireRatchet.notNullOrUndefined(opts.s3CacheRatchet, 'opts.s3CacheRatchet');
+    RequireRatchet.notNullOrUndefined(opts.s3CacheRatchet.getDefaultBucket(), 'opts.s3CacheRatchet must have default bucket set');
+  }
+
+  public async readRemoteStatus(key: string): Promise<RemoteStatusData<string>> {
+    let rval: RemoteStatusData<string> = null;
+    if (StringRatchet.trimToNull(key)) {
+      const meta: HeadObjectCommandOutput = await this.opts.s3CacheRatchet.fetchMetaForCacheFile(key);
+      if (meta) {
+        rval = {
+          key: key,
+          statusTakenEpochMs: Date.now(),
+          remoteSizeInBytes: meta.ContentLength,
+          remoteLastUpdatedEpochMs: meta.LastModified.getTime(),
+          remoteHash: meta.ETag,
+        };
+      }
+    }
+    return rval;
+  }
+
+  public async pullRemoteData(key: string, ifNewerThan?: RemoteStatusData<string>): Promise<RemoteStatusDataAndContent<string>> {
+    let rval: RemoteStatusDataAndContent<string> = null;
+    const sw: StopWatch = new StopWatch();
+
+    const req: GetObjectCommandInput = {
+      Bucket: this.opts.s3CacheRatchet.getDefaultBucket(),
+      Key: key,
+      IfModifiedSince: ifNewerThan?.remoteLastUpdatedEpochMs ? new Date(ifNewerThan.remoteLastUpdatedEpochMs) : null,
+    };
+
+    const output: GetObjectCommandOutput = await this.opts.s3CacheRatchet.fetchCacheFilePassThru(req);
+    if (output) {
+      rval = {
+        status: {
+          key: key,
+          statusTakenEpochMs: Date.now(),
+          remoteSizeInBytes: output.ContentLength,
+          remoteLastUpdatedEpochMs: output.LastModified.getTime(),
+          remoteHash: output.ETag,
+        },
+        content: output.Body.transformToWebStream(),
+      };
+      Logger.info('Fetched remote to local, %d bytes in %s : %s', output.ContentLength, sw.dump(), key);
+    } else {
+      Logger.info('Did not pull %s - not modified', key);
+    }
+
+    return rval;
+  }
+
+  public async sendDataToRemote(
+    src: ReadableStream,
+    key: string,
+    opts: RemoteFileTrackerPushOptions,
+    checkStatus: RemoteStatusData<string>,
+  ): Promise<FileTransferResult> {
+    RequireRatchet.notNullOrUndefined(src, 'src');
+    RequireRatchet.notNullOrUndefined(key, 'key');
+    RequireRatchet.notNullOrUndefined(opts, 'opts');
+    const rval: FileTransferResult = {
+      type: null,
+      error: null,
+      bytesTransferred: null,
+      backupResult: BackupResult.NotRequested,
+    };
+
+    const sw: StopWatch = new StopWatch();
+    Logger.info('Sending local data to remote : %s : %j : %j', key, opts, checkStatus);
+
+    // Check it was not modified
+    // TODO: should be able to wrap this into the PUT request probably?
+    if (!opts.force && checkStatus?.remoteLastUpdatedEpochMs) {
+      sw.start('statusCheck');
+      const current: RemoteStatusData<string> = await this.readRemoteStatus(key);
+      if (!RemoteFileTracker.statusMatch(checkStatus, current)) {
+        rval.type = FileTransferResultType.Error;
+        rval.error =
+          'CheckStatus did not match, was ' +
+          JSON.stringify(checkStatus) +
+          ' but current is ' +
+          JSON.stringify(current) +
+          ' and force not specified';
+        rval.bytesTransferred = 0;
+        return rval; // TODO: Refactor for single exit point
+      }
+      sw.stop('statusCheck');
+      Logger.info('Performed status check in %s', sw.dump('statusCheck'));
+    }
+
+    // Backup if requested
+    if (opts.backup) {
+      sw.start('backup');
+      rval.backupResult = await this.backupRemote(key);
+      sw.stop('backup');
+      Logger.info('Performed backup in %s', sw.dump('backup'));
+    }
+
+    // Write the file
+    try {
+      sw.start('send');
+      const out: CompleteMultipartUploadCommandOutput = await this.opts.s3CacheRatchet.writeStreamToCacheFile(key, src);
+      sw.stop('send');
+      Logger.info('Sent to remote in %s : %j', sw.dump('send'), out);
+    } catch (err) {
+      Logger.error('Failed to write %s - %s', key, err, err);
+      rval.type = FileTransferResultType.Error;
+      rval.error = ErrorRatchet.safeStringifyErr(err);
+      rval.bytesTransferred = 0;
+    }
+    Logger.info('Overall timing : %s', sw.dump());
+    return rval;
+  }
+
+  public async backupRemote(key: string): Promise<BackupResult> {
+    let rval: BackupResult = null;
+    try {
+      const lastSlash: number = key.lastIndexOf('/');
+      const datePart: string = '/backup/' + DateTime.now().toFormat('yyyy/MM/dd/HH/mm/ss') + '/';
+      const newPath: string = lastSlash > -1 ? key.substring(0, lastSlash) + datePart + key.substring(lastSlash + 1) : datePart + key;
+
+      Logger.info('Backing up path %s to %s', key, newPath);
+      await this.opts.s3CacheRatchet.copyFile(key, newPath);
+      rval = BackupResult.Success;
+    } catch (err) {
+      Logger.error('Failed to backup %s : %s', key, err, err);
+      rval = BackupResult.Error;
+    }
+    return rval;
+  }
+}
```
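A small sketch of the backup-key layout produced by backupRemote above: a date-stamped backup/ segment is inserted between the key's folder portion and its file name. The helper name and sample keys below are illustrative, not part of the package.

```typescript
import { DateTime } from 'luxon';

// Mirrors the path construction inside backupRemote above
export function backupKeyFor(key: string, now: DateTime = DateTime.now()): string {
  const lastSlash: number = key.lastIndexOf('/');
  const datePart: string = '/backup/' + now.toFormat('yyyy/MM/dd/HH/mm/ss') + '/';
  return lastSlash > -1 ? key.substring(0, lastSlash) + datePart + key.substring(lastSlash + 1) : datePart + key;
}

// 'reports/daily.json' -> 'reports/backup/2025/01/31/09/15/00/daily.json'
// 'daily.json'         -> '/backup/2025/01/31/09/15/00/daily.json' (keys without a folder get a leading slash)
console.log(backupKeyFor('reports/daily.json'));
```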
package/src/s3/impl/s3-storage-provider.spec.ts (+32 -0):

```diff
@@ -0,0 +1,32 @@
+import { S3StorageProvider } from './s3-storage-provider.js';
+import { S3Client } from '@aws-sdk/client-s3';
+import { describe, expect, test } from 'vitest';
+import { S3CacheRatchet } from '../s3-cache-ratchet.js';
+import { SimpleCache } from '../../cache/simple-cache.js';
+import { SimpleCacheObjectWrapper } from '../../cache/simple-cache-object-wrapper.js';
+
+describe('#S3StorageProvider', function () {
+  test.skip('should read/write/delete with an S3 handler', async () => {
+    const s3: S3Client = new S3Client({ region: 'us-east-1' });
+    const cache: S3CacheRatchet = new S3CacheRatchet(s3, 'test-bucket');
+    const s3StorageProvider: S3StorageProvider = new S3StorageProvider(cache, 'test-cache');
+
+    const simpleCache: SimpleCache = new SimpleCache(s3StorageProvider, 2000000);
+
+    await simpleCache.removeFromCache('test1'); // Make sure clear
+
+    const test1a: SimpleCacheObjectWrapper<any> = await simpleCache.fetchWrapper<any>('test1', () => Promise.resolve({ x: 1 }));
+    expect(test1a).not.toBeNull();
+    expect(test1a.generated).toBeTruthy();
+    expect(test1a.value).not.toBeNull();
+    expect(test1a.value['x']).toEqual(1);
+
+    const test1b: SimpleCacheObjectWrapper<any> = await simpleCache.fetchWrapper<any>('test1', () => Promise.resolve({ x: 1 }));
+    expect(test1b).not.toBeNull();
+    expect(test1b.generated).toBeFalsy();
+    expect(test1b.value).not.toBeNull();
+    expect(test1b.value['x']).toEqual(1);
+
+    await simpleCache.removeFromCache('test1'); // Make sure clear
+  }, 60_000);
+});
```
package/src/s3/impl/s3-storage-provider.ts (+60 -0):

```diff
@@ -0,0 +1,60 @@
+/*
+Objects implementing this interface can store and retrieve objects in a cache, using a read-thru
+approach.
+*/
+
+import { SimpleCacheObjectWrapper } from '../../cache/simple-cache-object-wrapper.js';
+import { SimpleCacheStorageProvider } from '../../cache/simple-cache-storage-provider.js';
+import { PutObjectOutput } from '@aws-sdk/client-s3';
+import { S3CacheRatchetLike } from '../s3-cache-ratchet-like.js';
+import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
+import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
+
+export class S3StorageProvider implements SimpleCacheStorageProvider {
+  constructor(
+    private s3CacheRatchet: S3CacheRatchetLike,
+    private prefix: string,
+  ) {
+    RequireRatchet.notNullOrUndefined(this.s3CacheRatchet, 's3CacheRatchet');
+    RequireRatchet.notNullOrUndefined(this.s3CacheRatchet.getDefaultBucket(), 's3CacheRatchet.defaultBucket');
+  }
+
+  public keyToPath(cacheKey: string): string {
+    let rval: string = StringRatchet.trimToEmpty(this.prefix);
+    if (rval.length > 0 && !rval.endsWith('/')) {
+      rval += '/';
+    }
+    rval += cacheKey;
+    return rval;
+  }
+
+  public async readFromCache<T>(cacheKey: string): Promise<SimpleCacheObjectWrapper<T>> {
+    const rval: SimpleCacheObjectWrapper<T> = await this.s3CacheRatchet.fetchCacheFileAsObject<SimpleCacheObjectWrapper<T>>(
+      this.keyToPath(cacheKey),
+    );
+    return rval;
+  }
+
+  public async storeInCache<T>(value: SimpleCacheObjectWrapper<T>): Promise<boolean> {
+    RequireRatchet.notNullOrUndefined(value, 'value');
+    RequireRatchet.notNullOrUndefined(value.cacheKey, 'value.cacheKey');
+    const tmp: PutObjectOutput = await this.s3CacheRatchet.writeObjectToCacheFile(this.keyToPath(value.cacheKey), value);
+    return !!tmp;
+  }
+
+  public async removeFromCache(cacheKey: string): Promise<void> {
+    await this.s3CacheRatchet.removeCacheFile(this.keyToPath(cacheKey));
+  }
+
+  public async clearCache(): Promise<number> {
+    const keys: string[] = await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(''));
+    const _removed: any[] = await Promise.all(keys.map((k) => this.removeFromCache(k)));
+    return keys.length;
+  }
+
+  public async readAll(): Promise<SimpleCacheObjectWrapper<any>[]> {
+    const keys: string[] = await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(''));
+    const rval: SimpleCacheObjectWrapper<any>[] = await Promise.all(keys.map((k) => this.readFromCache(k)));
+    return rval;
+  }
+}
```
package/src/s3/s3-cache-ratchet-like.ts (+64 -0):

```diff
@@ -0,0 +1,64 @@
+/*
+Objects implementing S3CacheRatchetLike wrap S3 with an ability to store and retrieve objects cached as json files
+*/
+
+import { Readable } from 'stream';
+import {
+  CompleteMultipartUploadCommandOutput,
+  CopyObjectCommandOutput,
+  DeleteObjectCommandOutput,
+  GetObjectCommandInput,
+  GetObjectCommandOutput,
+  HeadObjectCommandOutput,
+  PutObjectCommandInput,
+  S3Client,
+} from '@aws-sdk/client-s3';
+
+export interface S3CacheRatchetLike {
+  getDefaultBucket(): string;
+  getS3Client(): S3Client;
+  fileExists(key: string, bucket?: string): Promise<boolean>;
+
+  fetchCacheFilePassThru(req: GetObjectCommandInput): Promise<GetObjectCommandOutput>;
+
+  fetchCacheFileAsS3GetObjectCommandOutput(key: string, bucket?: string): Promise<GetObjectCommandOutput>;
+
+  fetchCacheFileAsReadableStream(key: string, bucket?: string): Promise<ReadableStream>;
+
+  fetchCacheFileAsBuffer(key: string, bucket?: string): Promise<Buffer>;
+
+  fetchCacheFileAsString(key: string, bucket?: string): Promise<string>;
+
+  fetchCacheFileAsObject<T>(key: string, bucket?: string): Promise<T>;
+
+  removeCacheFile(key: string, bucket?: string): Promise<DeleteObjectCommandOutput>;
+  writeObjectToCacheFile(
+    key: string,
+    dataObject: any,
+    template?: PutObjectCommandInput,
+    bucket?: string,
+  ): Promise<CompleteMultipartUploadCommandOutput>;
+
+  writeStringToCacheFile(
+    key: string,
+    dataString: string,
+    template?: PutObjectCommandInput,
+    bucket?: string,
+  ): Promise<CompleteMultipartUploadCommandOutput>;
+
+  writeStreamToCacheFile(
+    key: string,
+    data: ReadableStream | Readable,
+    template?: PutObjectCommandInput,
+    bucket?: string,
+  ): Promise<CompleteMultipartUploadCommandOutput>;
+
+  synchronize(srcPrefix: string, targetPrefix: string, targetRatchet?: S3CacheRatchetLike, recurseSubFolders?: boolean): Promise<string[]>;
+  preSignedDownloadUrlForCacheFile(key: string, expirationSeconds?: number, bucket?: string): Promise<string>;
+  fetchMetaForCacheFile(key: string, bucket?: string): Promise<HeadObjectCommandOutput>;
+  cacheFileAgeInSeconds(key: string, bucket?: string): Promise<number>;
+  copyFile(srcKey: string, dstKey: string, srcBucket?: string, dstBucket?: string): Promise<CopyObjectCommandOutput>;
+  quietCopyFile(srcKey: string, dstKey: string, srcBucket?: string, dstBucket?: string): Promise<boolean>;
+  directChildrenOfPrefix(prefix: string, expandFiles?: boolean, bucket?: string, maxToReturn?: number): Promise<string[]>;
+  allSubFoldersOfPrefix(prefix: string, bucket?: string): Promise<string[]>;
+}
```
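A minimal sketch of programming against the S3CacheRatchetLike interface above, using the concrete S3CacheRatchet from this package to round-trip a JSON object. This is not part of the diff: the bucket, key, and object shape are placeholders, and the relative import paths mirror the in-package paths used by the spec files in this diff.

```typescript
import { S3Client } from '@aws-sdk/client-s3';
import { S3CacheRatchet } from './s3-cache-ratchet.js';
import { S3CacheRatchetLike } from './s3-cache-ratchet-like.js';

// Placeholder shape for the cached JSON document
interface Settings {
  theme: string;
}

export async function roundTripSettings(): Promise<Settings> {
  // Depend on the interface so tests can swap in a mock (e.g. mock<S3CacheRatchetLike>() from vitest-mock-extended)
  const ratchet: S3CacheRatchetLike = new S3CacheRatchet(new S3Client({ region: 'us-east-1' }), 'example-bucket');

  // Writes the object as a JSON cache file under the given key
  await ratchet.writeObjectToCacheFile('config/settings.json', { theme: 'dark' });

  // Reads the key back and parses the JSON into the requested type
  const loaded: Settings = await ratchet.fetchCacheFileAsObject<Settings>('config/settings.json');
  return loaded;
}
```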
package/src/s3/s3-cache-ratchet.spec.ts (+150 -0):

```diff
@@ -0,0 +1,150 @@
+import {
+  CopyObjectCommand,
+  CreateMultipartUploadCommand,
+  GetObjectCommand,
+  HeadObjectCommand,
+  PutObjectCommand,
+  PutObjectCommandInput,
+  PutObjectCommandOutput,
+  S3Client,
+  UploadPartCommand,
+} from '@aws-sdk/client-s3';
+import { S3CacheRatchet } from './s3-cache-ratchet.js';
+import { Logger } from '@bitblit/ratchet-common/logger/logger';
+import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
+import { WebStreamRatchet } from '@bitblit/ratchet-common/lang/web-stream-ratchet';
+import { mockClient } from 'aws-sdk-client-mock';
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+
+vi.mock('@aws-sdk/s3-request-presigner', async (importOriginal) => {
+  const mod = await importOriginal<typeof import('@aws-sdk/s3-request-presigner')>();
+  return {
+    ...mod,
+    getSignedUrl: vi.fn(() => Promise.resolve('https://test.link/test.jpg')),
+  };
+});
+
+let mockS3 = null;
+let mockS3OtherAccount = null;
+
+describe('#fileExists', function () {
+  // I'd rather do this above but then typescript screams it does not implement S3Client
+  mockS3 = mockClient(S3Client);
+  mockS3OtherAccount = mockClient(S3Client);
+
+  beforeEach(() => {
+    mockS3.reset();
+    mockS3OtherAccount.reset();
+  });
+
+  test('should return false for files that do not exist', async () => {
+    mockS3.on(HeadObjectCommand).rejects({ statusCode: 404, $metadata: null });
+    const cache: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test-bucket');
+    const out: boolean = await cache.fileExists('test-missing-file');
+
+    expect(out).toEqual(false);
+  });
+
+  // TODO: CAW 2023-02-28 : Make me work after the stub gets fixed for this
+  test.skip('should create a expiring link', async () => {
+    //mockS3.getSignedUrl.mockReturnValue('https://test.link/test.jpg');
+
+    const cache: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test-bucket');
+    const out: string = await cache.preSignedDownloadUrlForCacheFile('test.jpg', 300);
+
+    expect(out).toEqual('https://test.link/test.jpg');
+  });
+
+  test('should copy an object', async () => {
+    mockS3.on(CopyObjectCommand).resolves({});
+    const cache: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test-bucket');
+    const out: boolean = await cache.quietCopyFile('test.png', 'test2.png');
+
+    expect(out).toBeTruthy();
+  });
+
+  // TODO: CAW 2023-02-28 : Make me work after the stub gets fixed for this
+  test.skip('should copy a file to s3', async () => {
+    //mockS3['config'] = {};
+    //mockS3.reset();
+    // This mocks for uploading small files
+    mockS3.on(PutObjectCommand).resolves({});
+    // These mock for the multipart upload command
+    mockS3.on(CreateMultipartUploadCommand).resolves({ UploadId: '1' });
+    mockS3.on(UploadPartCommand).resolves({ ETag: '1' });
+
+    const cache: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test-bucket');
+    const stream: ReadableStream = WebStreamRatchet.stringToWebReadableStream(StringRatchet.createRandomHexString(1024 * 1024 * 6));
+
+    const out: PutObjectCommandOutput = await cache.writeStreamToCacheFile('s3-cache-ratchet.spec.ts', stream, {
+      ContentType: 'text/typescript',
+    } as PutObjectCommandInput);
+
+    Logger.info('Calls: %j', mockS3.calls());
+    expect(out).toBeTruthy();
+  });
+
+  test('should pull a file as a string', async () => {
+    // Need to re-call this multiple times to get the stream reset
+    async function createNew(): Promise<any> {
+      return {
+        Body: {
+          transformToByteArray: async () =>
+            Promise.resolve(StringRatchet.stringToUint8Array(JSON.stringify({ test: StringRatchet.createRandomHexString(128) }))),
+          transformToString: async () => Promise.resolve(JSON.stringify({ test: StringRatchet.createRandomHexString(128) })),
+        },
+        $metadata: null,
+      };
+    }
+
+    mockS3.on(GetObjectCommand).resolves(createNew());
+
+    const cache: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test-bucket');
+    const fileName: string = 'test-file.json';
+
+    const outBuf: Buffer = await cache.fetchCacheFileAsBuffer(fileName);
+    expect(outBuf).toBeTruthy();
+    expect(outBuf.length).toBeGreaterThan(100);
+    mockS3.reset();
+    mockS3.on(GetObjectCommand).resolves(createNew());
+
+    const outString: string = await cache.fetchCacheFileAsString(fileName);
+    expect(outString).toBeTruthy();
+    expect(outString.length).toBeGreaterThan(100);
+    mockS3.reset();
+    mockS3.on(GetObjectCommand).resolves(createNew());
+
+    const outObject: any = await cache.fetchCacheFileAsObject(fileName);
+    expect(outObject).toBeTruthy();
+    expect(outObject['test']).toBeTruthy();
+  });
+
+  //---
+
+  test.skip('should sync 2 folders', async () => {
+    const cache1: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test1');
+    const cache2: S3CacheRatchet = new S3CacheRatchet(mockS3, 'test2');
+    const out: string[] = await cache1.synchronize('src/', 'dst/', cache2);
+
+    expect(out).not.toBeNull();
+  }, 60_000);
+
+  test.skip('should list direct children past 1000', async () => {
+    const s3: S3Client = new S3Client({ region: 'us-east-1' });
+    const cache: S3CacheRatchet = new S3CacheRatchet(s3, 'test-bucket');
+    const out: string[] = await cache.directChildrenOfPrefix('test/aws/test-path-with-lots-of-childen/');
+    expect(out).toBeTruthy();
+    expect(out.length).toBeGreaterThan(1000);
+
+    Logger.info('Got: %s', out);
+    expect(out).toBeTruthy();
+  });
+
+  test.skip('should sync cross-account', async () => {
+    const cache1: S3CacheRatchet = new S3CacheRatchet(mockS3, 'bucket1');
+    const cache2: S3CacheRatchet = new S3CacheRatchet(mockS3OtherAccount, 'bucket2');
+
+    const res: any = await cache1.synchronize('test1/', 'test2/', cache2, true);
+    expect(res).not.toBeNull();
+  }, 50_000);
+});
```