@baadal-sdk/dapi 0.28.4 → 0.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.js +1 -1
- package/dist/cjs/index.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/types/aws/client.d.ts +13 -0
- package/dist/types/aws/client.d.ts.map +1 -0
- package/dist/types/aws/db.d.ts +261 -9
- package/dist/types/aws/db.d.ts.map +1 -1
- package/dist/types/aws/index.d.ts +10 -11
- package/dist/types/aws/index.d.ts.map +1 -1
- package/dist/types/aws/s3.d.ts +90 -0
- package/dist/types/aws/s3.d.ts.map +1 -0
- package/dist/types/common/const.d.ts +3 -0
- package/dist/types/common/const.d.ts.map +1 -0
- package/dist/types/fs/index.d.ts +29 -25
- package/dist/types/fs/index.d.ts.map +1 -1
- package/dist/types/index.d.ts +3 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/utils/index.d.ts +6 -0
- package/dist/types/utils/index.d.ts.map +1 -0
- package/package.json +5 -1
- package/src/aws/client.ts +18 -0
- package/src/aws/db.ts +534 -105
- package/src/aws/index.ts +22 -8
- package/src/aws/s3.ts +476 -0
- package/src/common/const.ts +2 -0
- package/src/fs/index.ts +60 -54
- package/src/index.ts +3 -2
- package/src/utils/index.ts +39 -0
- package/dist/types/aws/db-client.d.ts +0 -7
- package/dist/types/aws/db-client.d.ts.map +0 -1
- package/src/aws/db-client.ts +0 -6
package/src/aws/index.ts
CHANGED
|
@@ -1,19 +1,33 @@
|
|
|
1
|
-
import * as
|
|
2
|
-
import
|
|
1
|
+
import * as db from './db';
|
|
2
|
+
import * as s3 from './s3';
|
|
3
|
+
import { warn, error } from '../common/logger';
|
|
3
4
|
|
|
4
|
-
|
|
5
|
+
// Ref: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html
|
|
6
|
+
// Types: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/modules/_aws_sdk_types.html
|
|
5
7
|
|
|
6
|
-
const init
|
|
8
|
+
const { init: dbInit, status: dbStatus } = db;
|
|
9
|
+
const { init: s3Init, status: s3Status } = s3;
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* @deprecated explicit init deprecated!
|
|
13
|
+
*/
|
|
14
|
+
export const init = (region: string) => {
|
|
15
|
+
warn('[@baadal-sdk/dapi] aws explicit init deprecated!');
|
|
7
16
|
if (!region) {
|
|
8
17
|
error(`AWS initialization error! Missing region: ${region}`);
|
|
9
18
|
return false;
|
|
10
19
|
}
|
|
11
20
|
|
|
12
|
-
|
|
21
|
+
const cond1 = dbInit(region);
|
|
22
|
+
const cond2 = s3Init(region);
|
|
23
|
+
return cond1 && cond2;
|
|
13
24
|
};
|
|
14
25
|
|
|
15
|
-
const status = () => {
|
|
16
|
-
return
|
|
26
|
+
export const status = () => {
|
|
27
|
+
return {
|
|
28
|
+
db: dbStatus(),
|
|
29
|
+
s3: s3Status(),
|
|
30
|
+
};
|
|
17
31
|
};
|
|
18
32
|
|
|
19
|
-
export { db,
|
|
33
|
+
export { db, s3 }; // named exports
|
package/src/aws/s3.ts
ADDED
|
@@ -0,0 +1,476 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Examples:
|
|
3
|
+
* Ref: https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/s3-examples.html
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import path from 'path';
|
|
7
|
+
import { Readable } from 'stream';
|
|
8
|
+
import fs from 'fs';
|
|
9
|
+
import {
|
|
10
|
+
S3Client,
|
|
11
|
+
PutObjectCommand,
|
|
12
|
+
PutObjectCommandInput,
|
|
13
|
+
GetObjectCommand,
|
|
14
|
+
GetObjectCommandInput,
|
|
15
|
+
ListObjectsV2Command,
|
|
16
|
+
ListObjectsV2CommandInput,
|
|
17
|
+
HeadObjectCommand,
|
|
18
|
+
HeadObjectCommandInput,
|
|
19
|
+
DeleteObjectCommand,
|
|
20
|
+
DeleteObjectCommandInput,
|
|
21
|
+
DeleteObjectsCommand,
|
|
22
|
+
DeleteObjectsCommandInput,
|
|
23
|
+
} from '@aws-sdk/client-s3';
|
|
24
|
+
import short from 'short-uuid';
|
|
25
|
+
import mime from 'mime-types';
|
|
26
|
+
import { chunkifyArray } from '@baadal-sdk/utils';
|
|
27
|
+
|
|
28
|
+
import { StringIndexable } from 'src/common/common.model';
|
|
29
|
+
import { assertPath, fileHash } from '../utils';
|
|
30
|
+
import { s3Client } from './client';
|
|
31
|
+
import { CustomError } from '../common/error';
|
|
32
|
+
import { warn, error } from '../common/logger';
|
|
33
|
+
import { BATCH_SIZE, CHUNK_SIZE } from '../common/const';
|
|
34
|
+
|
|
35
|
+
// import { Upload } from "@aws-sdk/lib-storage";
|
|
36
|
+
// Multipart upload: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/modules/_aws_sdk_lib_storage.html
|
|
37
|
+
|
|
38
|
+
// Factory for errors raised by this module, tagged with a distinctive name.
const AWSS3Error = (msg: string) => {
  return new CustomError(msg, { name: 'AWSS3Error' });
};
|
|
39
|
+
|
|
40
|
+
/** @internal */
|
|
41
|
+
export const init = (region: string) => {
|
|
42
|
+
// Ref: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/index.html
|
|
43
|
+
if (!s3Client.client) {
|
|
44
|
+
const awsS3Client = new S3Client({ region }); // may also pass `credentials`
|
|
45
|
+
s3Client.client = awsS3Client;
|
|
46
|
+
s3Client.id = short.uuid();
|
|
47
|
+
return true;
|
|
48
|
+
}
|
|
49
|
+
return false;
|
|
50
|
+
};
|
|
51
|
+
|
|
52
|
+
/** @internal */
|
|
53
|
+
export const status = () => s3Client.id;
|
|
54
|
+
|
|
55
|
+
const tryInit = (silent = false) => {
|
|
56
|
+
if (s3Client.client) return;
|
|
57
|
+
const region = process.env.AWS_REGION || '';
|
|
58
|
+
if (region) {
|
|
59
|
+
if (init(region)) {
|
|
60
|
+
// console.log('Auto-initialization of S3 successful');
|
|
61
|
+
return;
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
if (!silent) {
|
|
65
|
+
// throw AWSS3Error('AWS S3 is possibly uninitialized!');
|
|
66
|
+
throw AWSS3Error('Could not auto-initialize AWS S3!');
|
|
67
|
+
}
|
|
68
|
+
};
|
|
69
|
+
|
|
70
|
+
// auto-initialize on load
|
|
71
|
+
tryInit(true);
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* Create a new file in S3 bucket
|
|
75
|
+
* @param bucket S3 bucket name
|
|
76
|
+
* @param s3path S3 path to be created
|
|
77
|
+
* @param contents contents of the file to be created
|
|
78
|
+
* @returns true if the write is successful, null in case of error
|
|
79
|
+
*/
|
|
80
|
+
export const putObject = async (bucket: string, s3path: string, contents: string) => {
|
|
81
|
+
if (!s3Client.client) tryInit();
|
|
82
|
+
if (!s3Client.client) return null;
|
|
83
|
+
if (!bucket || !s3path || !contents) return null;
|
|
84
|
+
|
|
85
|
+
let baseParams: PutObjectCommandInput | null = null;
|
|
86
|
+
|
|
87
|
+
try {
|
|
88
|
+
baseParams = {
|
|
89
|
+
Bucket: bucket,
|
|
90
|
+
Key: s3path,
|
|
91
|
+
Body: '<contents>',
|
|
92
|
+
ContentType: 'text/plain; charset=utf-8',
|
|
93
|
+
ACL: 'public-read',
|
|
94
|
+
// CacheControl: 'max-age=86400,public',
|
|
95
|
+
};
|
|
96
|
+
const cmdParams: PutObjectCommandInput = { ...baseParams, Body: contents };
|
|
97
|
+
const command = new PutObjectCommand(cmdParams);
|
|
98
|
+
|
|
99
|
+
await s3Client.client.send(command);
|
|
100
|
+
} catch (err) {
|
|
101
|
+
console.error('PutObjectCommandInput:', baseParams);
|
|
102
|
+
console.error(err);
|
|
103
|
+
return null;
|
|
104
|
+
// throw err;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
return true;
|
|
108
|
+
};
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Upload a file to S3 bucket
|
|
112
|
+
* @param bucket S3 bucket name
|
|
113
|
+
* @param file (local) path of the file to upload
|
|
114
|
+
* @param s3path [optional] S3 path to be created, if not provided then derived from `file` path
|
|
115
|
+
* @returns true if the write is successful, null in case of error
|
|
116
|
+
*/
|
|
117
|
+
export const uploadFile = async (bucket: string, file: string, s3path?: string) => {
|
|
118
|
+
if (!s3Client.client) tryInit();
|
|
119
|
+
if (!s3Client.client) return null;
|
|
120
|
+
if (!bucket || !file) return null;
|
|
121
|
+
|
|
122
|
+
const filepath = assertPath(file);
|
|
123
|
+
const basename = path.basename(filepath);
|
|
124
|
+
const ext = basename.substr(basename.lastIndexOf('.'));
|
|
125
|
+
const contentType = mime.lookup(ext);
|
|
126
|
+
if (!contentType) {
|
|
127
|
+
error(`Could not detect file type for: ${basename} [${filepath}]`);
|
|
128
|
+
return null;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
if (!s3path) {
|
|
132
|
+
if (file !== filepath) {
|
|
133
|
+
s3path = file;
|
|
134
|
+
} else {
|
|
135
|
+
s3path = basename;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
let baseParams: PutObjectCommandInput | null = null;
|
|
140
|
+
|
|
141
|
+
try {
|
|
142
|
+
const hash = (await fileHash(filepath)) || '';
|
|
143
|
+
const fileStream = fs.createReadStream(filepath);
|
|
144
|
+
|
|
145
|
+
baseParams = {
|
|
146
|
+
Bucket: bucket,
|
|
147
|
+
Key: s3path,
|
|
148
|
+
Body: '<fileStream>',
|
|
149
|
+
ContentType: contentType,
|
|
150
|
+
ACL: 'public-read',
|
|
151
|
+
// CacheControl: 'max-age=86400,public',
|
|
152
|
+
Metadata: { hash },
|
|
153
|
+
};
|
|
154
|
+
const cmdParams: PutObjectCommandInput = { ...baseParams, Body: fileStream };
|
|
155
|
+
const command = new PutObjectCommand(cmdParams);
|
|
156
|
+
|
|
157
|
+
await s3Client.client.send(command);
|
|
158
|
+
} catch (err) {
|
|
159
|
+
console.error('PutObjectCommandInput:', baseParams);
|
|
160
|
+
console.error(err);
|
|
161
|
+
return null;
|
|
162
|
+
// throw err;
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
return true;
|
|
166
|
+
};
|
|
167
|
+
|
|
168
|
+
/**
|
|
169
|
+
* Upload a list of files to S3 bucket
|
|
170
|
+
* @param bucket S3 bucket name
|
|
171
|
+
* @param files (local) list of file paths to upload
|
|
172
|
+
* @param s3paths [optional] S3 path to be created, if not provided then derived from `file` path
|
|
173
|
+
* @returns true if the write is successful, null in case of error
|
|
174
|
+
*/
|
|
175
|
+
export const uploadFilesAll = async (bucket: string, files: string[], s3paths?: string[]) => {
|
|
176
|
+
if (!s3Client.client) tryInit();
|
|
177
|
+
if (!s3Client.client) return null;
|
|
178
|
+
if (!bucket || !files || !Array.isArray(files)) return null;
|
|
179
|
+
if (!files.length) return false;
|
|
180
|
+
if (s3paths && (!Array.isArray(s3paths) || !s3paths.length || files.length !== s3paths.length)) return null;
|
|
181
|
+
|
|
182
|
+
let errFlag = false;
|
|
183
|
+
|
|
184
|
+
const chunkedFiles = chunkifyArray(files, CHUNK_SIZE);
|
|
185
|
+
const chunkedPaths = s3paths ? chunkifyArray(s3paths, CHUNK_SIZE) : chunkedFiles;
|
|
186
|
+
|
|
187
|
+
for (let i = 0; i < chunkedFiles.length; i += 1) {
|
|
188
|
+
const filesChunk = chunkedFiles[i];
|
|
189
|
+
const pathsChunk = chunkedPaths[i];
|
|
190
|
+
const pList = filesChunk.map((item, j) => uploadFile(bucket, item, pathsChunk[j]));
|
|
191
|
+
const rList = await Promise.all(pList); // eslint-disable-line no-await-in-loop
|
|
192
|
+
|
|
193
|
+
const isSuccess = rList.every(e => e === true);
|
|
194
|
+
if (!isSuccess) errFlag = true;
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
return errFlag ? null : true;
|
|
198
|
+
};
|
|
199
|
+
|
|
200
|
+
/**
|
|
201
|
+
* Get the contents of a file in S3 bucket
|
|
202
|
+
* @param bucket S3 bucket name
|
|
203
|
+
* @param s3path S3 path of the file to be read
|
|
204
|
+
* @returns contents of the file, null in case of error
|
|
205
|
+
*/
|
|
206
|
+
export const getObject = async (bucket: string, s3path: string) => {
|
|
207
|
+
if (!s3Client.client) tryInit();
|
|
208
|
+
if (!s3Client.client) return null;
|
|
209
|
+
if (!bucket || !s3path) return null;
|
|
210
|
+
|
|
211
|
+
let contents: string | null = null;
|
|
212
|
+
const cmdParams: GetObjectCommandInput = { Bucket: bucket, Key: s3path };
|
|
213
|
+
const command = new GetObjectCommand(cmdParams);
|
|
214
|
+
|
|
215
|
+
try {
|
|
216
|
+
// helper function to convert a ReadableStream to a string
|
|
217
|
+
const streamToString = (stream: Readable): Promise<string> =>
|
|
218
|
+
new Promise((resolve, reject) => {
|
|
219
|
+
const chunks: Uint8Array[] = [];
|
|
220
|
+
stream.on('data', chunk => chunks.push(chunk));
|
|
221
|
+
stream.on('error', reject);
|
|
222
|
+
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
|
|
223
|
+
});
|
|
224
|
+
|
|
225
|
+
const data = await s3Client.client.send(command);
|
|
226
|
+
const bodyContents = await streamToString(data.Body as Readable);
|
|
227
|
+
// const metaData = data.Metadata;
|
|
228
|
+
|
|
229
|
+
if (bodyContents) {
|
|
230
|
+
contents = bodyContents as string;
|
|
231
|
+
}
|
|
232
|
+
} catch (err) {
|
|
233
|
+
console.error('GetObjectCommandInput:', cmdParams);
|
|
234
|
+
console.error(err);
|
|
235
|
+
return null;
|
|
236
|
+
// throw err;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
return contents;
|
|
240
|
+
};
|
|
241
|
+
|
|
242
|
+
/**
|
|
243
|
+
* Download a file from S3 bucket
|
|
244
|
+
* @param bucket S3 bucket name
|
|
245
|
+
* @param s3path S3 path of the file to be downloaded
|
|
246
|
+
* @param outPath [optional] path where the downloaded file is written, if not provided then derived from `s3path`
|
|
247
|
+
* @returns true if download is successful, null in case of error
|
|
248
|
+
*/
|
|
249
|
+
export const downloadFile = async (bucket: string, s3path: string, outPath?: string) => {
|
|
250
|
+
if (!s3Client.client) tryInit();
|
|
251
|
+
if (!s3Client.client) return null;
|
|
252
|
+
if (!bucket || !s3path) return null;
|
|
253
|
+
|
|
254
|
+
if (!outPath) outPath = s3path;
|
|
255
|
+
outPath = assertPath(outPath);
|
|
256
|
+
const cmdParams: GetObjectCommandInput = { Bucket: bucket, Key: s3path };
|
|
257
|
+
const command = new GetObjectCommand(cmdParams);
|
|
258
|
+
|
|
259
|
+
try {
|
|
260
|
+
// helper function to write a ReadableStream to a file
|
|
261
|
+
const writeStreamToFile = (stream: Readable): Promise<void> =>
|
|
262
|
+
new Promise((resolve, reject) => {
|
|
263
|
+
// Ref: https://stackabuse.com/writing-to-files-in-node-js/
|
|
264
|
+
const writeStream = fs.createWriteStream(outPath as string);
|
|
265
|
+
stream.on('data', chunk => writeStream.write(chunk));
|
|
266
|
+
stream.on('error', reject);
|
|
267
|
+
stream.on('end', () => resolve(writeStream.close()));
|
|
268
|
+
});
|
|
269
|
+
|
|
270
|
+
const data = await s3Client.client.send(command);
|
|
271
|
+
await writeStreamToFile(data.Body as Readable);
|
|
272
|
+
// const metaData = data.Metadata;
|
|
273
|
+
} catch (err: any) {
|
|
274
|
+
if (err.name !== 'NoSuchKey') {
|
|
275
|
+
console.error('GetObjectCommandInput:', cmdParams);
|
|
276
|
+
console.error(err);
|
|
277
|
+
}
|
|
278
|
+
return null;
|
|
279
|
+
// throw err;
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
return true;
|
|
283
|
+
};
|
|
284
|
+
|
|
285
|
+
/**
|
|
286
|
+
* List objects in a S3 bucket
|
|
287
|
+
* @param bucket S3 bucket name
|
|
288
|
+
* @param prefix [optional] prefix for object names in the bucket
|
|
289
|
+
* @returns list of objects in the S3 bucket (optionally starting with the given prefix), null in case of error
|
|
290
|
+
*/
|
|
291
|
+
export const listObjects = async (bucket: string, prefix?: string) => {
|
|
292
|
+
if (!s3Client.client) tryInit();
|
|
293
|
+
if (!s3Client.client) return null;
|
|
294
|
+
if (!bucket) return null;
|
|
295
|
+
|
|
296
|
+
let filesList: string[] = [];
|
|
297
|
+
|
|
298
|
+
let cmdParams: ListObjectsV2CommandInput = { Bucket: bucket };
|
|
299
|
+
if (prefix) cmdParams = { ...cmdParams, Prefix: prefix };
|
|
300
|
+
const command = new ListObjectsV2Command(cmdParams);
|
|
301
|
+
|
|
302
|
+
try {
|
|
303
|
+
const results = await s3Client.client.send(command);
|
|
304
|
+
const items = results.Contents;
|
|
305
|
+
|
|
306
|
+
// Ref: https://docs.aws.amazon.com/AmazonS3/latest/userguide/ListingKeysUsingAPIs.html
|
|
307
|
+
if (results.IsTruncated) {
|
|
308
|
+
warn('[listObjects] Partial results obtained! Consider pagination.');
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
if (items) {
|
|
312
|
+
filesList = items.map(t => t.Key).filter(e => !!e) as string[];
|
|
313
|
+
}
|
|
314
|
+
} catch (err) {
|
|
315
|
+
console.error('ListObjectsV2CommandInput:', cmdParams);
|
|
316
|
+
console.error(err);
|
|
317
|
+
return null;
|
|
318
|
+
// throw err;
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
return filesList;
|
|
322
|
+
};
|
|
323
|
+
|
|
324
|
+
/**
|
|
325
|
+
* Get head content for a file in S3 bucket
|
|
326
|
+
* @param bucket S3 bucket name
|
|
327
|
+
* @param s3path S3 path of the file
|
|
328
|
+
* @returns head content for the given file, null in case of error
|
|
329
|
+
*/
|
|
330
|
+
export const getObjectHead = async (bucket: string, s3path: string) => {
|
|
331
|
+
if (!s3Client.client) tryInit();
|
|
332
|
+
if (!s3Client.client) return null;
|
|
333
|
+
if (!bucket || !s3path) return null;
|
|
334
|
+
|
|
335
|
+
let contents: HeadObject | null = null;
|
|
336
|
+
|
|
337
|
+
const cmdParams: HeadObjectCommandInput = { Bucket: bucket, Key: s3path };
|
|
338
|
+
const command = new HeadObjectCommand(cmdParams);
|
|
339
|
+
|
|
340
|
+
try {
|
|
341
|
+
const data = await s3Client.client.send(command);
|
|
342
|
+
if (data) {
|
|
343
|
+
const { ContentLength, ContentType, ETag, CacheControl, Expires, LastModified, Metadata } = data;
|
|
344
|
+
contents = { Key: s3path, ContentLength, ContentType, ETag, CacheControl, Expires, LastModified, Metadata };
|
|
345
|
+
}
|
|
346
|
+
} catch (err: any) {
|
|
347
|
+
if (err.name !== 'NotFound') {
|
|
348
|
+
console.error('HeadObjectCommandInput:', cmdParams);
|
|
349
|
+
console.error(err);
|
|
350
|
+
}
|
|
351
|
+
return null;
|
|
352
|
+
// throw err;
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
return contents;
|
|
356
|
+
};
|
|
357
|
+
|
|
358
|
+
/**
|
|
359
|
+
* Get head contents for a list of files in S3 bucket
|
|
360
|
+
* @param bucket S3 bucket name
|
|
361
|
+
* @param s3paths list of S3 paths of the files
|
|
362
|
+
* @returns head contents for the given files, null in case of error
|
|
363
|
+
*/
|
|
364
|
+
export const getObjectHeadsAll = async (bucket: string, s3paths: string[]) => {
|
|
365
|
+
if (!s3Client.client) tryInit();
|
|
366
|
+
if (!s3Client.client) return null;
|
|
367
|
+
if (!bucket || !s3paths || !Array.isArray(s3paths)) return null;
|
|
368
|
+
if (!s3paths.length) return [];
|
|
369
|
+
|
|
370
|
+
let contents: (HeadObject | null)[] = [];
|
|
371
|
+
|
|
372
|
+
const chunkedItems = chunkifyArray(s3paths, CHUNK_SIZE);
|
|
373
|
+
|
|
374
|
+
for (let i = 0; i < chunkedItems.length; i += 1) {
|
|
375
|
+
const chunk = chunkedItems[i];
|
|
376
|
+
const pList = chunk.map(item => getObjectHead(bucket, item));
|
|
377
|
+
const rList = await Promise.all(pList); // eslint-disable-line no-await-in-loop
|
|
378
|
+
|
|
379
|
+
contents = contents.concat(rList);
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
if (contents.length) {
|
|
383
|
+
contents = contents.filter(e => !!e);
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
return contents;
|
|
387
|
+
};
|
|
388
|
+
|
|
389
|
+
/**
|
|
390
|
+
* Delete a file in S3 bucket
|
|
391
|
+
* @param bucket S3 bucket name
|
|
392
|
+
* @param s3path S3 file path to be deleted
|
|
393
|
+
* @returns true if delete is successful, null in case of error
|
|
394
|
+
*/
|
|
395
|
+
export const deleteObject = async (bucket: string, s3path: string) => {
|
|
396
|
+
if (!s3Client.client) tryInit();
|
|
397
|
+
if (!s3Client.client) return null;
|
|
398
|
+
if (!bucket || !s3path) return null;
|
|
399
|
+
|
|
400
|
+
const cmdParams: DeleteObjectCommandInput = { Bucket: bucket, Key: s3path };
|
|
401
|
+
const command = new DeleteObjectCommand(cmdParams);
|
|
402
|
+
|
|
403
|
+
try {
|
|
404
|
+
await s3Client.client.send(command);
|
|
405
|
+
} catch (err) {
|
|
406
|
+
console.error('DeleteObjectCommandInput:', cmdParams);
|
|
407
|
+
console.error(err);
|
|
408
|
+
return null;
|
|
409
|
+
// throw err;
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
return true;
|
|
413
|
+
};
|
|
414
|
+
|
|
415
|
+
const batchDeleteObjects = async (bucket: string, s3paths: string[]) => {
|
|
416
|
+
if (!s3Client.client) tryInit();
|
|
417
|
+
if (!s3Client.client) return null;
|
|
418
|
+
if (!bucket || !s3paths || !Array.isArray(s3paths)) return null;
|
|
419
|
+
if (!s3paths.length) return false;
|
|
420
|
+
|
|
421
|
+
const keys = s3paths.map(key => ({ Key: key }));
|
|
422
|
+
const cmdParams: DeleteObjectsCommandInput = { Bucket: bucket, Delete: { Objects: keys } };
|
|
423
|
+
const command = new DeleteObjectsCommand(cmdParams);
|
|
424
|
+
|
|
425
|
+
try {
|
|
426
|
+
await s3Client.client.send(command);
|
|
427
|
+
} catch (err) {
|
|
428
|
+
console.error('DeleteObjectsCommandInput:', cmdParams);
|
|
429
|
+
console.error(err);
|
|
430
|
+
return null;
|
|
431
|
+
// throw err;
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
return true;
|
|
435
|
+
};
|
|
436
|
+
|
|
437
|
+
/**
|
|
438
|
+
* Delete a list of files in S3 bucket
|
|
439
|
+
* @param bucket S3 bucket name
|
|
440
|
+
* @param s3paths list of S3 file paths to be deleted
|
|
441
|
+
* @returns true if all deletes are successful, null in case of error
|
|
442
|
+
*/
|
|
443
|
+
export const deleteObjectsAll = async (bucket: string, s3paths: string[]) => {
|
|
444
|
+
if (!s3Client.client) tryInit();
|
|
445
|
+
if (!s3Client.client) return null;
|
|
446
|
+
if (!bucket || !s3paths || !Array.isArray(s3paths)) return null;
|
|
447
|
+
if (!s3paths.length) return false;
|
|
448
|
+
|
|
449
|
+
let errFlag = false;
|
|
450
|
+
|
|
451
|
+
const batchedFiles = chunkifyArray(s3paths, BATCH_SIZE);
|
|
452
|
+
const chunkedFiles = chunkifyArray(batchedFiles, CHUNK_SIZE);
|
|
453
|
+
|
|
454
|
+
for (let i = 0; i < chunkedFiles.length; i += 1) {
|
|
455
|
+
const batchFiles = chunkedFiles[i];
|
|
456
|
+
|
|
457
|
+
const brlist = batchFiles.map(paths => batchDeleteObjects(bucket, paths));
|
|
458
|
+
const bslist = await Promise.all(brlist); // eslint-disable-line no-await-in-loop
|
|
459
|
+
|
|
460
|
+
const isSuccess = bslist.every(e => e === true);
|
|
461
|
+
if (!isSuccess) errFlag = true;
|
|
462
|
+
}
|
|
463
|
+
|
|
464
|
+
return errFlag ? null : true;
|
|
465
|
+
};
|
|
466
|
+
|
|
467
|
+
export interface HeadObject {
|
|
468
|
+
Key: string;
|
|
469
|
+
ContentLength?: number;
|
|
470
|
+
ContentType?: string;
|
|
471
|
+
ETag?: string;
|
|
472
|
+
CacheControl?: string;
|
|
473
|
+
Expires?: Date;
|
|
474
|
+
LastModified?: Date;
|
|
475
|
+
Metadata?: StringIndexable;
|
|
476
|
+
}
|