@hdriel/aws-utils 1.2.0 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +144 -27
- package/dist/index.d.cts +14 -8
- package/dist/index.d.ts +14 -8
- package/dist/index.js +142 -26
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -89,7 +89,8 @@ __export(index_exports, {
 LambdaUtil: () => LambdaUtil,
 S3LocalstackUtil: () => S3LocalstackUtil,
 S3Util: () => S3Util,
-SNSUtil: () => SNSUtil
+SNSUtil: () => SNSUtil,
+SUPPORTED_IFRAME_EXTENSIONS: () => SUPPORTED_IFRAME_EXTENSIONS
 });
 module.exports = __toCommonJS(index_exports);

@@ -279,6 +280,7 @@ var import_archiver = __toESM(require("archiver"), 1);
 var import_node_stream2 = require("stream");
 var import_multer_s3 = __toESM(require("multer-s3"), 1);
 var import_multer = __toESM(require("multer"), 1);
+var import_p_limit = __toESM(require("p-limit"), 1);
 var import_client_s35 = require("@aws-sdk/client-s3");

 // src/utils/consts.ts
@@ -288,10 +290,91 @@ var ACLs = /* @__PURE__ */ ((ACLs2) => {
 ACLs2["publicReadWrite"] = "public-read-write";
 return ACLs2;
 })(ACLs || {});
-
-//
-
-
+var SUPPORTED_IFRAME_EXTENSIONS = [
+// Images
+"jpg",
+"jpeg",
+"png",
+"gif",
+"bmp",
+"webp",
+"svg",
+"ico",
+"tif",
+"tiff",
+"heic",
+"heif",
+"raw",
+"cr2",
+"nef",
+"arw",
+// Videos
+"mp4",
+"avi",
+"mov",
+"wmv",
+"flv",
+"mkv",
+"webm",
+"mpeg",
+"mpg",
+"m4v",
+"3gp",
+"ogv",
+"ts",
+"mts",
+"m2ts",
+// Documents
+"pdf",
+// Text
+"txt",
+"csv",
+"json",
+"xml",
+"md",
+"log",
+"yaml",
+"yml",
+"ini",
+"conf",
+"cfg",
+// Code
+"js",
+"ts",
+"jsx",
+"tsx",
+"py",
+"java",
+"c",
+"cpp",
+"h",
+"cs",
+"php",
+"rb",
+"go",
+"rs",
+"swift",
+"kt",
+"scala",
+// Audio
+"mp3",
+"wav",
+"ogg",
+"flac",
+"aac",
+"m4a",
+"wma",
+"aiff",
+"ape",
+"opus",
+// Web
+"html",
+"htm",
+"css",
+"scss",
+"sass",
+"less"
+];

 // src/utils/helpers.ts
 var import_bytes = __toESM(require("bytes"), 1);
@@ -1110,7 +1193,7 @@ var S3File = class extends S3Directory {
 return url;
 });
 }
-sizeOf(fileKey, unit) {
+sizeOf(fileKey, unit = "b") {
 return __async(this, null, function* () {
 var _a2, _b, _c;
 const normalizedKey = getNormalizedPath(fileKey);
@@ -1232,15 +1315,16 @@ var S3File = class extends S3Directory {
 var pump = (0, import_util.promisify)(import_stream.pipeline);
 var S3Stream = class _S3Stream extends S3File {
 constructor(_a2) {
-var _b = _a2, { maxUploadFileSizeRestriction = "10GB" } = _b, props = __objRest(_b, ["maxUploadFileSizeRestriction"]);
+var _b = _a2, { maxUploadFileSizeRestriction = "10GB", concurrencyVideoLimit = 0 } = _b, props = __objRest(_b, ["maxUploadFileSizeRestriction", "concurrencyVideoLimit"]);
 super(props);
 __publicField(this, "maxUploadFileSizeRestriction");
+__publicField(this, "s3Limiter");
 __publicField(this, "streamImageFileCtrl", ({
 fileKey: _fileKey,
 queryField = "file",
 paramsField = "file",
 headerField = "x-fileKey",
-
+cachingAge: _cachingAge = "1y"
 } = {}) => {
 return (req, res, next) => __async(this, null, function* () {
 var _a2, _b, _c, _d, _e, _f, _g, _h;
@@ -1265,8 +1349,10 @@ var S3Stream = class _S3Stream extends S3File {
 const contentType = mimeTypeMap[ext] || "application/octet-stream";
 res.setHeader("Content-Type", contentType);
 res.setHeader("Content-Length", imageBuffer.length);
-const cachingAge = !
-if (cachingAge)
+const cachingAge = !_cachingAge || typeof _cachingAge === "number" ? _cachingAge : getTotalSeconds(_cachingAge);
+if (cachingAge) {
+res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+}
 res.status(200).send(imageBuffer);
 } catch (error) {
 (_h = this.logger) == null ? void 0 : _h.warn(req.id, "image fileKey not found", __spreadValues({
@@ -1276,19 +1362,20 @@ var S3Stream = class _S3Stream extends S3File {
 }
 });
 });
-__publicField(this, "
+__publicField(this, "streamBufferFileCtrl", ({
 fileKey: _fileKey,
+filename: _filename,
 queryField = "file",
 paramsField = "file",
 headerField = "x-fileKey",
-
+cachingAge: _cachingAge = "1h"
 } = {}) => {
 return (req, res, next) => __async(this, null, function* () {
 var _a2, _b, _c, _d, _e, _f, _g, _h;
 let fileKey = _fileKey || (((_a2 = req.params) == null ? void 0 : _a2[paramsField]) ? decodeURIComponent((_b = req.params) == null ? void 0 : _b[paramsField]) : void 0) || (((_c = req.query) == null ? void 0 : _c[queryField]) ? decodeURIComponent((_d = req.query) == null ? void 0 : _d[queryField]) : void 0) || (((_e = req.headers) == null ? void 0 : _e[headerField]) ? decodeURIComponent((_f = req.headers) == null ? void 0 : _f[headerField]) : void 0);
 if (!fileKey) {
-(_g = this.logger) == null ? void 0 : _g.warn(req.id, "
-next(Error("
+(_g = this.logger) == null ? void 0 : _g.warn(req.id, "iframe fileKey is required");
+next(Error("iframe fileKey is required"));
 return;
 }
 try {
@@ -1305,12 +1392,14 @@ var S3Stream = class _S3Stream extends S3File {
 pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation"
 };
 const contentType = mimeTypeMap[ext] || "application/octet-stream";
-const filename = (0, import_pathe2.basename)(fileKey);
+const filename = _filename || (0, import_pathe2.basename)(fileKey);
 res.setHeader("Content-Type", contentType);
 res.setHeader("Content-Disposition", `inline; filename="${encodeURIComponent(filename)}"`);
-res.setHeader("Content-Length", fileBuffer.length);
-const cachingAge = !
-
+res.setHeader("Content-Length", String(fileBuffer.length));
+const cachingAge = !_cachingAge || typeof _cachingAge === "number" ? _cachingAge : getTotalSeconds(_cachingAge);
+if (cachingAge) {
+res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+}
 res.status(200).send(fileBuffer);
 } catch (error) {
 (_h = this.logger) == null ? void 0 : _h.warn(req.id, "pdf fileKey not found", __spreadValues({
@@ -1321,6 +1410,7 @@ var S3Stream = class _S3Stream extends S3File {
 });
 });
 this.maxUploadFileSizeRestriction = maxUploadFileSizeRestriction;
+this.s3Limiter = concurrencyVideoLimit ? (0, import_p_limit.default)(concurrencyVideoLimit) : null;
 }
 streamObjectFile(_0) {
 return __async(this, arguments, function* (fileKey, {
@@ -1358,7 +1448,7 @@ var S3Stream = class _S3Stream extends S3File {
 Bucket: this.bucket,
 Key: normalizedKey
 }, Range ? { Range } : {}));
-const data = yield s3Limiter(() => this.execute(cmd, { abortSignal }));
+const data = this.s3Limiter ? yield this.s3Limiter(() => this.execute(cmd, { abortSignal })) : yield this.execute(cmd, { abortSignal });
 const body = data.Body;
 if (!body) return null;
 return {
@@ -1513,11 +1603,13 @@ var S3Stream = class _S3Stream extends S3File {
 forDownloading = false,
 paramsField = "file",
 queryField = "file",
-headerField = "x-fileKey"
+headerField = "x-fileKey",
+streamMethod,
+cachingAge: _cachingAge = "1h"
 } = {}) {
 return (req, res, next) => __async(this, null, function* () {
 var _a2, _b, _c, _d, _e, _f, _g, _h, _i;
-
+const fileKey = _fileKey || (((_a2 = req.params) == null ? void 0 : _a2[paramsField]) ? (_b = req.params) == null ? void 0 : _b[paramsField] : void 0) || (((_c = req.query) == null ? void 0 : _c[queryField]) ? (_d = req.query) == null ? void 0 : _d[queryField] : void 0) || (((_e = req.headers) == null ? void 0 : _e[headerField]) ? decodeURIComponent((_f = req.headers) == null ? void 0 : _f[headerField]) : void 0);
 if (!fileKey || fileKey === "/") {
 (_g = this.logger) == null ? void 0 : _g.warn(req.id, "fileKey stream is required");
 next(Error("fileKey stream is required"));
@@ -1551,13 +1643,23 @@ var S3Stream = class _S3Stream extends S3File {
 }
 const fileInfo = yield this.fileInfo(normalizedKey);
 const fileName = filename || normalizedKey.split("/").pop() || "download";
-
-
-
-
+const contentType = fileInfo.ContentType || "application/octet-stream";
+const ext = (0, import_pathe2.extname)(fileKey).slice(1).toLowerCase();
+const inlineTypes = ["text/", "image/", "application/pdf", "video/", "audio/"];
+const canDisplayInline = SUPPORTED_IFRAME_EXTENSIONS.includes(ext) || inlineTypes.some((type) => contentType.startsWith(type));
+res.setHeader("Content-Type", contentType);
 if (fileInfo.ContentLength) {
 res.setHeader("Content-Length", String(fileInfo.ContentLength));
 }
+if (forDownloading || !canDisplayInline) {
+res.setHeader("Content-Disposition", `attachment; filename="${encodeURIComponent(fileName)}"`);
+} else {
+res.setHeader("Content-Disposition", `inline; filename="${encodeURIComponent(fileName)}"`);
+}
+const cachingAge = !_cachingAge || typeof _cachingAge === "number" ? _cachingAge : getTotalSeconds(_cachingAge);
+if (cachingAge) {
+res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+}
 stream.on("error", (err) => {
 var _a3, _b2;
 (_a3 = this.logger) == null ? void 0 : _a3.warn(this.reqId, "Stream error", { fileKey: normalizedKey, error: err });
@@ -1569,7 +1671,12 @@ var S3Stream = class _S3Stream extends S3File {
 (_a3 = stream == null ? void 0 : stream.destroy) == null ? void 0 : _a3.call(stream);
 req.off("close", onClose);
 });
-
+streamMethod || (streamMethod = canDisplayInline ? "pipe" : "pipeline");
+if (streamMethod === "pipeline") {
+yield pump(stream, res);
+} else {
+stream.pipe(res);
+}
 req.off("close", onClose);
 } catch (error) {
 abort.abort();
@@ -1785,6 +1892,7 @@ var S3Stream = class _S3Stream extends S3File {
 }),
 key: (req, file, cb) => __async(this, null, function* () {
 let filename;
+file.originalname = decodeURIComponent(file.originalname);
 if (typeof _filename === "function") {
 filename = yield _filename(req, file);
 } else if (_filename) {
@@ -1896,6 +2004,14 @@ var S3Stream = class _S3Stream extends S3File {
 * Middleware for uploading multiple files with different field names
 * Adds the uploaded files info to req.s3FilesByField
 */
+/*
+example
+uploadFieldsFiles([
+{ name: 'cardPosterSrc', maxCount: 1 },
+{ name: 'sectionPosterSrc', maxCount: 1 },
+{ name: 'imageSrc', maxCount: 1 },
+]) as any,
+*/
 // uploadFieldsFiles(
 // fields: Array<{ name: string; directory: string; maxCount?: number; options?: S3UploadOptions }>
 // ): RequestHandler {
@@ -2083,5 +2199,6 @@ var SNSUtil = class {
 LambdaUtil,
 S3LocalstackUtil,
 S3Util,
-SNSUtil
+SNSUtil,
+SUPPORTED_IFRAME_EXTENSIONS
 });
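Summary of the index.cjs changes above: a new SUPPORTED_IFRAME_EXTENSIONS export, an optional p-limit based concurrency gate (concurrencyVideoLimit), a configurable cachingAge on the streaming controllers, a new streamBufferFileCtrl controller with a filename override, and a streamMethod switch between stream.pipe and stream.pipeline. The sketch below shows how these options might be wired into an Express app; only the option names come from this diff, while the route paths, bucket name, and the assumption that the exported S3Util exposes the S3Stream controllers are illustrative.

    import express from 'express';
    import { S3Util, SUPPORTED_IFRAME_EXTENSIONS } from '@hdriel/aws-utils';

    const app = express();

    // Hypothetical configuration: only maxUploadFileSizeRestriction and concurrencyVideoLimit appear in this diff.
    const s3 = new S3Util({
        bucket: 'my-bucket', // assumed prop; see S3UtilProps in the package for the real shape
        maxUploadFileSizeRestriction: '10GB',
        concurrencyVideoLimit: 3, // new in 1.2.2: caps concurrent GetObject calls through p-limit(3)
    });

    // streamImageFileCtrl and the new streamBufferFileCtrl return an Express handler directly
    // and now accept a cachingAge that becomes a Cache-Control max-age header.
    app.get('/images/:file', s3.streamImageFileCtrl({ cachingAge: '1y' }));
    app.get('/docs/:file', s3.streamBufferFileCtrl({ filename: 'report.pdf', cachingAge: '1h' }));

    // streamFileCtrl resolves to a handler and now accepts streamMethod ('pipe' | 'pipeline') and cachingAge.
    app.get('/files/:file', await s3.streamFileCtrl({ streamMethod: 'pipeline', cachingAge: '1h' }));

    // The new export doubles as a whitelist for deciding whether a file is inline-previewable.
    const canPreview = (ext: string) => SUPPORTED_IFRAME_EXTENSIONS.includes(ext.toLowerCase());

    app.listen(3000);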
package/dist/index.d.cts
CHANGED
@@ -76,6 +76,7 @@ declare enum ACLs {
 publicRead = "public-read",
 publicReadWrite = "public-read-write"
 }
+declare const SUPPORTED_IFRAME_EXTENSIONS: string[];

 interface ContentFile {
 Key: string;
@@ -263,7 +264,7 @@ declare class S3File extends S3Directory {
 taggingFile(fileKey: string, tag: Tag | Tag[]): Promise<boolean>;
 fileVersion(fileKey: string): Promise<string>;
 fileUrl(fileKey: string, expiresIn?: number | StringValue): Promise<string>;
-sizeOf(fileKey: string, unit
+sizeOf(fileKey: string, unit?: Unit): Promise<number>;
 fileExists(fileKey: string): Promise<boolean>;
 fileContent(fileKey: string, format?: 'buffer' | 'base64' | 'utf8'): Promise<Buffer | string>;
 uploadFileContent(fileKey: string, fileData: Buffer | Readable | string | Uint8Array | object, { acl, version, prettier, }?: {
@@ -276,10 +277,12 @@ declare class S3File extends S3Directory {

 type S3StreamProps = S3FileProps & {
 maxUploadFileSizeRestriction?: ByteUnitStringValue;
+concurrencyVideoLimit?: number | null;
 };
 declare class S3Stream extends S3File {
 private readonly maxUploadFileSizeRestriction;
-
+private readonly s3Limiter;
+constructor({ maxUploadFileSizeRestriction, concurrencyVideoLimit, ...props }: S3StreamProps);
 protected streamObjectFile(fileKey: string, { Range, checkFileExists, abortSignal, }?: {
 Range?: string;
 checkFileExists?: boolean;
@@ -309,27 +312,30 @@ declare class S3Stream extends S3File {
 headerField?: string;
 streamTimeoutMS?: number | undefined;
 }): Promise<(req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<any>>;
-streamImageFileCtrl: ({ fileKey: _fileKey, queryField, paramsField, headerField,
+streamImageFileCtrl: ({ fileKey: _fileKey, queryField, paramsField, headerField, cachingAge: _cachingAge, }?: {
 fileKey?: string;
 queryField?: string;
 paramsField?: string;
 headerField?: string;
-
+cachingAge?: null | number | StringValue;
 }) => (req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<void>;
-
+streamBufferFileCtrl: ({ fileKey: _fileKey, filename: _filename, queryField, paramsField, headerField, cachingAge: _cachingAge, }?: {
 fileKey?: string;
+filename?: string;
 queryField?: string;
 paramsField?: string;
 headerField?: string;
-
+cachingAge?: null | number | StringValue;
 }) => (req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<void>;
-streamFileCtrl({ fileKey: _fileKey, filename, forDownloading, paramsField, queryField, headerField, }?: {
+streamFileCtrl({ fileKey: _fileKey, filename, forDownloading, paramsField, queryField, headerField, streamMethod, cachingAge: _cachingAge, }?: {
 fileKey?: string;
 filename?: string;
 forDownloading?: boolean;
 paramsField?: string;
 queryField?: string;
 headerField?: string;
+cachingAge?: null | number | StringValue;
+streamMethod?: 'pipe' | 'pipeline';
 }): Promise<(req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<void>>;
 streamZipFileCtr({ fileKey: _fileKey, filename: _filename, queryField, paramsField, headerField, compressionLevel, }?: {
 filename?: string;
@@ -420,4 +426,4 @@ declare class AWSConfigSharingUtil {
 };
 }

-export { ACLs, AWSConfigSharingUtil, type BucketInfo, type FILE_EXT, type FILE_TYPE, IAMUtil, LambdaUtil, S3LocalstackUtil, S3Util, type S3UtilProps, SNSUtil, type TreeDirectoryItem, type TreeFileItem, type UploadedS3File };
+export { ACLs, AWSConfigSharingUtil, type BucketInfo, type FILE_EXT, type FILE_TYPE, IAMUtil, LambdaUtil, S3LocalstackUtil, S3Util, type S3UtilProps, SNSUtil, SUPPORTED_IFRAME_EXTENSIONS, type TreeDirectoryItem, type TreeFileItem, type UploadedS3File };
package/dist/index.d.ts
CHANGED
@@ -76,6 +76,7 @@ declare enum ACLs {
 publicRead = "public-read",
 publicReadWrite = "public-read-write"
 }
+declare const SUPPORTED_IFRAME_EXTENSIONS: string[];

 interface ContentFile {
 Key: string;
@@ -263,7 +264,7 @@ declare class S3File extends S3Directory {
 taggingFile(fileKey: string, tag: Tag | Tag[]): Promise<boolean>;
 fileVersion(fileKey: string): Promise<string>;
 fileUrl(fileKey: string, expiresIn?: number | StringValue): Promise<string>;
-sizeOf(fileKey: string, unit
+sizeOf(fileKey: string, unit?: Unit): Promise<number>;
 fileExists(fileKey: string): Promise<boolean>;
 fileContent(fileKey: string, format?: 'buffer' | 'base64' | 'utf8'): Promise<Buffer | string>;
 uploadFileContent(fileKey: string, fileData: Buffer | Readable | string | Uint8Array | object, { acl, version, prettier, }?: {
@@ -276,10 +277,12 @@ declare class S3File extends S3Directory {

 type S3StreamProps = S3FileProps & {
 maxUploadFileSizeRestriction?: ByteUnitStringValue;
+concurrencyVideoLimit?: number | null;
 };
 declare class S3Stream extends S3File {
 private readonly maxUploadFileSizeRestriction;
-
+private readonly s3Limiter;
+constructor({ maxUploadFileSizeRestriction, concurrencyVideoLimit, ...props }: S3StreamProps);
 protected streamObjectFile(fileKey: string, { Range, checkFileExists, abortSignal, }?: {
 Range?: string;
 checkFileExists?: boolean;
@@ -309,27 +312,30 @@ declare class S3Stream extends S3File {
 headerField?: string;
 streamTimeoutMS?: number | undefined;
 }): Promise<(req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<any>>;
-streamImageFileCtrl: ({ fileKey: _fileKey, queryField, paramsField, headerField,
+streamImageFileCtrl: ({ fileKey: _fileKey, queryField, paramsField, headerField, cachingAge: _cachingAge, }?: {
 fileKey?: string;
 queryField?: string;
 paramsField?: string;
 headerField?: string;
-
+cachingAge?: null | number | StringValue;
 }) => (req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<void>;
-
+streamBufferFileCtrl: ({ fileKey: _fileKey, filename: _filename, queryField, paramsField, headerField, cachingAge: _cachingAge, }?: {
 fileKey?: string;
+filename?: string;
 queryField?: string;
 paramsField?: string;
 headerField?: string;
-
+cachingAge?: null | number | StringValue;
 }) => (req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<void>;
-streamFileCtrl({ fileKey: _fileKey, filename, forDownloading, paramsField, queryField, headerField, }?: {
+streamFileCtrl({ fileKey: _fileKey, filename, forDownloading, paramsField, queryField, headerField, streamMethod, cachingAge: _cachingAge, }?: {
 fileKey?: string;
 filename?: string;
 forDownloading?: boolean;
 paramsField?: string;
 queryField?: string;
 headerField?: string;
+cachingAge?: null | number | StringValue;
+streamMethod?: 'pipe' | 'pipeline';
 }): Promise<(req: Request$1 & any, res: Response & any, next: NextFunction & any) => Promise<void>>;
 streamZipFileCtr({ fileKey: _fileKey, filename: _filename, queryField, paramsField, headerField, compressionLevel, }?: {
 filename?: string;
@@ -420,4 +426,4 @@ declare class AWSConfigSharingUtil {
 };
 }

-export { ACLs, AWSConfigSharingUtil, type BucketInfo, type FILE_EXT, type FILE_TYPE, IAMUtil, LambdaUtil, S3LocalstackUtil, S3Util, type S3UtilProps, SNSUtil, type TreeDirectoryItem, type TreeFileItem, type UploadedS3File };
+export { ACLs, AWSConfigSharingUtil, type BucketInfo, type FILE_EXT, type FILE_TYPE, IAMUtil, LambdaUtil, S3LocalstackUtil, S3Util, type S3UtilProps, SNSUtil, SUPPORTED_IFRAME_EXTENSIONS, type TreeDirectoryItem, type TreeFileItem, type UploadedS3File };
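The .d.cts and .d.ts declaration changes are identical: sizeOf's unit parameter becomes optional (the bundles default it to "b"), S3StreamProps gains concurrencyVideoLimit, and the stream controllers gain cachingAge (null | number | StringValue) plus, on streamFileCtrl, streamMethod ('pipe' | 'pipeline'). All three controllers normalize cachingAge the same way: a number is used as seconds, a duration string is converted via getTotalSeconds, and a falsy value suppresses the Cache-Control header. Below is a standalone sketch of that normalization; getTotalSeconds is only approximated here because its body is not part of this diff, and the StringValue alias is a simplified stand-in for the type the declarations import.

    // Simplified stand-in for the StringValue type referenced by the declarations.
    type StringValue = `${number}${'s' | 'm' | 'h' | 'd' | 'y'}`;

    // Rough approximation of the package's getTotalSeconds helper (its real implementation is outside this diff).
    const getTotalSeconds = (value: StringValue): number => {
        const unitSeconds: Record<string, number> = { s: 1, m: 60, h: 3600, d: 86400, y: 31536000 };
        const match = /^(\d+)([smhdy])$/.exec(value);
        return match ? Number(match[1]) * unitSeconds[match[2]] : 0;
    };

    // Mirrors the logic added in all three controllers:
    // a number is used as-is, a duration string is converted, and a falsy value disables caching.
    function resolveCachingAge(cachingAge: null | number | StringValue): number | null {
        if (!cachingAge) return null; // falsy disables the Cache-Control header entirely
        if (typeof cachingAge === 'number') return cachingAge; // numbers are already seconds
        return getTotalSeconds(cachingAge) || null; // duration strings become seconds
    }

    // resolveCachingAge('1h') -> 3600, resolveCachingAge(600) -> 600, resolveCachingAge(null) -> null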
package/dist/index.js
CHANGED
@@ -240,6 +240,7 @@ import archiver from "archiver";
 import { Readable as Readable2 } from "stream";
 import multerS3 from "multer-s3";
 import multer from "multer";
+import pLimit from "p-limit";
 import { GetObjectCommand as GetObjectCommand2 } from "@aws-sdk/client-s3";

 // src/utils/consts.ts
@@ -249,10 +250,91 @@ var ACLs = /* @__PURE__ */ ((ACLs2) => {
 ACLs2["publicReadWrite"] = "public-read-write";
 return ACLs2;
 })(ACLs || {});
-
-//
-
-
+var SUPPORTED_IFRAME_EXTENSIONS = [
+// Images
+"jpg",
+"jpeg",
+"png",
+"gif",
+"bmp",
+"webp",
+"svg",
+"ico",
+"tif",
+"tiff",
+"heic",
+"heif",
+"raw",
+"cr2",
+"nef",
+"arw",
+// Videos
+"mp4",
+"avi",
+"mov",
+"wmv",
+"flv",
+"mkv",
+"webm",
+"mpeg",
+"mpg",
+"m4v",
+"3gp",
+"ogv",
+"ts",
+"mts",
+"m2ts",
+// Documents
+"pdf",
+// Text
+"txt",
+"csv",
+"json",
+"xml",
+"md",
+"log",
+"yaml",
+"yml",
+"ini",
+"conf",
+"cfg",
+// Code
+"js",
+"ts",
+"jsx",
+"tsx",
+"py",
+"java",
+"c",
+"cpp",
+"h",
+"cs",
+"php",
+"rb",
+"go",
+"rs",
+"swift",
+"kt",
+"scala",
+// Audio
+"mp3",
+"wav",
+"ogg",
+"flac",
+"aac",
+"m4a",
+"wma",
+"aiff",
+"ape",
+"opus",
+// Web
+"html",
+"htm",
+"css",
+"scss",
+"sass",
+"less"
+];

 // src/utils/helpers.ts
 import bytes from "bytes";
@@ -1099,7 +1181,7 @@ var S3File = class extends S3Directory {
 return url;
 });
 }
-sizeOf(fileKey, unit) {
+sizeOf(fileKey, unit = "b") {
 return __async(this, null, function* () {
 var _a2, _b, _c;
 const normalizedKey = getNormalizedPath(fileKey);
@@ -1221,15 +1303,16 @@ var S3File = class extends S3Directory {
 var pump = promisify(pipeline);
 var S3Stream = class _S3Stream extends S3File {
 constructor(_a2) {
-var _b = _a2, { maxUploadFileSizeRestriction = "10GB" } = _b, props = __objRest(_b, ["maxUploadFileSizeRestriction"]);
+var _b = _a2, { maxUploadFileSizeRestriction = "10GB", concurrencyVideoLimit = 0 } = _b, props = __objRest(_b, ["maxUploadFileSizeRestriction", "concurrencyVideoLimit"]);
 super(props);
 __publicField(this, "maxUploadFileSizeRestriction");
+__publicField(this, "s3Limiter");
 __publicField(this, "streamImageFileCtrl", ({
 fileKey: _fileKey,
 queryField = "file",
 paramsField = "file",
 headerField = "x-fileKey",
-
+cachingAge: _cachingAge = "1y"
 } = {}) => {
 return (req, res, next) => __async(this, null, function* () {
 var _a2, _b, _c, _d, _e, _f, _g, _h;
@@ -1254,8 +1337,10 @@ var S3Stream = class _S3Stream extends S3File {
 const contentType = mimeTypeMap[ext] || "application/octet-stream";
 res.setHeader("Content-Type", contentType);
 res.setHeader("Content-Length", imageBuffer.length);
-const cachingAge = !
-if (cachingAge)
+const cachingAge = !_cachingAge || typeof _cachingAge === "number" ? _cachingAge : getTotalSeconds(_cachingAge);
+if (cachingAge) {
+res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+}
 res.status(200).send(imageBuffer);
 } catch (error) {
 (_h = this.logger) == null ? void 0 : _h.warn(req.id, "image fileKey not found", __spreadValues({
@@ -1265,19 +1350,20 @@ var S3Stream = class _S3Stream extends S3File {
 }
 });
 });
-__publicField(this, "
+__publicField(this, "streamBufferFileCtrl", ({
 fileKey: _fileKey,
+filename: _filename,
 queryField = "file",
 paramsField = "file",
 headerField = "x-fileKey",
-
+cachingAge: _cachingAge = "1h"
 } = {}) => {
 return (req, res, next) => __async(this, null, function* () {
 var _a2, _b, _c, _d, _e, _f, _g, _h;
 let fileKey = _fileKey || (((_a2 = req.params) == null ? void 0 : _a2[paramsField]) ? decodeURIComponent((_b = req.params) == null ? void 0 : _b[paramsField]) : void 0) || (((_c = req.query) == null ? void 0 : _c[queryField]) ? decodeURIComponent((_d = req.query) == null ? void 0 : _d[queryField]) : void 0) || (((_e = req.headers) == null ? void 0 : _e[headerField]) ? decodeURIComponent((_f = req.headers) == null ? void 0 : _f[headerField]) : void 0);
 if (!fileKey) {
-(_g = this.logger) == null ? void 0 : _g.warn(req.id, "
-next(Error("
+(_g = this.logger) == null ? void 0 : _g.warn(req.id, "iframe fileKey is required");
+next(Error("iframe fileKey is required"));
 return;
 }
 try {
@@ -1294,12 +1380,14 @@ var S3Stream = class _S3Stream extends S3File {
 pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation"
 };
 const contentType = mimeTypeMap[ext] || "application/octet-stream";
-const filename = basename2(fileKey);
+const filename = _filename || basename2(fileKey);
 res.setHeader("Content-Type", contentType);
 res.setHeader("Content-Disposition", `inline; filename="${encodeURIComponent(filename)}"`);
-res.setHeader("Content-Length", fileBuffer.length);
-const cachingAge = !
-
+res.setHeader("Content-Length", String(fileBuffer.length));
+const cachingAge = !_cachingAge || typeof _cachingAge === "number" ? _cachingAge : getTotalSeconds(_cachingAge);
+if (cachingAge) {
+res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+}
 res.status(200).send(fileBuffer);
 } catch (error) {
 (_h = this.logger) == null ? void 0 : _h.warn(req.id, "pdf fileKey not found", __spreadValues({
@@ -1310,6 +1398,7 @@ var S3Stream = class _S3Stream extends S3File {
 });
 });
 this.maxUploadFileSizeRestriction = maxUploadFileSizeRestriction;
+this.s3Limiter = concurrencyVideoLimit ? pLimit(concurrencyVideoLimit) : null;
 }
 streamObjectFile(_0) {
 return __async(this, arguments, function* (fileKey, {
@@ -1347,7 +1436,7 @@ var S3Stream = class _S3Stream extends S3File {
 Bucket: this.bucket,
 Key: normalizedKey
 }, Range ? { Range } : {}));
-const data = yield s3Limiter(() => this.execute(cmd, { abortSignal }));
+const data = this.s3Limiter ? yield this.s3Limiter(() => this.execute(cmd, { abortSignal })) : yield this.execute(cmd, { abortSignal });
 const body = data.Body;
 if (!body) return null;
 return {
@@ -1502,11 +1591,13 @@ var S3Stream = class _S3Stream extends S3File {
 forDownloading = false,
 paramsField = "file",
 queryField = "file",
-headerField = "x-fileKey"
+headerField = "x-fileKey",
+streamMethod,
+cachingAge: _cachingAge = "1h"
 } = {}) {
 return (req, res, next) => __async(this, null, function* () {
 var _a2, _b, _c, _d, _e, _f, _g, _h, _i;
-
+const fileKey = _fileKey || (((_a2 = req.params) == null ? void 0 : _a2[paramsField]) ? (_b = req.params) == null ? void 0 : _b[paramsField] : void 0) || (((_c = req.query) == null ? void 0 : _c[queryField]) ? (_d = req.query) == null ? void 0 : _d[queryField] : void 0) || (((_e = req.headers) == null ? void 0 : _e[headerField]) ? decodeURIComponent((_f = req.headers) == null ? void 0 : _f[headerField]) : void 0);
 if (!fileKey || fileKey === "/") {
 (_g = this.logger) == null ? void 0 : _g.warn(req.id, "fileKey stream is required");
 next(Error("fileKey stream is required"));
@@ -1540,13 +1631,23 @@ var S3Stream = class _S3Stream extends S3File {
 }
 const fileInfo = yield this.fileInfo(normalizedKey);
 const fileName = filename || normalizedKey.split("/").pop() || "download";
-
-
-
-
+const contentType = fileInfo.ContentType || "application/octet-stream";
+const ext = extname(fileKey).slice(1).toLowerCase();
+const inlineTypes = ["text/", "image/", "application/pdf", "video/", "audio/"];
+const canDisplayInline = SUPPORTED_IFRAME_EXTENSIONS.includes(ext) || inlineTypes.some((type) => contentType.startsWith(type));
+res.setHeader("Content-Type", contentType);
 if (fileInfo.ContentLength) {
 res.setHeader("Content-Length", String(fileInfo.ContentLength));
 }
+if (forDownloading || !canDisplayInline) {
+res.setHeader("Content-Disposition", `attachment; filename="${encodeURIComponent(fileName)}"`);
+} else {
+res.setHeader("Content-Disposition", `inline; filename="${encodeURIComponent(fileName)}"`);
+}
+const cachingAge = !_cachingAge || typeof _cachingAge === "number" ? _cachingAge : getTotalSeconds(_cachingAge);
+if (cachingAge) {
+res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+}
 stream.on("error", (err) => {
 var _a3, _b2;
 (_a3 = this.logger) == null ? void 0 : _a3.warn(this.reqId, "Stream error", { fileKey: normalizedKey, error: err });
@@ -1558,7 +1659,12 @@ var S3Stream = class _S3Stream extends S3File {
 (_a3 = stream == null ? void 0 : stream.destroy) == null ? void 0 : _a3.call(stream);
 req.off("close", onClose);
 });
-
+streamMethod || (streamMethod = canDisplayInline ? "pipe" : "pipeline");
+if (streamMethod === "pipeline") {
+yield pump(stream, res);
+} else {
+stream.pipe(res);
+}
 req.off("close", onClose);
 } catch (error) {
 abort.abort();
@@ -1774,6 +1880,7 @@ var S3Stream = class _S3Stream extends S3File {
 }),
 key: (req, file, cb) => __async(this, null, function* () {
 let filename;
+file.originalname = decodeURIComponent(file.originalname);
 if (typeof _filename === "function") {
 filename = yield _filename(req, file);
 } else if (_filename) {
@@ -1885,6 +1992,14 @@ var S3Stream = class _S3Stream extends S3File {
 * Middleware for uploading multiple files with different field names
 * Adds the uploaded files info to req.s3FilesByField
 */
+/*
+example
+uploadFieldsFiles([
+{ name: 'cardPosterSrc', maxCount: 1 },
+{ name: 'sectionPosterSrc', maxCount: 1 },
+{ name: 'imageSrc', maxCount: 1 },
+]) as any,
+*/
 // uploadFieldsFiles(
 // fields: Array<{ name: string; directory: string; maxCount?: number; options?: S3UploadOptions }>
 // ): RequestHandler {
@@ -2071,5 +2186,6 @@ export {
 LambdaUtil,
 S3LocalstackUtil,
 S3Util,
-SNSUtil
+SNSUtil,
+SUPPORTED_IFRAME_EXTENSIONS
 };
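The other notable runtime change, present in both bundles, is the concurrency gate around GetObject: when concurrencyVideoLimit is set, streamObjectFile funnels the S3 call through a p-limit limiter, and when it is not, the call runs directly (in 1.2.0 the call always went through an s3Limiter reference). A minimal standalone sketch of the same pattern follows; the fetchObject stub and the StreamLike class are hypothetical stand-ins, since only the gating logic appears in this diff.

    import pLimit from 'p-limit';

    // Stand-in for the class's execute(GetObjectCommand) call; the real method issues an AWS SDK request.
    const fetchObject = async (key: string): Promise<string> => `body-of-${key}`;

    class StreamLike {
        private readonly s3Limiter: ReturnType<typeof pLimit> | null;

        constructor(concurrencyVideoLimit = 0) {
            // Same construction as in the diff: create a limiter only when a positive limit is configured.
            this.s3Limiter = concurrencyVideoLimit ? pLimit(concurrencyVideoLimit) : null;
        }

        async streamObject(key: string): Promise<string> {
            // Same branch as in the diff: gate through the limiter when present, otherwise call directly.
            return this.s3Limiter ? this.s3Limiter(() => fetchObject(key)) : fetchObject(key);
        }
    }

    // With a limit of 2, at most two fetches are in flight even though ten are requested at once.
    const s3 = new StreamLike(2);
    await Promise.all(Array.from({ length: 10 }, (_, i) => s3.streamObject(`videos/${i}.mp4`)));

Requests beyond the configured limit queue inside the limiter instead of opening additional concurrent S3 reads.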
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@hdriel/aws-utils",
-"version": "1.2.0",
+"version": "1.2.2",
 "description": "Simplified AWS SDK (v3) utilities for S3 (upload, download, streaming) with TypeScript support",
 "author": "Hadriel Benjo (https://github.com/hdriel)",
 "type": "module",