@hdriel/aws-utils 1.0.4 → 1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -24,6 +24,18 @@ var __spreadValues = (a, b) => {
  return a;
  };
  var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
+ var __objRest = (source, exclude) => {
+ var target = {};
+ for (var prop in source)
+ if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
+ target[prop] = source[prop];
+ if (source != null && __getOwnPropSymbols)
+ for (var prop of __getOwnPropSymbols(source)) {
+ if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
+ target[prop] = source[prop];
+ }
+ return target;
+ };
  var __export = (target, all) => {
  for (var name in all)
  __defProp(target, name, { get: all[name], enumerable: true });
@@ -75,7 +87,8 @@ __export(index_exports, {
  AWSConfigSharingUtil: () => AWSConfigSharingUtil,
  IAMUtil: () => IAMUtil,
  LambdaUtil: () => LambdaUtil,
- S3BucketUtil: () => S3BucketUtil,
+ S3LocalstackUtil: () => S3LocalstackUtil,
+ S3Util: () => S3Util,
  SNSUtil: () => SNSUtil
  });
  module.exports = __toCommonJS(index_exports);
@@ -254,20 +267,19 @@ var LambdaUtil = class {
  }
  };
 
- // src/aws/s3-bucket.ts
- var import_ms = __toESM(require("ms"), 1);
+ // src/aws/s3/s3-util.localstack.ts
+ var import_client_s36 = require("@aws-sdk/client-s3");
+
+ // src/aws/s3/s3-stream.ts
  var import_pathe = __toESM(require("pathe"), 1);
- var import_http = __toESM(require("http"), 1);
- var import_https = __toESM(require("https"), 1);
  var import_stream = require("stream");
  var import_util = require("util");
- var import_lib_storage = require("@aws-sdk/lib-storage");
- var import_s3_request_presigner = require("@aws-sdk/s3-request-presigner");
- var import_node_http_handler = require("@smithy/node-http-handler");
- var import_buffer = require("buffer");
+ var import_buffer2 = require("buffer");
  var import_archiver = __toESM(require("archiver"), 1);
- var import_node_stream = require("stream");
- var import_client_s3 = require("@aws-sdk/client-s3");
+ var import_node_stream2 = require("stream");
+ var import_multer_s3 = __toESM(require("multer-s3"), 1);
+ var import_multer = __toESM(require("multer"), 1);
+ var import_client_s35 = require("@aws-sdk/client-s3");
 
  // src/utils/consts.ts
  var ACLs = /* @__PURE__ */ ((ACLs2) => {
@@ -281,11 +293,8 @@ var ACLs = /* @__PURE__ */ ((ACLs2) => {
  var import_p_limit = __toESM(require("p-limit"), 1);
  var s3Limiter = (0, import_p_limit.default)(4);
 
- // src/aws/s3-bucket.ts
- var import_multer = __toESM(require("multer"), 1);
- var import_multer_s3 = __toESM(require("multer-s3"), 1);
+ // src/utils/helpers.ts
  var import_bytes = __toESM(require("bytes"), 1);
- var pump = (0, import_util.promisify)(import_stream.pipeline);
  var parseRangeHeader = (range, contentLength, chunkSize) => {
  if (!range || !range.startsWith("bytes=")) return null;
  const rangeParts = range.replace("bytes=", "").split("-");
@@ -298,34 +307,60 @@ var parseRangeHeader = (range, contentLength, chunkSize) => {
  }
  return [start, Math.min(end, end)];
  };
- var getNormalizedPath = (directoryPath) => decodeURIComponent((directoryPath == null ? void 0 : directoryPath.replace(/^\/+/, "").replace(/\/+$/, "")) || "");
- var S3BucketUtil = class _S3BucketUtil {
+ var getNormalizedPath = (directoryPath) => {
+ return decodeURIComponent((directoryPath == null ? void 0 : directoryPath.trim().replace(/^\/+/, "").replace(/\/+$/, "").replace(/\/+/g, "/")) || "");
+ };
+ var getFileSize = (maxFileSize, defaultMaxFileSize) => {
+ var _a2;
+ const fileSizeUnitValue = (_a2 = maxFileSize != null ? maxFileSize : defaultMaxFileSize) != null ? _a2 : "";
+ const fileSize = typeof fileSizeUnitValue === "number" ? fileSizeUnitValue : (0, import_bytes.default)(fileSizeUnitValue);
+ return fileSize != null ? fileSize : void 0;
+ };
+
+ // src/aws/s3/s3-file.ts
+ var import_buffer = require("buffer");
+ var import_node_stream = require("stream");
+ var import_ms = __toESM(require("ms"), 1);
+ var import_lib_storage = require("@aws-sdk/lib-storage");
+ var import_s3_request_presigner = require("@aws-sdk/s3-request-presigner");
+ var import_client_s34 = require("@aws-sdk/client-s3");
+
+ // src/aws/s3/s3-directory.ts
+ var import_client_s33 = require("@aws-sdk/client-s3");
+
+ // src/aws/s3/s3-bucket.ts
+ var import_client_s32 = require("@aws-sdk/client-s3");
+
+ // src/aws/s3/s3-base.ts
+ var import_http = __toESM(require("http"), 1);
+ var import_https = __toESM(require("https"), 1);
+ var import_node_http_handler = require("@smithy/node-http-handler");
+ var import_client_s3 = require("@aws-sdk/client-s3");
+ var S3Base = class {
  constructor({
  logger: logger2,
- bucket,
  reqId,
  accessKeyId = AWSConfigSharingUtil.accessKeyId,
  secretAccessKey = AWSConfigSharingUtil.secretAccessKey,
  endpoint = AWSConfigSharingUtil.endpoint,
  region = AWSConfigSharingUtil.region,
  s3ForcePathStyle = true,
- maxUploadFileSizeRestriction = "10GB"
+ // @ts-ignore
+ localstack = false
  }) {
  __publicField(this, "s3Client");
- __publicField(this, "bucket");
  __publicField(this, "endpoint");
  __publicField(this, "region");
  __publicField(this, "logger");
  __publicField(this, "reqId");
- __publicField(this, "maxUploadFileSizeRestriction");
+ __publicField(this, "localstack", false);
  const credentials = { accessKeyId, secretAccessKey };
  const options = __spreadValues(__spreadValues(__spreadValues({}, accessKeyId && secretAccessKey && { credentials }), endpoint && { endpoint }), region && { region });
  this.endpoint = endpoint;
  this.region = region;
- this.bucket = bucket;
  this.logger = logger2;
  this.reqId = reqId != null ? reqId : null;
- this.maxUploadFileSizeRestriction = maxUploadFileSizeRestriction;
+ this.localstack = localstack;
  const s3ClientParams = __spreadProps(__spreadValues(__spreadValues({}, options), s3ForcePathStyle && { forcePathStyle: s3ForcePathStyle }), {
  requestHandler: new import_node_http_handler.NodeHttpHandler({
  httpAgent: new import_http.default.Agent({ keepAlive: true, maxSockets: 300 }),
@@ -336,18 +371,40 @@ var S3BucketUtil = class _S3BucketUtil {
  });
  this.s3Client = new import_client_s3.S3Client(s3ClientParams);
  }
- get link() {
- return this.endpoint === "http://localhost:4566" ? `${this.endpoint}/${this.bucket}/` : `https://s3.${this.region}.amazonaws.com/${this.bucket}/`;
- }
  execute(command, options) {
  return __async(this, null, function* () {
  return this.s3Client.send(command, options);
  });
  }
- // ##### BUCKET BLOCK ##########################
+ };
+
+ // src/aws/s3/s3-bucket.ts
+ var S3Bucket = class extends S3Base {
+ constructor(_a2) {
+ var _b = _a2, { bucket } = _b, props = __objRest(_b, ["bucket"]);
+ super(props);
+ __publicField(this, "_bucket");
+ __publicField(this, "initializedBucket", "");
+ this._bucket = decodeURIComponent(bucket);
+ }
+ get link() {
+ return this.endpoint === "http://localhost:4566" ? `${this.endpoint}/${this.bucket}/` : `https://s3.${this.region}.amazonaws.com/${this.bucket}/`;
+ }
+ get bucket() {
+ return this._bucket;
+ }
+ changeBucket(bucket) {
+ this._bucket = decodeURIComponent(bucket);
+ this.initializedBucket = "";
+ }
  getBucketList() {
- return __async(this, arguments, function* (options = {}, includePublicAccess = false) {
- const command = new import_client_s3.ListBucketsCommand(options);
+ return __async(this, arguments, function* (_c = {}) {
+ var _d = _c, {
+ includePublicAccess
+ } = _d, options = __objRest(_d, [
+ "includePublicAccess"
+ ]);
+ const command = new import_client_s32.ListBucketsCommand(options);
  const response = yield this.execute(command);
  const responseData = (response == null ? void 0 : response.Buckets) || null;
  if (!responseData) return null;
@@ -355,7 +412,7 @@ var S3BucketUtil = class _S3BucketUtil {
  yield Promise.allSettled(
  responseData.map((data) => __async(this, null, function* () {
  const result = yield this.execute(
- new import_client_s3.GetPublicAccessBlockCommand({ Bucket: data.Name })
+ new import_client_s32.GetPublicAccessBlockCommand({ Bucket: data.Name })
  );
  data.PublicAccessBlockConfiguration = result.PublicAccessBlockConfiguration;
  }))
@@ -369,7 +426,7 @@ var S3BucketUtil = class _S3BucketUtil {
  var _a2, _b;
  const bucketName = this.bucket;
  try {
- yield this.execute(new import_client_s3.HeadBucketCommand({ Bucket: bucketName }));
+ yield this.execute(new import_client_s32.HeadBucketCommand({ Bucket: bucketName }));
  return true;
  } catch (err) {
  if (err.name !== "NotFound" && ((_a2 = err.$metadata) == null ? void 0 : _a2.httpStatusCode) !== 404) {
@@ -390,9 +447,9 @@ var S3BucketUtil = class _S3BucketUtil {
  (_a2 = this.logger) == null ? void 0 : _a2.info(this.reqId, `Bucket already exists.`, { bucketName });
  return;
  }
- const data = yield this.execute(new import_client_s3.CreateBucketCommand({ Bucket: bucketName }));
+ const data = yield this.execute(new import_client_s32.CreateBucketCommand({ Bucket: bucketName }));
  CREATE_PUBLICK_ACCESS_BLOCK: {
- const command = new import_client_s3.PutPublicAccessBlockCommand({
+ const command = new import_client_s32.PutPublicAccessBlockCommand({
  Bucket: bucketName,
  PublicAccessBlockConfiguration: {
  BlockPublicAcls: false,
@@ -416,7 +473,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  ]
  };
- const command = new import_client_s3.PutBucketPolicyCommand({ Bucket: bucketName, Policy: JSON.stringify(policy) });
+ const command = new import_client_s32.PutBucketPolicyCommand({ Bucket: bucketName, Policy: JSON.stringify(policy) });
  yield this.execute(command);
  }
  (_b = this.logger) == null ? void 0 : _b.info(this.reqId, `Public bucket created successfully.`, { bucketName });
@@ -437,21 +494,28 @@ var S3BucketUtil = class _S3BucketUtil {
  }, includeConstraintLocation && {
  CreateBucketConfiguration: { LocationConstraint: this.region }
  });
- const data = yield this.execute(new import_client_s3.CreateBucketCommand(createParams));
+ const data = yield this.execute(new import_client_s32.CreateBucketCommand(createParams));
  (_b = this.logger) == null ? void 0 : _b.info(this.reqId, `Private bucket created successfully.`, { bucketName });
  return data;
  });
  }
  initBucket() {
- return __async(this, arguments, function* (acl = "private" /* private */, includeConstraintLocation = false) {
+ return __async(this, arguments, function* (acl = "private" /* private */, {
+ includeConstraintLocation = false,
+ skipInitializedBucket = false
+ } = {}) {
  var _a2;
  const bucketName = this.bucket;
+ if (skipInitializedBucket && this.initializedBucket === bucketName) {
+ return;
+ }
  const isExists = yield this.isBucketExists();
  if (isExists) {
  (_a2 = this.logger) == null ? void 0 : _a2.info(this.reqId, `Bucket already exists.`, { bucketName });
  return;
  }
  const data = acl === "private" /* private */ ? yield this.initAsPrivateBucket(includeConstraintLocation) : yield this.initAsPublicBucket();
+ this.initializedBucket = bucketName;
  return data;
  });
  }
@@ -460,14 +524,14 @@ var S3BucketUtil = class _S3BucketUtil {
  let ContinuationToken = void 0;
  do {
  const listResp = yield this.execute(
- new import_client_s3.ListObjectsV2Command({
+ new import_client_s32.ListObjectsV2Command({
  Bucket: this.bucket,
  ContinuationToken
  })
  );
  if (listResp.Contents && listResp.Contents.length > 0) {
  yield this.execute(
- new import_client_s3.DeleteObjectsCommand({
+ new import_client_s32.DeleteObjectsCommand({
  Bucket: this.bucket,
  Delete: {
  Objects: listResp.Contents.map((obj) => ({ Key: obj.Key }))
@@ -492,7 +556,7 @@ var S3BucketUtil = class _S3BucketUtil {
  try {
  try {
  const headBucketResponse = yield this.execute(
- new import_client_s3.HeadBucketCommand(__spreadValues({ Bucket: bucketName }, options))
+ new import_client_s32.HeadBucketCommand(__spreadValues({ Bucket: bucketName }, options))
  );
  (_a2 = this.logger) == null ? void 0 : _a2.debug("bucketInfo", "HeadBucketCommandOutput", headBucketResponse);
  info.exists = true;
@@ -516,7 +580,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  try {
  const aclResponse = yield this.execute(
- new import_client_s3.GetBucketAclCommand({ Bucket: bucketName })
+ new import_client_s32.GetBucketAclCommand({ Bucket: bucketName })
  );
  (_e = this.logger) == null ? void 0 : _e.debug("bucketInfo", "GetBucketAclCommandOutput", aclResponse);
  info.acl = (_f = aclResponse.Grants) == null ? void 0 : _f.map((grant) => {
@@ -531,7 +595,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  try {
  const publicAccessResponse = yield this.execute(
- new import_client_s3.GetPublicAccessBlockCommand({ Bucket: bucketName })
+ new import_client_s32.GetPublicAccessBlockCommand({ Bucket: bucketName })
  );
  (_h = this.logger) == null ? void 0 : _h.debug("bucketInfo", "GetPublicAccessBlockCommandOutput", publicAccessResponse);
  info.publicAccessBlock = publicAccessResponse.PublicAccessBlockConfiguration;
@@ -542,7 +606,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  try {
  const policyResponse = yield this.execute(
- new import_client_s3.GetBucketPolicyCommand({ Bucket: bucketName })
+ new import_client_s32.GetBucketPolicyCommand({ Bucket: bucketName })
  );
  (_j = this.logger) == null ? void 0 : _j.debug("bucketInfo", "GetBucketPolicyCommandOutput", policyResponse);
  if (policyResponse.Policy) {
@@ -555,7 +619,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  try {
  const versioningResponse = yield this.execute(
- new import_client_s3.GetBucketVersioningCommand({ Bucket: bucketName })
+ new import_client_s32.GetBucketVersioningCommand({ Bucket: bucketName })
  );
  (_l = this.logger) == null ? void 0 : _l.debug("bucketInfo", "GetBucketVersioningCommandOutput", versioningResponse);
  info.versioning = versioningResponse.Status || "Disabled";
@@ -564,7 +628,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  try {
  const encryptionResponse = yield this.execute(
- new import_client_s3.GetBucketEncryptionCommand({ Bucket: bucketName })
+ new import_client_s32.GetBucketEncryptionCommand({ Bucket: bucketName })
  );
  (_n = this.logger) == null ? void 0 : _n.debug("bucketInfo", "GetBucketEncryptionCommandOutput", encryptionResponse);
  info.encryption = {
@@ -600,21 +664,46 @@ var S3BucketUtil = class _S3BucketUtil {
  yield this.emptyBucket();
  }
  const createParams = { Bucket: bucketName };
- const data = yield this.execute(new import_client_s3.DeleteBucketCommand(createParams));
+ const data = yield this.execute(new import_client_s32.DeleteBucketCommand(createParams));
  return data;
  });
  }
- // ##### DIRECTORY BLOCK ##########################
+ };
+
+ // src/aws/s3/s3-directory.ts
+ var S3Directory = class extends S3Bucket {
+ constructor(props) {
+ super(props);
+ }
+ // todo: checked!
+ directoryExists(directoryPath) {
+ return __async(this, null, function* () {
+ var _a2;
+ try {
+ const normalizedKey = getNormalizedPath(directoryPath);
+ if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
+ const command = new import_client_s33.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ yield this.execute(command);
+ return true;
+ } catch (error) {
+ if (error.name === "NotFound" || ((_a2 = error.$metadata) == null ? void 0 : _a2.httpStatusCode) === 404) {
+ return false;
+ }
+ throw error;
+ }
+ });
+ }
+ // todo: checked!
  createDirectory(directoryPath) {
  return __async(this, null, function* () {
  let normalizedPath = getNormalizedPath(directoryPath);
- if (!normalizedPath) throw new Error("No directory path provided");
- if (normalizedPath === "/") normalizedPath = "";
- const command = new import_client_s3.PutObjectCommand({ Bucket: this.bucket, Key: `${normalizedPath}/` });
+ if (!normalizedPath || normalizedPath === "/") throw new Error("No directory path provided");
+ const command = new import_client_s33.PutObjectCommand({ Bucket: this.bucket, Key: `${normalizedPath}/` });
  const result = yield this.execute(command);
  return result;
  });
  }
+ // todo: checked!
  deleteDirectory(directoryPath) {
  return __async(this, null, function* () {
  var _a2, _b, _c, _d, _e, _f, _g;
@@ -625,7 +714,7 @@ var S3BucketUtil = class _S3BucketUtil {
  let ContinuationToken = void 0;
  do {
  const listResp = yield this.execute(
- new import_client_s3.ListObjectsV2Command({
+ new import_client_s33.ListObjectsV2Command({
  Bucket: this.bucket,
  Prefix: normalizedPath,
  ContinuationToken
@@ -633,7 +722,7 @@ var S3BucketUtil = class _S3BucketUtil {
  );
  if (listResp.Contents && listResp.Contents.length > 0) {
  const deleteResult = yield this.execute(
- new import_client_s3.DeleteObjectsCommand({
+ new import_client_s33.DeleteObjectsCommand({
  Bucket: this.bucket,
  Delete: {
  Objects: listResp.Contents.map((obj) => ({ Key: obj.Key })),
@@ -652,7 +741,7 @@ var S3BucketUtil = class _S3BucketUtil {
  ContinuationToken = listResp.NextContinuationToken;
  } while (ContinuationToken);
  if (totalDeletedCount === 0) {
- const directoryExists = yield this.fileExists(normalizedPath);
+ const directoryExists = yield this.directoryExists(normalizedPath);
  if (!directoryExists) {
  (_d = this.logger) == null ? void 0 : _d.debug(this.reqId, `Directory not found`, { directoryPath: normalizedPath });
  return null;
@@ -660,7 +749,7 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  try {
  yield this.execute(
- new import_client_s3.DeleteObjectCommand({
+ new import_client_s33.DeleteObjectCommand({
  Bucket: this.bucket,
  Key: normalizedPath
  })
@@ -684,34 +773,64 @@ var S3BucketUtil = class _S3BucketUtil {
  };
  });
  }
+ // todo: checked!
  directoryList(directoryPath) {
  return __async(this, null, function* () {
+ var _a2;
  let normalizedPath = getNormalizedPath(directoryPath);
  if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
- else normalizedPath = "";
- const result = yield this.execute(
- new import_client_s3.ListObjectsV2Command({
- Bucket: this.bucket,
- Prefix: normalizedPath,
- Delimiter: "/"
- })
- );
+ else normalizedPath = this.localstack ? "" : "/";
+ let result;
+ if (normalizedPath === "") {
+ const [fileResponse, { CommonPrefixes }] = yield Promise.all([
+ this.execute(
+ new import_client_s33.ListObjectsV2Command({
+ Bucket: this.bucket,
+ Prefix: "/",
+ Delimiter: "/"
+ })
+ ),
+ yield this.execute(
+ new import_client_s33.ListObjectsV2Command({
+ Bucket: this.bucket,
+ Prefix: "",
+ Delimiter: "/"
+ })
+ )
+ ]);
+ result = fileResponse;
+ result.CommonPrefixes = CommonPrefixes;
+ } else {
+ result = yield this.execute(
+ new import_client_s33.ListObjectsV2Command({
+ Bucket: this.bucket,
+ Prefix: normalizedPath,
+ Delimiter: "/"
+ })
+ );
+ }
+ (_a2 = this.logger) == null ? void 0 : _a2.debug(null, "#### directoryList", {
+ normalizedPath,
+ CommonPrefixes: result.CommonPrefixes,
+ ContentFile: result.Contents
+ });
  const directories = (result.CommonPrefixes || []).map((prefix) => prefix.Prefix).map((prefix) => {
  const relativePath = prefix.replace(normalizedPath, "");
  const dir = relativePath.replace(/\/$/, "");
  return dir;
  }).filter((dir) => dir);
  const files = (result.Contents || []).filter((content) => {
- var _a2;
- return content.Key !== normalizedPath && !((_a2 = content.Key) == null ? void 0 : _a2.endsWith("/"));
+ var _a3;
+ return content.Key !== normalizedPath && !((_a3 = content.Key) == null ? void 0 : _a3.endsWith("/"));
  }).map((content) => __spreadProps(__spreadValues({}, content), {
  Name: content.Key.replace(normalizedPath, "") || content.Key,
- Location: `${this.link}${content.Key}`,
+ Location: `${this.link}${content.Key.replace(/^\//, "")}`,
  LastModified: new Date(content.LastModified)
  }));
  return { directories, files };
  });
  }
+ // todo: checked!
  directoryListPaginated(_0) {
  return __async(this, arguments, function* (directoryPath, {
  pageSize = 100,
@@ -726,8 +845,9 @@ var S3BucketUtil = class _S3BucketUtil {
  let allDirectories = [];
  let allFiles = [];
  while (currentPage <= pageNumber) {
- const result = yield this.execute(
- new import_client_s3.ListObjectsV2Command({
+ let result;
+ result = yield this.execute(
+ new import_client_s33.ListObjectsV2Command({
  Bucket: this.bucket,
  Prefix: normalizedPath,
  Delimiter: "/",
@@ -745,7 +865,7 @@ var S3BucketUtil = class _S3BucketUtil {
  return content.Key !== normalizedPath && !((_a2 = content.Key) == null ? void 0 : _a2.endsWith("/"));
  }).map((content) => __spreadProps(__spreadValues({}, content), {
  Name: content.Key.replace(normalizedPath, "") || content.Key,
- Location: `${this.link}${content.Key}`,
+ Location: `${this.link}${content.Key.replace(/^\//, "")}`,
  LastModified: new Date(content.LastModified)
  }));
  }
@@ -768,19 +888,21 @@ var S3BucketUtil = class _S3BucketUtil {
  */
  directoryListRecursive(directoryPath) {
  return __async(this, null, function* () {
+ var _a2;
  let normalizedPath = getNormalizedPath(directoryPath);
  if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
- else normalizedPath = "";
+ else normalizedPath = "/";
  const allDirectories = [];
  const allFiles = [];
  let ContinuationToken = void 0;
  do {
- const command = new import_client_s3.ListObjectsV2Command({
- Bucket: this.bucket,
- Prefix: normalizedPath,
- ContinuationToken
- });
- const result = yield this.execute(command);
+ const result = yield this.execute(
+ new import_client_s33.ListObjectsV2Command({
+ Bucket: this.bucket,
+ Prefix: normalizedPath,
+ ContinuationToken
+ })
+ );
  if (result.Contents) {
  for (const content of result.Contents) {
  const fullPath = content.Key;
@@ -792,8 +914,8 @@ var S3BucketUtil = class _S3BucketUtil {
  allFiles.push(__spreadProps(__spreadValues({}, content), {
  Name: filename,
  Path: fullPath,
- Location: `${this.link}${content.Key}`,
- LastModified: new Date(content.LastModified)
+ Location: content.Key ? `${this.link}${(_a2 = content.Key) == null ? void 0 : _a2.replace(/^\//, "")}` : "",
+ LastModified: content.LastModified ? new Date(content.LastModified) : null
  }));
  }
  }
@@ -849,7 +971,7 @@ var S3BucketUtil = class _S3BucketUtil {
  treeNode.children.push({
  path: "/" + file.Key,
  name: file.Name,
- location: `${this.link}${file.Key}`,
+ location: `${this.link}${file.Key.replace(/^\//, "")}`,
  type: "file",
  size: file.Size,
  lastModified: file.LastModified
@@ -863,14 +985,18 @@ var S3BucketUtil = class _S3BucketUtil {
  return treeNode;
  });
  }
- // ##### FILES BLOCK ##########################
+ };
+
+ // src/aws/s3/s3-file.ts
+ var S3File = class extends S3Directory {
+ constructor(props) {
+ super(props);
+ }
  fileInfo(filePath) {
  return __async(this, null, function* () {
  const normalizedKey = getNormalizedPath(filePath);
- if (!normalizedKey || normalizedKey === "/") {
- throw new Error("No file key provided");
- }
- const command = new import_client_s3.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
+ const command = new import_client_s34.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
  return yield this.execute(command);
  });
  }
@@ -879,9 +1005,9 @@ var S3BucketUtil = class _S3BucketUtil {
  var _a2, _b;
  let normalizedPath = getNormalizedPath(directoryPath);
  if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
- else normalizedPath = "";
+ else normalizedPath = this.localstack ? "" : "/";
  const prefix = normalizedPath + (fileNamePrefix || "");
- const command = new import_client_s3.ListObjectsCommand({
+ const command = new import_client_s34.ListObjectsCommand({
  Bucket: this.bucket,
  Prefix: prefix,
  Delimiter: "/"
@@ -889,10 +1015,10 @@ var S3BucketUtil = class _S3BucketUtil {
  const result = yield this.execute(command);
  const files = ((_a2 = result.Contents) != null ? _a2 : []).filter((v) => v).map(
  (content) => {
- var _a3, _b2;
+ var _a3, _b2, _c;
  return __spreadProps(__spreadValues({}, content), {
  Name: (_b2 = (_a3 = content.Key) == null ? void 0 : _a3.replace(prefix, "")) != null ? _b2 : content.Key,
- Location: `${this.link}${content.Key}`,
+ Location: content.Key ? `${this.link}${(_c = content.Key) == null ? void 0 : _c.replace(/^\//, "")}` : "",
  LastModified: content.LastModified ? new Date(content.LastModified) : null
  });
  }
@@ -901,6 +1027,7 @@ var S3BucketUtil = class _S3BucketUtil {
  return files;
  });
  }
+ // todo: checked!
  fileListInfoPaginated(_0) {
  return __async(this, arguments, function* (directoryPath, {
  fileNamePrefix,
@@ -918,7 +1045,7 @@ var S3BucketUtil = class _S3BucketUtil {
  let resultFiles = [];
  while (currentPage <= pageNumber) {
  const result = yield this.execute(
- new import_client_s3.ListObjectsV2Command({
+ new import_client_s34.ListObjectsV2Command({
  Bucket: this.bucket,
  Prefix: prefix,
  Delimiter: "/",
@@ -932,7 +1059,7 @@ var S3BucketUtil = class _S3BucketUtil {
  var _a3, _b2;
  return __spreadProps(__spreadValues({}, content), {
  Name: (_b2 = (_a3 = content.Key) == null ? void 0 : _a3.replace(prefix, "")) != null ? _b2 : content.Key,
- Location: `${this.link}${content.Key}`,
+ Location: content.Key ? `${this.link}${content.Key.replace(/^\//, "")}` : "",
  LastModified: content.LastModified ? new Date(content.LastModified) : null
  });
  }
@@ -956,41 +1083,48 @@ var S3BucketUtil = class _S3BucketUtil {
  };
  });
  }
- taggingFile(filePath, tagVersion = "1.0.0") {
+ // todo: checked!
+ taggingFile(filePath, tag) {
  return __async(this, null, function* () {
+ var _a2;
+ let normalizedKey = "";
+ const tags = [].concat(tag);
  try {
- const normalizedKey = getNormalizedPath(filePath);
+ normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
- const command = new import_client_s3.PutObjectTaggingCommand({
+ const command = new import_client_s34.PutObjectTaggingCommand({
  Bucket: this.bucket,
  Key: normalizedKey,
- Tagging: { TagSet: [{ Key: "version", Value: tagVersion }] }
+ Tagging: { TagSet: tags }
  });
  yield this.execute(command);
  return true;
- } catch (e) {
+ } catch (error) {
+ (_a2 = this.logger) == null ? void 0 : _a2.warn(null, "failed to tagging file", { errMsg: error.message, fileKey: normalizedKey, tags });
  return false;
  }
  });
  }
+ // todo: checked!
  fileVersion(filePath) {
  return __async(this, null, function* () {
  var _a2, _b;
  const normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
- const command = new import_client_s3.GetObjectTaggingCommand({ Bucket: this.bucket, Key: normalizedKey });
+ const command = new import_client_s34.GetObjectTaggingCommand({ Bucket: this.bucket, Key: normalizedKey });
  const result = yield this.execute(command);
  const tag = (_a2 = result.TagSet) == null ? void 0 : _a2.find((tag2) => tag2.Key === "version");
  return (_b = tag == null ? void 0 : tag.Value) != null ? _b : "";
  });
  }
+ // todo: checked!
  fileUrl(filePath, expiresIn = "15m") {
  return __async(this, null, function* () {
  var _a2;
- const normalizedKey = getNormalizedPath(filePath);
+ let normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
  const expiresInSeconds = typeof expiresIn === "number" ? expiresIn : (0, import_ms.default)(expiresIn) / 1e3;
- const command = new import_client_s3.GetObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ const command = new import_client_s34.GetObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
  const url = yield (0, import_s3_request_presigner.getSignedUrl)(this.s3Client, command, {
  expiresIn: expiresInSeconds
  // is using 3600 it's will expire in 1 hour (default is 900 seconds = 15 minutes)
@@ -1005,7 +1139,7 @@ var S3BucketUtil = class _S3BucketUtil {
  const normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
  try {
- const command = new import_client_s3.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ const command = new import_client_s34.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
  const headObject = yield this.execute(command);
  const bytes2 = (_a2 = headObject.ContentLength) != null ? _a2 : 0;
  switch (unit) {
@@ -1027,13 +1161,14 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  });
  }
+ // todo: checked!
  fileExists(filePath) {
  return __async(this, null, function* () {
  var _a2;
  try {
  const normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
- const command = new import_client_s3.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ const command = new import_client_s34.HeadObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
  yield this.execute(command);
  return true;
  } catch (error) {
@@ -1044,11 +1179,12 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  });
  }
+ // todo: checked!
  fileContent(filePath, format = "buffer") {
  return __async(this, null, function* () {
- const normalizedKey = getNormalizedPath(filePath);
+ let normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
- const command = new import_client_s3.GetObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ const command = new import_client_s34.GetObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
  const result = yield this.execute(command);
  if (!result.Body) {
  throw new Error("File body is empty");
@@ -1077,6 +1213,7 @@ var S3BucketUtil = class _S3BucketUtil {
  return buffer;
  });
  }
+ // todo: checked!
  uploadFile(_0, _1) {
  return __async(this, arguments, function* (filePath, fileData, acl = "private" /* private */, version = "1.0.0") {
  const normalizedKey = getNormalizedPath(filePath);
@@ -1100,38 +1237,130 @@ var S3BucketUtil = class _S3BucketUtil {
  };
  });
  }
+ // todo: checked!
  deleteFile(filePath) {
  return __async(this, null, function* () {
  const normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
- const command = new import_client_s3.DeleteObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
+ const command = new import_client_s34.DeleteObjectCommand({ Bucket: this.bucket, Key: normalizedKey });
  return yield this.execute(command);
  });
  }
- // ##### STREAMING BLOCK ##########################
+ };
+
+ // src/aws/s3/s3-stream.ts
+ var pump = (0, import_util.promisify)(import_stream.pipeline);
+ var S3Stream = class _S3Stream extends S3File {
+ constructor(_a2) {
+ var _b = _a2, { maxUploadFileSizeRestriction = "10GB" } = _b, props = __objRest(_b, ["maxUploadFileSizeRestriction"]);
+ super(props);
+ __publicField(this, "maxUploadFileSizeRestriction");
+ // todo: LOCALSTACK SANITY CHECKED - WORKING WELL, DON'T TOUCH!
+ __publicField(this, "getImageFileViewCtrl", ({
+ fileKey: _fileKey,
+ queryField = "file",
+ cachingAge = 31536e3
+ } = {}) => {
+ return (req, res, next) => __async(this, null, function* () {
+ var _a2, _b, _c, _d, _e;
+ let fileKey = _fileKey || (((_a2 = req.query) == null ? void 0 : _a2[queryField]) ? decodeURIComponent((_b = req.query) == null ? void 0 : _b[queryField]) : void 0);
+ if (!fileKey || fileKey === "/") {
+ (_d = this.logger) == null ? void 0 : _d.warn(req.id, "image file view required file query field", {
+ fileKey: (_c = req.query) == null ? void 0 : _c[queryField],
+ queryField
+ });
+ next("image file key is required");
+ return;
+ }
+ try {
+ const imageBuffer = yield this.fileContent(fileKey, "buffer");
+ const ext = import_pathe.default.extname(fileKey).slice(1).toLowerCase();
+ const mimeTypeMap = {
+ jpg: "image/jpeg",
+ jpeg: "image/jpeg",
+ png: "image/png",
+ gif: "image/gif",
+ webp: "image/webp",
+ svg: "image/svg+xml",
+ ico: "image/x-icon"
+ };
+ const contentType = mimeTypeMap[ext] || "application/octet-stream";
+ res.setHeader("Content-Type", contentType);
+ if (cachingAge) res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+ res.setHeader("Content-Length", imageBuffer.length);
+ res.status(200).send(imageBuffer);
+ } catch (error) {
+ (_e = this.logger) == null ? void 0 : _e.warn(req.id, "image view fileKey not found", {
+ fileKey,
+ localstack: this.localstack
+ });
+ next(`Failed to retrieve image file: ${error.message}`);
+ }
+ });
+ });
+ // todo: LOCALSTACK SANITY CHECKED - WORKING WELL, DON'T TOUCH!
+ __publicField(this, "getPdfFileViewCtrl", ({
+ fileKey: _fileKey,
+ queryField = "file",
+ cachingAge = 31536e3
+ } = {}) => {
+ return (req, res, next) => __async(this, null, function* () {
+ var _a2, _b;
+ let fileKey = _fileKey || (((_a2 = req.query) == null ? void 0 : _a2[queryField]) ? decodeURIComponent((_b = req.query) == null ? void 0 : _b[queryField]) : void 0);
+ if (!fileKey) {
+ next("pdf file key is required");
+ return;
+ }
+ try {
+ const fileBuffer = yield this.fileContent(fileKey, "buffer");
+ const ext = import_pathe.default.extname(fileKey).slice(1).toLowerCase();
+ const mimeTypeMap = {
+ pdf: "application/pdf",
+ txt: "text/plain",
+ doc: "application/msword",
+ docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+ xls: "application/vnd.ms-excel",
+ xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+ ppt: "application/vnd.ms-powerpoint",
+ pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation"
+ };
+ const contentType = mimeTypeMap[ext] || "application/octet-stream";
+ res.setHeader("Content-Type", contentType);
+ res.setHeader("Content-Disposition", `inline; filename="${import_pathe.default.basename(fileKey)}"`);
+ res.setHeader("Cache-Control", `public, max-age=${cachingAge}`);
+ res.setHeader("Content-Length", fileBuffer.length);
+ res.status(200).send(fileBuffer);
+ } catch (error) {
+ next(`Failed to retrieve pdf file: ${error.message}`);
+ }
+ });
+ });
+ this.maxUploadFileSizeRestriction = maxUploadFileSizeRestriction;
+ }
  streamObjectFile(_0) {
  return __async(this, arguments, function* (filePath, {
  Range,
  checkFileExists = true,
  abortSignal
  } = {}) {
- const normalizedKey = getNormalizedPath(filePath);
+ let normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
  if (checkFileExists) {
  const isExists = yield this.fileExists(normalizedKey);
  if (!isExists) return null;
  }
- const command = new import_client_s3.GetObjectCommand(__spreadValues({
+ const command = new import_client_s35.GetObjectCommand(__spreadValues({
  Bucket: this.bucket,
  Key: normalizedKey
  }, Range ? { Range } : {}));
  const response = yield this.execute(command, { abortSignal });
- if (!response.Body || !(response.Body instanceof import_node_stream.Readable)) {
+ if (!response.Body || !(response.Body instanceof import_node_stream2.Readable)) {
  throw new Error("Invalid response body: not a Readable stream");
  }
  return response.Body;
  });
  }
+ // todo: LOCALSTACK SANITY CHECKED - WORKING WELL, DON'T TOUCH!
  streamVideoFile(_0) {
  return __async(this, arguments, function* ({
  filePath,
@@ -1139,10 +1368,10 @@ var S3BucketUtil = class _S3BucketUtil {
  abortSignal
  }) {
  var _a2;
- const normalizedKey = getNormalizedPath(filePath);
+ let normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
  try {
- const cmd = new import_client_s3.GetObjectCommand(__spreadValues({
+ const cmd = new import_client_s35.GetObjectCommand(__spreadValues({
  Bucket: this.bucket,
  Key: normalizedKey
  }, Range ? { Range } : {}));
@@ -1171,145 +1400,127 @@ var S3BucketUtil = class _S3BucketUtil {
  }
  });
  }
- getStreamZipFileCtr(_0) {
+ // todo: LOCALSTACK SANITY CHECKED - WORKING WELL, DON'T TOUCH!
+ getStreamVideoFileCtrl(_0) {
  return __async(this, arguments, function* ({
- filePath,
- filename: _filename,
- compressionLevel = 5
+ fileKey,
+ allowedWhitelist,
+ contentType = "video/mp4",
+ streamTimeoutMS = 3e4,
+ bufferMB = 5
  }) {
  return (req, res, next) => __async(this, null, function* () {
- var _a2, _b, _c, _d, _e;
- const filePaths = [].concat(filePath).map((filePath2) => getNormalizedPath(filePath2)).filter((v) => v && v !== "/");
- if (!filePaths.length) {
- throw new Error("No file keys provided");
+ var _a2, _b, _c, _d, _e, _f;
+ let normalizedKey = getNormalizedPath(fileKey);
+ if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
+ const isExists = yield this.fileExists(normalizedKey);
+ const fileSize = yield this.sizeOf(normalizedKey);
+ let Range;
+ if (!isExists) {
+ next(Error(`File does not exist: "${normalizedKey}"`));
+ return;
  }
- let filename = _filename || (/* @__PURE__ */ new Date()).toISOString();
- filename = filename.endsWith(".zip") ? filename : `${filename}.zip`;
- const abort = new AbortController();
- const onClose = () => {
- abort.abort();
- };
- req.once("close", onClose);
  try {
- (_a2 = this.logger) == null ? void 0 : _a2.info(this.reqId, "Starting parallel file download...", { fileCount: filePaths.length });
- const downloadPromises = filePaths.map((filePath2) => __async(this, null, function* () {
- var _a3, _b2, _c2;
- try {
- if (abort.signal.aborted) return null;
- const stream = yield this.streamObjectFile(filePath2, { abortSignal: abort.signal });
- if (!stream) {
- (_a3 = this.logger) == null ? void 0 : _a3.warn(this.reqId, "File not found", { filePath: filePath2 });
- return null;
- }
- const chunks = [];
- try {
- for (var iter = __forAwait(stream), more, temp, error; more = !(temp = yield iter.next()).done; more = false) {
- const chunk = temp.value;
- if (abort.signal.aborted) {
- stream.destroy();
- return null;
- }
- chunks.push(import_buffer.Buffer.from(chunk));
- }
- } catch (temp) {
- error = [temp];
- } finally {
- try {
- more && (temp = iter.return) && (yield temp.call(iter));
- } finally {
- if (error)
- throw error[0];
- }
- }
- const buffer = import_buffer.Buffer.concat(chunks);
- const fileName = filePath2.split("/").pop() || filePath2;
- (_b2 = this.logger) == null ? void 0 : _b2.debug(this.reqId, "File downloaded", {
- filePath: filePath2,
- sizeMB: (buffer.length / (1024 * 1024)).toFixed(2)
- });
- return { buffer, name: fileName, path: filePath2 };
- } catch (error2) {
- (_c2 = this.logger) == null ? void 0 : _c2.warn(this.reqId, "Failed to download file", { filePath: filePath2, error: error2 });
- return null;
- }
- }));
- const fileBuffers = (yield Promise.all(downloadPromises)).filter(Boolean);
- if (abort.signal.aborted || fileBuffers.length === 0) {
- req.off("close", onClose);
- if (fileBuffers.length === 0) {
- next(new Error("No files available to zip"));
- }
- return;
- }
- (_b = this.logger) == null ? void 0 : _b.info(this.reqId, "All files downloaded, measuring zip size...", {
- fileCount: fileBuffers.length,
- totalSizeMB: (fileBuffers.reduce((sum, f) => sum + f.buffer.length, 0) / (1024 * 1024)).toFixed(2)
- });
- const measureArchive = (0, import_archiver.default)("zip", { zlib: { level: compressionLevel } });
- let actualZipSize = 0;
- measureArchive.on("data", (chunk) => {
- actualZipSize += chunk.length;
- });
- for (const file of fileBuffers) {
- if (abort.signal.aborted) break;
- measureArchive.append(file.buffer, { name: file.name });
+ if (req.method === "HEAD") {
+ res.setHeader("Content-Type", contentType);
+ res.setHeader("Accept-Ranges", "bytes");
+ if (fileSize) res.setHeader("Content-Length", String(fileSize));
+ return res.status(200).end();
  }
- yield measureArchive.finalize();
- if (abort.signal.aborted) {
- req.off("close", onClose);
+ const bufferSize = bufferMB;
+ const CHUNK_SIZE = __pow(10, 6) * bufferSize;
+ const rangeValues = parseRangeHeader(req.headers.range, fileSize, CHUNK_SIZE);
+ let [start, end] = rangeValues || [];
+ if (!rangeValues || start < 0 || start >= fileSize || end < 0 || end >= fileSize || start > end) {
+ res.status(416).send("Requested Range Not Satisfiable");
  return;
  }
- (_c = this.logger) == null ? void 0 : _c.info(this.reqId, "Zip size calculated", {
- actualZipSize,
- sizeMB: (actualZipSize / (1024 * 1024)).toFixed(2)
- });
- const actualArchive = (0, import_archiver.default)("zip", { zlib: { level: compressionLevel } });
- res.setHeader("Content-Type", "application/zip");
- res.setHeader("Content-Disposition", `attachment; filename="${filename}"`);
- res.setHeader("Content-Length", String(actualZipSize));
- res.setHeader("Access-Control-Expose-Headers", "Content-Type, Content-Disposition, Content-Length");
- actualArchive.on("error", (err) => {
- var _a3;
- (_a3 = this.logger) == null ? void 0 : _a3.error(this.reqId, "Archive error", { error: err });
- abort.abort();
- if (!res.headersSent) {
- next(err);
- }
+ res.statusCode = 206;
+ const chunkLength = end - start + 1;
+ res.setHeader("Content-Length", chunkLength);
+ res.setHeader("Content-Range", `bytes ${start}-${end}/${fileSize}`);
+ res.setHeader("Accept-Ranges", "bytes");
+ res.setHeader("Content-Type", "video/mp4");
+ Range = `bytes=${start}-${end}`;
+ } catch (error) {
+ next(error);
+ return;
+ }
+ const abort = new AbortController();
+ const onClose = () => abort.abort();
+ req.once("close", onClose);
+ try {
+ const result = yield this.streamVideoFile({
+ filePath: normalizedKey,
+ Range,
+ abortSignal: abort.signal
  });
- actualArchive.pipe(res);
- for (const file of fileBuffers) {
- if (abort.signal.aborted) break;
- actualArchive.append(file.buffer, { name: file.name });
+ const { body, meta } = result;
+ const origin = Array.isArray(allowedWhitelist) ? allowedWhitelist.includes((_a2 = req.headers.origin) != null ? _a2 : "") ? req.headers.origin : void 0 : allowedWhitelist;
+ if (origin) {
+ res.setHeader("Access-Control-Allow-Origin", origin);
+ res.setHeader("Vary", "Origin");
  }
- yield actualArchive.finalize();
- (_d = this.logger) == null ? void 0 : _d.info(this.reqId, "Zip download completed", {
- fileCount: fileBuffers.length,
- totalSize: actualZipSize
+ const finalContentType = contentType.startsWith("video/") ? contentType : `video/${contentType}`;
+ res.setHeader("Content-Type", (_b = meta.contentType) != null ? _b : finalContentType);
+ res.setHeader("Accept-Ranges", (_c = meta.acceptRanges) != null ? _c : "bytes");
+ if (Range && meta.contentRange) {
+ res.status(206);
+ res.setHeader("Content-Range", meta.contentRange);
+ if (typeof meta.contentLength === "number") {
+ res.setHeader("Content-Length", String(meta.contentLength));
+ }
+ } else if (fileSize) {
+ res.setHeader("Content-Length", String(fileSize));
+ }
+ if (meta.etag) res.setHeader("ETag", meta.etag);
+ if (meta.lastModified) res.setHeader("Last-Modified", meta.lastModified.toUTCString());
+ const timeout = setTimeout(() => {
+ abort.abort();
+ if (!res.headersSent) res.status(504);
+ res.end();
+ }, streamTimeoutMS);
+ res.once("close", () => {
+ var _a3;
+ clearTimeout(timeout);
+ (_a3 = body.destroy) == null ? void 0 : _a3.call(body);
+ req.off("close", onClose);
  });
- req.off("close", onClose);
+ yield pump(body, res);
+ clearTimeout(timeout);
  } catch (error) {
- abort.abort();
- const isBenignError = (error == null ? void 0 : error.code) === "ERR_STREAM_PREMATURE_CLOSE" || (error == null ? void 0 : error.name) === "AbortError" || (error == null ? void 0 : error.code) === "ECONNRESET";
- if (isBenignError) {
+ const isBenignStreamError = (error == null ? void 0 : error.code) === "ERR_STREAM_PREMATURE_CLOSE" || (error == null ? void 0 : error.name) === "AbortError" || (error == null ? void 0 : error.code) === "ECONNRESET";
+ if (isBenignStreamError) {
  return;
  }
  if (!res.headersSent) {
- (_e = this.logger) == null ? void 0 : _e.error(this.reqId, "Failed to create zip archive", { error });
+ (_f = this.logger) == null ? void 0 : _f.warn(req.id, "caught exception in stream controller", {
+ error: (_d = error == null ? void 0 : error.message) != null ? _d : String(error),
+ key: fileKey,
+ url: req.originalUrl,
+ userId: (_e = req.user) == null ? void 0 : _e._id
+ });
  next(error);
- } else if (!res.writableEnded) {
+ return;
+ }
+ if (!res.writableEnded) {
  try {
  res.end();
  } catch (e) {
  }
  }
- } finally {
- req.off("close", onClose);
+ return;
  }
  });
  });
  }
+ // todo: LOCALSTACK SANITY CHECKED - WORKING WELL, DON'T TOUCH!
  getStreamFileCtrl(_0) {
- return __async(this, arguments, function* ({ filePath, filename }) {
+ return __async(this, arguments, function* ({
+ filePath,
+ filename,
+ forDownloading = false
+ }) {
  return (req, res, next) => __async(this, null, function* () {
  var _a2, _b;
  const abort = new AbortController();
@@ -1320,7 +1531,7 @@ var S3BucketUtil = class _S3BucketUtil {
  (_a3 = stream == null ? void 0 : stream.destroy) == null ? void 0 : _a3.call(stream);
  };
  req.once("close", onClose);
- const normalizedKey = getNormalizedPath(filePath);
+ let normalizedKey = getNormalizedPath(filePath);
  if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
  try {
  const isExists = yield this.fileExists(normalizedKey);
@@ -1341,7 +1552,9 @@ var S3BucketUtil = class _S3BucketUtil {
  const fileInfo = yield this.fileInfo(normalizedKey);
  const fileName = filename || normalizedKey.split("/").pop() || "download";
  res.setHeader("Content-Type", fileInfo.ContentType || "application/octet-stream");
- res.setHeader("Content-Disposition", `attachment; filename="${fileName}"`);
+ if (forDownloading) {
+ res.setHeader("Content-Disposition", `attachment; filename="${fileName}"`);
+ }
  if (fileInfo.ContentLength) {
  res.setHeader("Content-Length", String(fileInfo.ContentLength));
  }
@@ -1382,115 +1595,140 @@ var S3BucketUtil = class _S3BucketUtil {
  });
  });
  }
- getStreamVideoFileCtrl(_0) {
+ // todo: LOCALSTACK SANITY CHECKED - WORKING WELL, DON'T TOUCH!
+ getStreamZipFileCtr(_0) {
  return __async(this, arguments, function* ({
- fileKey,
- allowedWhitelist,
- contentType = "video/mp4",
- streamTimeoutMS = 3e4,
- bufferMB = 5
+ filePath,
+ filename: _filename,
+ compressionLevel = 5
  }) {
  return (req, res, next) => __async(this, null, function* () {
- var _a2, _b, _c, _d, _e, _f;
- const normalizedKey = getNormalizedPath(fileKey);
- if (!normalizedKey || normalizedKey === "/") throw new Error("No file key provided");
- const isExists = yield this.fileExists(normalizedKey);
- const fileSize = yield this.sizeOf(normalizedKey);
- let Range;
- if (!isExists) {
- next(Error(`File does not exist: "${normalizedKey}"`));
- return;
- }
- try {
- if (req.method === "HEAD") {
- res.setHeader("Content-Type", contentType);
- res.setHeader("Accept-Ranges", "bytes");
- if (fileSize) res.setHeader("Content-Length", String(fileSize));
- return res.status(200).end();
- }
- const bufferSize = bufferMB;
- const CHUNK_SIZE = __pow(10, 6) * bufferSize;
- const rangeValues = parseRangeHeader(req.headers.range, fileSize, CHUNK_SIZE);
- let [start, end] = rangeValues || [];
- if (!rangeValues || start < 0 || start >= fileSize || end < 0 || end >= fileSize || start > end) {
- res.status(416).send("Requested Range Not Satisfiable");
- return;
- }
- res.statusCode = 206;
- const chunkLength = end - start + 1;
- res.setHeader("Content-Length", chunkLength);
- res.setHeader("Content-Range", `bytes ${start}-${end}/${fileSize}`);
- res.setHeader("Accept-Ranges", "bytes");
- res.setHeader("Content-Type", "video/mp4");
- Range = `bytes=${start}-${end}`;
- } catch (error) {
- next(error);
- return;
+ var _a2, _b, _c, _d, _e;
+ const filePaths = [].concat(filePath).map((filePath2) => getNormalizedPath(filePath2)).filter((v) => v && v !== "/");
+ if (!filePaths.length) {
+ throw new Error("No file keys provided");
  }
+ let filename = _filename || (/* @__PURE__ */ new Date()).toISOString();
+ filename = filename.endsWith(".zip") ? filename : `${filename}.zip`;
  const abort = new AbortController();
- const onClose = () => abort.abort();
+ const onClose = () => {
+ abort.abort();
+ };
  req.once("close", onClose);
  try {
- const result = yield this.streamVideoFile({
- filePath: normalizedKey,
- Range,
- abortSignal: abort.signal
+ (_a2 = this.logger) == null ? void 0 : _a2.info(this.reqId, "Starting parallel file download...", { fileCount: filePaths.length });
+ const downloadPromises = filePaths.map((filePath2) => __async(this, null, function* () {
+ var _a3, _b2, _c2;
+ try {
+ if (abort.signal.aborted) return null;
+ const stream = yield this.streamObjectFile(filePath2, { abortSignal: abort.signal });
+ if (!stream) {
+ (_a3 = this.logger) == null ? void 0 : _a3.warn(this.reqId, "File not found", { filePath: filePath2 });
+ return null;
+ }
+ const chunks = [];
+ try {
+ for (var iter = __forAwait(stream), more, temp, error; more = !(temp = yield iter.next()).done; more = false) {
+ const chunk = temp.value;
+ if (abort.signal.aborted) {
+ stream.destroy();
+ return null;
+ }
+ chunks.push(import_buffer2.Buffer.from(chunk));
+ }
+ } catch (temp) {
+ error = [temp];
+ } finally {
+ try {
+ more && (temp = iter.return) && (yield temp.call(iter));
+ } finally {
+ if (error)
+ throw error[0];
+ }
+ }
+ const buffer = import_buffer2.Buffer.concat(chunks);
+ const fileName = filePath2.split("/").pop() || filePath2;
+ (_b2 = this.logger) == null ? void 0 : _b2.debug(this.reqId, "File downloaded", {
+ filePath: filePath2,
+ sizeMB: (buffer.length / (1024 * 1024)).toFixed(2)
+ });
+ return { buffer, name: fileName, path: filePath2 };
+ } catch (error2) {
+ (_c2 = this.logger) == null ? void 0 : _c2.warn(this.reqId, "Failed to download file", { filePath: filePath2, error: error2 });
+ return null;
+ }
+ }));
+ const fileBuffers = (yield Promise.all(downloadPromises)).filter(Boolean);
+ if (abort.signal.aborted || fileBuffers.length === 0) {
+ req.off("close", onClose);
+ if (fileBuffers.length === 0) {
+ next(new Error("No files available to zip"));
+ }
+ return;
+ }
+ (_b = this.logger) == null ? void 0 : _b.info(this.reqId, "All files downloaded, measuring zip size...", {
+ fileCount: fileBuffers.length,
+ totalSizeMB: (fileBuffers.reduce((sum, f) => sum + f.buffer.length, 0) / (1024 * 1024)).toFixed(2)
  });
- const { body, meta } = result;
- const origin = Array.isArray(allowedWhitelist) ? allowedWhitelist.includes((_a2 = req.headers.origin) != null ? _a2 : "") ? req.headers.origin : void 0 : allowedWhitelist;
- if (origin) {
- res.setHeader("Access-Control-Allow-Origin", origin);
- res.setHeader("Vary", "Origin");
+ const measureArchive = (0, import_archiver.default)("zip", { zlib: { level: compressionLevel } });
+ let actualZipSize = 0;
+ measureArchive.on("data", (chunk) => {
+ actualZipSize += chunk.length;
+ });
+ for (const file of fileBuffers) {
+ if (abort.signal.aborted) break;
+ measureArchive.append(file.buffer, { name: file.name });
  }
- const finalContentType = contentType.startsWith("video/") ? contentType : `video/${contentType}`;
- res.setHeader("Content-Type", (_b = meta.contentType) != null ? _b : finalContentType);
- res.setHeader("Accept-Ranges", (_c = meta.acceptRanges) != null ? _c : "bytes");
- if (Range && meta.contentRange) {
- res.status(206);
- res.setHeader("Content-Range", meta.contentRange);
- if (typeof meta.contentLength === "number") {
- res.setHeader("Content-Length", String(meta.contentLength));
- }
- } else if (fileSize) {
- res.setHeader("Content-Length", String(fileSize));
+ yield measureArchive.finalize();
+ if (abort.signal.aborted) {
+ req.off("close", onClose);
+ return;
  }
- if (meta.etag) res.setHeader("ETag", meta.etag);
- if (meta.lastModified) res.setHeader("Last-Modified", meta.lastModified.toUTCString());
- const timeout = setTimeout(() => {
- abort.abort();
- if (!res.headersSent) res.status(504);
- res.end();
- }, streamTimeoutMS);
- res.once("close", () => {
+ (_c = this.logger) == null ? void 0 : _c.info(this.reqId, "Zip size calculated", {
+ actualZipSize,
+ sizeMB: (actualZipSize / (1024 * 1024)).toFixed(2)
+ });
+ const actualArchive = (0, import_archiver.default)("zip", { zlib: { level: compressionLevel } });
+ res.setHeader("Content-Type", "application/zip");
1693
+ res.setHeader("Content-Disposition", `attachment; filename="${filename}"`);
1694
+ res.setHeader("Content-Length", String(actualZipSize));
1695
+ res.setHeader("Access-Control-Expose-Headers", "Content-Type, Content-Disposition, Content-Length");
1696
+ actualArchive.on("error", (err) => {
1465
1697
  var _a3;
1466
- clearTimeout(timeout);
1467
- (_a3 = body.destroy) == null ? void 0 : _a3.call(body);
1468
- req.off("close", onClose);
1698
+ (_a3 = this.logger) == null ? void 0 : _a3.error(this.reqId, "Archive error", { error: err });
1699
+ abort.abort();
1700
+ if (!res.headersSent) {
1701
+ next(err);
1702
+ }
1469
1703
  });
1470
- yield pump(body, res);
1471
- clearTimeout(timeout);
1704
+ actualArchive.pipe(res);
1705
+ for (const file of fileBuffers) {
1706
+ if (abort.signal.aborted) break;
1707
+ actualArchive.append(file.buffer, { name: file.name });
1708
+ }
1709
+ yield actualArchive.finalize();
1710
+ (_d = this.logger) == null ? void 0 : _d.info(this.reqId, "Zip download completed", {
1711
+ fileCount: fileBuffers.length,
1712
+ totalSize: actualZipSize
1713
+ });
1714
+ req.off("close", onClose);
1472
1715
  } catch (error) {
1473
- const isBenignStreamError = (error == null ? void 0 : error.code) === "ERR_STREAM_PREMATURE_CLOSE" || (error == null ? void 0 : error.name) === "AbortError" || (error == null ? void 0 : error.code) === "ECONNRESET";
1474
- if (isBenignStreamError) {
1716
+ abort.abort();
1717
+ const isBenignError = (error == null ? void 0 : error.code) === "ERR_STREAM_PREMATURE_CLOSE" || (error == null ? void 0 : error.name) === "AbortError" || (error == null ? void 0 : error.code) === "ECONNRESET";
1718
+ if (isBenignError) {
1475
1719
  return;
1476
1720
  }
1477
1721
  if (!res.headersSent) {
1478
- (_f = this.logger) == null ? void 0 : _f.warn(req.id, "caught exception in stream controller", {
1479
- error: (_d = error == null ? void 0 : error.message) != null ? _d : String(error),
1480
- key: fileKey,
1481
- url: req.originalUrl,
1482
- userId: (_e = req.user) == null ? void 0 : _e._id
1483
- });
1722
+ (_e = this.logger) == null ? void 0 : _e.error(this.reqId, "Failed to create zip archive", { error });
1484
1723
  next(error);
1485
- return;
1486
- }
1487
- if (!res.writableEnded) {
1724
+ } else if (!res.writableEnded) {
1488
1725
  try {
1489
1726
  res.end();
1490
1727
  } catch (e) {
1491
1728
  }
1492
1729
  }
1493
- return;
1730
+ } finally {
1731
+ req.off("close", onClose);
1494
1732
  }
1495
1733
  });
1496
1734
  });
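
For orientation, the new getStreamZipFileCtr replaces the removed video-streaming controller with an async factory that resolves to an Express-style handler: it downloads the requested keys in parallel, zips them with archiver (running one pass first to measure the archive and set Content-Length), and streams the result. A minimal usage sketch, assuming an Express app and an S3Util instance; the constructor props, route, bucket, and keys below are illustrative and are not taken from this diff:

    import express from 'express';
    import { S3Util } from '@hdriel/aws-utils';

    async function registerRoutes() {
        const app = express();
        // Constructor props are assumed for illustration; they are not shown in this diff.
        const s3 = new S3Util({ bucket: 'my-bucket' } as any);

        // getStreamZipFileCtr is async and resolves to the Express handler itself.
        const zipHandler = await s3.getStreamZipFileCtr({
            filePath: ['reports/2024-01.csv', 'reports/2024-02.csv'],
            filename: 'reports', // ".zip" is appended when missing
            compressionLevel: 5, // zlib level forwarded to archiver
        });

        app.get('/download/reports.zip', zipHandler);
        app.listen(3000);
    }

    registerRoutes().catch(console.error);
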
@@ -1508,19 +1746,6 @@ var S3BucketUtil = class _S3BucketUtil {
  return cb(new Error(errorMsg));
  };
  }
- getFileSize(maxFileSize) {
- var _a2;
- const fileSizeUnitValue = maxFileSize != null ? maxFileSize : this.maxUploadFileSizeRestriction;
- const fileSize = typeof fileSizeUnitValue === "number" ? fileSizeUnitValue : (0, import_bytes.default)(fileSizeUnitValue);
- if (!fileSize) {
- (_a2 = this.logger) == null ? void 0 : _a2.warn(this.reqId, "Failed to convert fileSize restriction, proceeding without limit", {
- maxFileSize,
- maxUploadFileSizeRestriction: this.maxUploadFileSizeRestriction
- });
- return void 0;
- }
- return fileSize;
- }
  getUploadFileMW(directoryPath, {
  acl = "private" /* private */,
  maxFileSize,
@@ -1532,10 +1757,10 @@ var S3BucketUtil = class _S3BucketUtil {
  let normalizedPath = getNormalizedPath(directoryPath);
  if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
  else normalizedPath = "";
- const fileSize = this.getFileSize(maxFileSize);
+ const fileSize = getFileSize(maxFileSize, this.maxUploadFileSizeRestriction);
  const fileTypes = [].concat(fileType);
  const fileExts = [].concat(fileExt);
- const fileFilter = (fileTypes == null ? void 0 : fileTypes.length) || (fileExts == null ? void 0 : fileExts.length) ? _S3BucketUtil.fileFilter(fileTypes, fileExts) : void 0;
+ const fileFilter = (fileTypes == null ? void 0 : fileTypes.length) || (fileExts == null ? void 0 : fileExts.length) ? _S3Stream.fileFilter(fileTypes, fileExts) : void 0;
  return (0, import_multer.default)({
  fileFilter,
  limits: __spreadValues({}, fileSize && { fileSize }),
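
The removed getFileSize method above is now consumed as a standalone helper, getFileSize(maxFileSize, fallback), which lines up with the new src/utils/helpers.ts module that imports the bytes package. Based on the removed body, it behaves roughly like the sketch below; the exported signature is inferred, not confirmed by this diff:

    import bytes from 'bytes';

    // Sketch of the relocated helper, inferred from the removed method body above.
    // Accepts a byte count or a string such as "10mb"; falls back to the instance-level
    // restriction; returns undefined when nothing parses, so multer applies no limit.
    function getFileSize(
        maxFileSize?: number | string,
        maxUploadFileSizeRestriction?: number | string
    ): number | undefined {
        const value = maxFileSize ?? maxUploadFileSizeRestriction;
        if (value == null) return undefined;
        const size = typeof value === 'number' ? value : bytes(value);
        return size || undefined;
    }
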
@@ -1572,14 +1797,19 @@ var S3BucketUtil = class _S3BucketUtil {
  * Middleware for uploading a single file
  * Adds the uploaded file info to req.s3File
  */
- uploadSingleFile(fieldName, directory, options = {}) {
- const upload = this.getUploadFileMW(directory, options);
+ uploadSingleFile(fieldName, directoryPath, options = {}) {
+ var _a2;
+ let normalizedPath = getNormalizedPath(directoryPath);
+ if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
+ else normalizedPath = "";
+ (_a2 = this.logger) == null ? void 0 : _a2.debug(null, "####### uploadSingleFile", { directoryPath, normalizedPath, fieldName });
+ const upload = this.getUploadFileMW(normalizedPath, options);
  return (req, res, next) => {
  const mw = upload.single(fieldName);
  mw(req, res, (err) => {
- var _a2, _b;
+ var _a3, _b;
  if (err) {
- (_a2 = this.logger) == null ? void 0 : _a2.error(this.reqId, "Single file upload error", { fieldName, error: err.message });
+ (_a3 = this.logger) == null ? void 0 : _a3.error(this.reqId, "Single file upload error", { fieldName, error: err.message });
  return next(err);
  }
  if (req.file) {
@@ -1599,8 +1829,11 @@ var S3BucketUtil = class _S3BucketUtil {
  * Middleware for uploading multiple files with the same field name
  * Adds the uploaded files info to req.s3Files
  */
- uploadMultipleFiles(fieldName, directory, options = {}) {
- const upload = this.getUploadFileMW(directory, options);
+ uploadMultipleFiles(fieldName, directoryPath, options = {}) {
+ let normalizedPath = getNormalizedPath(directoryPath);
+ if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
+ else normalizedPath = "";
+ const upload = this.getUploadFileMW(normalizedPath, options);
  return (req, res, next) => {
  const mw = upload.array(fieldName, options.maxFilesCount || void 0);
  mw(req, res, (err) => {
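
Both upload middlewares now normalize the directory prefix themselves before delegating to getUploadFileMW. Typical wiring looks like the sketch below, assuming an Express app; the constructor props, routes, and directories are placeholders, while req.s3File, req.s3Files, and maxFilesCount come from the code in this diff:

    import express from 'express';
    import { S3Util } from '@hdriel/aws-utils';

    const app = express();
    // Constructor props are assumed for illustration.
    const s3 = new S3Util({ bucket: 'my-bucket' } as any);

    // Single file under the "avatar" field; metadata lands on req.s3File.
    app.post('/avatar', s3.uploadSingleFile('avatar', 'users/avatars'), (req: any, res) => {
        res.json({ key: req.s3File?.key });
    });

    // Several files under one field name; metadata lands on req.s3Files.
    app.post(
        '/gallery',
        s3.uploadMultipleFiles('photos', 'users/gallery', { maxFilesCount: 10 }),
        (req: any, res) => {
            res.json({ keys: (req.s3Files ?? []).map((f: any) => f.key) });
        }
    );
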
@@ -1621,49 +1854,15 @@ var S3BucketUtil = class _S3BucketUtil {
  });
  };
  }
- /**
- * Middleware for uploading multiple files with different field names
- * Adds the uploaded files info to req.s3FilesByField
- */
- uploadFieldsFiles(fields) {
- const fieldConfigs = fields.map((field) => {
- const upload = this.getUploadFileMW(field.directory, field.options || {});
- return {
- name: field.name,
- maxCount: field.maxCount || 1,
- upload,
- directory: field.directory
- };
- });
- return (req, res, next) => __async(this, null, function* () {
- const multerFields = fieldConfigs.map((f) => ({ name: f.name, maxCount: f.maxCount }));
- const upload = this.getUploadFileMW(fieldConfigs[0].directory);
- const mw = upload.fields(multerFields);
- mw(req, res, (err) => {
- var _a2, _b;
- if (err) {
- (_a2 = this.logger) == null ? void 0 : _a2.error(this.reqId, "Fields upload error", { error: err.message });
- return next(err);
- }
- if (req.files && typeof req.files === "object" && !Array.isArray(req.files)) {
- req.s3FilesByField = req.files;
- const uploadSummary = Object.entries(req.s3FilesByField).map(([field, files]) => ({
- field,
- count: files.length,
- keys: files.map((f) => f.key)
- }));
- (_b = this.logger) == null ? void 0 : _b.info(this.reqId, "Fields uploaded successfully", { uploadSummary });
- }
- next();
- });
- });
- }
  /**
  * Middleware for uploading any files (mixed field names)
  * Adds the uploaded files info to req.s3AllFiles
  */
- uploadAnyFiles(directory, maxCount, options = {}) {
- const upload = this.getUploadFileMW(directory, options);
+ uploadAnyFiles(directoryPath, maxCount, options = {}) {
+ let normalizedPath = getNormalizedPath(directoryPath);
+ if (normalizedPath !== "/" && normalizedPath !== "" && directoryPath !== void 0) normalizedPath += "/";
+ else normalizedPath = "";
+ const upload = this.getUploadFileMW(normalizedPath, options);
  return (req, res, next) => {
  const anyUpload = maxCount ? upload.any() : upload.any();
  anyUpload(req, res, (err) => {
@@ -1686,6 +1885,159 @@ var S3BucketUtil = class _S3BucketUtil {
  });
  };
  }
+ /**
+ * Middleware for uploading multiple files with different field names
+ * Adds the uploaded files info to req.s3FilesByField
+ */
+ // uploadFieldsFiles(
+ // fields: Array<{ name: string; directory: string; maxCount?: number; options?: S3UploadOptions }>
+ // ): RequestHandler {
+ // // Create separate multer instances for each field (since each might have different options)
+ // const fieldConfigs = fields.map((field) => {
+ // const upload = this.getUploadFileMW(field.directory, field.options || {});
+ //
+ // return {
+ // name: getNormalizedPath(field.name),
+ // directory: getNormalizedPath(field.directory),
+ // maxCount: field.maxCount || 1,
+ // upload,
+ // };
+ // });
+ //
+ // return async (
+ // req: Request & { s3FilesByField?: Record<string, UploadedS3File[]> } & any,
+ // res: Response,
+ // next: NextFunction & any
+ // ) => {
+ // // We'll use the first upload instance but with fields configuration
+ // const multerFields = fieldConfigs.map((f) => ({ name: f.name, maxCount: f.maxCount }));
+ // const upload = this.getUploadFileMW(fieldConfigs[0].directory);
+ //
+ // const mw: RequestHandler & any = upload.fields(multerFields);
+ // mw(req, res, (err: any) => {
+ // if (err) {
+ // this.logger?.error(this.reqId, 'Fields upload error', { error: err.message });
+ // return next(err);
+ // }
+ //
+ // if (req.files && typeof req.files === 'object' && !Array.isArray(req.files)) {
+ // req.s3FilesByField = req.files as Record<string, UploadedS3File[]>;
+ //
+ // const uploadSummary = Object.entries(req.s3FilesByField).map(([field, files]: any) => ({
+ // field,
+ // count: files.length,
+ // keys: files.map((f: any) => f.key),
+ // }));
+ //
+ // this.logger?.info(this.reqId, 'Fields uploaded successfully', { uploadSummary });
+ // }
+ //
+ // next();
+ // });
+ // };
+ // }
+ };
+
+ // src/aws/s3/s3-util.ts
+ var S3Util = class extends S3Stream {
+ constructor(props) {
+ super(props);
+ }
+ };
+
+ // src/aws/s3/s3-util.localstack.ts
+ var S3LocalstackUtil = class extends S3Util {
+ constructor(props) {
+ super(__spreadProps(__spreadValues({}, props), { localstack: true }));
+ }
+ // todo: checked!
+ directoryList(directoryPath) {
+ return __async(this, null, function* () {
+ var _a2;
+ let normalizedPath = getNormalizedPath(directoryPath);
+ if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
+ else normalizedPath = "";
+ let result;
+ result = yield this.execute(
+ new import_client_s36.ListObjectsV2Command({
+ Bucket: this.bucket,
+ Prefix: normalizedPath,
+ Delimiter: "/"
+ })
+ );
+ (_a2 = this.logger) == null ? void 0 : _a2.debug(null, "#### directoryList", {
+ normalizedPath,
+ CommonPrefixes: result.CommonPrefixes,
+ ContentFile: result.Contents
+ });
+ const directories = (result.CommonPrefixes || []).map((prefix) => prefix.Prefix).map((prefix) => {
+ const relativePath = prefix.replace(normalizedPath, "");
+ const dir = relativePath.replace(/\/$/, "");
+ return dir;
+ }).filter((dir) => dir);
+ const files = (result.Contents || []).filter((content) => {
+ var _a3;
+ return content.Key !== normalizedPath && !((_a3 = content.Key) == null ? void 0 : _a3.endsWith("/"));
+ }).map((content) => __spreadProps(__spreadValues({}, content), {
+ Name: content.Key.replace(normalizedPath, "") || content.Key,
+ Location: `${this.link}${content.Key.replace(/^\//, "")}`,
+ LastModified: new Date(content.LastModified)
+ }));
+ return { directories, files };
+ });
+ }
+ // todo: checked!
+ directoryListPaginated(_0) {
+ return __async(this, arguments, function* (directoryPath, {
+ pageSize = 100,
+ pageNumber = 0
+ // 0-based: page 0 = items 0-99, page 1 = items 100-199, page 2 = items 200-299
+ } = {}) {
+ let normalizedPath = getNormalizedPath(directoryPath);
+ if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
+ else normalizedPath = this.localstack ? "" : "/";
+ let continuationToken = void 0;
+ let currentPage = 0;
+ let allDirectories = [];
+ let allFiles = [];
+ while (currentPage <= pageNumber) {
+ let result;
+ result = yield this.execute(
+ new import_client_s36.ListObjectsV2Command({
+ Bucket: this.bucket,
+ Prefix: normalizedPath,
+ Delimiter: "/",
+ MaxKeys: pageSize,
+ ContinuationToken: continuationToken
+ })
+ );
+ if (currentPage === pageNumber) {
+ allDirectories = (result.CommonPrefixes || []).map((prefix) => prefix.Prefix).map((prefix) => {
+ const relativePath = prefix.replace(normalizedPath, "");
+ return relativePath.replace(/\/$/, "");
+ }).filter((dir) => dir);
+ allFiles = (result.Contents || []).filter((content) => {
+ var _a2;
+ return content.Key !== normalizedPath && !((_a2 = content.Key) == null ? void 0 : _a2.endsWith("/"));
+ }).map((content) => __spreadProps(__spreadValues({}, content), {
+ Name: content.Key.replace(normalizedPath, "") || content.Key,
+ Location: `${this.link}${content.Key.replace(/^\//, "")}`,
+ LastModified: new Date(content.LastModified)
+ }));
+ }
+ continuationToken = result.NextContinuationToken;
+ if (!result.IsTruncated || !continuationToken) {
+ break;
+ }
+ currentPage++;
+ }
+ return {
+ directories: allDirectories,
+ files: allFiles,
+ totalFetched: allFiles.length + allDirectories.length
+ };
+ });
+ }
  };

  // src/aws/sns.ts
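
The new S3LocalstackUtil (and S3Util, which it extends) exposes directoryList and directoryListPaginated on top of ListObjectsV2 with a "/" delimiter, returning the sub-prefixes as directories and the keys as files. A usage sketch, with illustrative constructor props and prefixes:

    import { S3LocalstackUtil } from '@hdriel/aws-utils';

    // Constructor props are assumed for illustration; S3LocalstackUtil only forces localstack: true.
    const s3 = new S3LocalstackUtil({ bucket: 'my-bucket' } as any);

    async function listReports() {
        // One level of the "reports/" prefix, split into sub-directories and files.
        const { directories, files } = await s3.directoryList('reports');

        // Page 1 (0-based), issuing ListObjectsV2 requests of up to 50 keys each.
        const page = await s3.directoryListPaginated('reports', { pageSize: 50, pageNumber: 1 });

        console.log(directories, files.length, page.totalFetched);
    }

    listReports().catch(console.error);
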
@@ -1724,6 +2076,7 @@ var SNSUtil = class {
  AWSConfigSharingUtil,
  IAMUtil,
  LambdaUtil,
- S3BucketUtil,
+ S3LocalstackUtil,
+ S3Util,
  SNSUtil
  });
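
Finally, the public S3BucketUtil export is replaced by S3Util and S3LocalstackUtil, so code written against 1.0.4 needs an import change along these lines (a sketch; the constructor options are not shown in this diff):

    // 1.0.4
    // import { S3BucketUtil } from '@hdriel/aws-utils';
    // const s3 = new S3BucketUtil({ /* ... */ });

    // 1.1.2 — S3Util appears to be the drop-in replacement, and S3LocalstackUtil
    // is the same class preconfigured with localstack: true.
    import { S3Util, S3LocalstackUtil } from '@hdriel/aws-utils';
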