@hdriel/aws-utils 1.1.2 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,744 @@
1
+ # S3 Utility Package
2
+
3
+ A powerful, type-safe wrapper around AWS S3 SDK v3 that simplifies S3 operations with advanced features like streaming, file uploads, directory management, and LocalStack support.
4
+
5
+ ## Features
6
+
7
+ ✨ **Simplified API** - Clean, intuitive methods for common S3 operations
8
+ 📁 **Directory Management** - Create, list, and delete directories with ease
9
+ 📤 **Advanced File Uploads** - Multer integration with Express.js middleware
10
+ 🎬 **Video Streaming** - Built-in support for range requests and video streaming
11
+ 📦 **Zip Streaming** - Stream multiple files as a zip archive
12
+ 🏷️ **File Tagging & Versioning** - Tag files and manage versions
13
+ 🔗 **Presigned URLs** - Generate temporary signed URLs
14
+ 🧪 **LocalStack Support** - First-class support for local S3 testing
15
+ ⚡ **Connection Pooling** - Optimized HTTP/HTTPS agents for better performance
16
+ 📊 **Pagination** - Built-in pagination for large directory listings
17
+
18
+ ## Installation
19
+
20
+ ```bash
21
+ npm install @hdriel/aws-utils
22
+ ```
23
+
24
+ ## Quick Start
25
+
26
+ First, load this configuration file once during server startup:
27
+ ```typescript
28
+ // aws-utils-config.ts
29
+ import env from './dotenv.ts';
30
+ import { AWSConfigSharingUtil } from '@hdriel/aws-utils';
31
+
32
+ AWSConfigSharingUtil.setConfig({
33
+ accessKeyId: env?.AWS_ACCESS_KEY_ID,
34
+ secretAccessKey: env?.AWS_SECRET_ACCESS_KEY,
35
+ region: env?.AWS_REGION,
36
+ endpoint: env?.AWS_ENDPOINT,
37
+ });
38
+
39
+ // console.log('AWSConfigSharingUtil configuration');
40
+ // console.table(AWSConfigSharingUtil.getConfig());
41
+ ```
42
+ Then import it in your server files:
43
+ ```typescript
44
+ import './aws-utils-config';
45
+ ...
46
+ ```
47
+
48
+ Then write your code.
49
+
50
+ For example:
51
+
52
+ ```typescript
53
+ import { S3Util, S3LocalstackUtil } from '@hdriel/aws-utils';
54
+
55
+ // Initialize S3 utility
56
+
57
+ // For LocalStack usage, uncomment these lines instead of the production client below:
58
+ // const s3 = new S3LocalstackUtil({ bucket: 'demo' });
59
+ // const directoryTreeInfo = await s3.directoryListPaginated('/', { pageSize: 100, pageNumber: 0 });
60
+ // console.log('Directory tree info', JSON.stringify(directoryTreeInfo, null, 2));
61
+
62
+ // OR
63
+
64
+ // For production usage:
65
+ const s3 = new S3Util({ bucket: 'demo' });
66
+ const directoryTreeInfo = await s3.directoryListPaginated('/', { pageSize: 100, pageNumber: 0 });
67
+ console.log('Directory tree info', JSON.stringify(directoryTreeInfo, null, 2));
68
+
69
+
70
+ // Usage examples
71
+ // Initialize bucket (creates it if it doesn't exist)
72
+ await s3.initBucket();
73
+
74
+ // Upload a file
75
+ await s3.uploadFile('/documents/file.pdf', fileBuffer);
76
+
77
+ // Check if file exists
78
+ const exists = await s3.fileExists('/documents/file.pdf');
79
+
80
+ // Get file content
81
+ const content = await s3.fileContent('/documents/file.pdf', 'utf8');
82
+ ```
83
+
84
+ ## Configuration Options
85
+
86
+ ```typescript
87
+ interface S3UtilProps {
88
+ bucket: string; // Required: S3 bucket name
89
+ logger?: Logger; // Optional: Logger instance
90
+ reqId?: string; // Optional: Request ID for logging
91
+ accessKeyId?: string; // AWS credentials
92
+ secretAccessKey?: string; // AWS credentials
93
+ endpoint?: string; // Custom endpoint (e.g., LocalStack)
94
+ region?: string; // AWS region (default: from config)
95
+ s3ForcePathStyle?: boolean; // Use path-style URLs (default: true)
96
+ maxUploadFileSizeRestriction?: string; // Max upload size (default: '10GB')
97
+ }
98
+ ```
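+
+ For example, a client wired up with several of these options (the values below are placeholders; only `bucket` is required):
+
+ ```typescript
+ import { S3Util } from '@hdriel/aws-utils';
+
+ const s3 = new S3Util({
+     bucket: 'my-bucket',                 // required
+     region: 'us-east-1',
+     accessKeyId: process.env.AWS_ACCESS_KEY_ID,
+     secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
+     s3ForcePathStyle: true,              // path-style URLs (the default)
+     maxUploadFileSizeRestriction: '2GB', // override the 10GB default
+ });
+ ```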
99
+ ----
100
+
101
+ ## Full Demo Project Example
102
+ Please review this demo project before using the package: [aws-utils-demo on GitHub](https://github.com/hdriel/aws-utils-demo)
103
+
104
+
105
+ ---
106
+
107
+
108
+ ## Core Features
109
+
110
+ ### 🪣 Bucket Operations
111
+
112
+ #### Initialize Bucket
113
+ ```typescript
114
+ // Create private bucket
115
+ await s3.initBucket('private');
116
+
117
+ // Create public bucket
118
+ await s3.initBucket('public-read');
119
+
120
+ // With location constraint
121
+ await s3.initBucket('private', {
122
+ includeConstraintLocation: true
123
+ });
124
+ ```
125
+
126
+ #### Bucket Information
127
+ ```typescript
128
+ const info = await s3.bucketInfo();
129
+ console.log(info);
130
+ // {
131
+ // name: 'my-bucket',
132
+ // region: 'us-east-1',
133
+ // exists: true,
134
+ // creationDate: Date,
135
+ // versioning: 'Enabled',
136
+ // encryption: { enabled: true, type: 'AES256' },
137
+ // publicAccessBlock: { ... },
138
+ // policy: { ... }
139
+ // }
140
+ ```
141
+
142
+ #### Check Bucket Exists
143
+ ```typescript
144
+ const exists = await s3.isBucketExists();
145
+ ```
146
+
147
+ #### Delete Bucket
148
+ ```typescript
149
+ // Delete bucket (must be empty)
150
+ await s3.destroyBucket();
151
+
152
+ // Force delete with all contents
153
+ await s3.destroyBucket(true);
154
+ ```
155
+
156
+ #### List All Buckets
157
+ ```typescript
158
+ const buckets = await s3.getBucketList();
159
+
160
+ // Include public access configuration
161
+ const bucketsWithAccess = await s3.getBucketList({
162
+ includePublicAccess: true
163
+ });
164
+ ```
165
+
166
+ ### 📁 Directory Operations
167
+
168
+ #### Create Directory
169
+ ```typescript
170
+ await s3.createDirectory('/uploads/images');
171
+ ```
172
+
173
+ #### List Directory Contents
174
+ ```typescript
175
+ const { directories, files } = await s3.directoryList('/uploads');
176
+
177
+ console.log('Subdirectories:', directories);
178
+ console.log('Files:', files);
179
+ // files: [
180
+ // {
181
+ // Key: '/uploads/image.jpg',
182
+ // Name: 'image.jpg',
183
+ // Size: 12345,
184
+ // LastModified: Date,
185
+ // Location: 'https://...'
186
+ // }
187
+ // ]
188
+ ```
189
+
190
+ #### Paginated Directory Listing
191
+ ```typescript
192
+ // Get second page with 50 items per page
193
+ const result = await s3.directoryListPaginated('/uploads', {
194
+ pageSize: 50,
195
+ pageNumber: 1
196
+ });
197
+
198
+ console.log(result.directories); // Array of directory names
199
+ console.log(result.files); // Array of file objects
200
+ console.log(result.totalFetched); // Number of items returned
201
+ ```
202
+
203
+ #### Delete Directory
204
+ ```typescript
205
+ // Delete directory and all contents
206
+ await s3.deleteDirectory('/uploads/temp');
207
+ ```
208
+
209
+ #### Check Directory Exists
210
+ ```typescript
211
+ const exists = await s3.directoryExists('/uploads/images');
212
+ ```
213
+
214
+ ### 📄 File Operations
215
+
216
+ #### Upload File
217
+ ```typescript
218
+ import { ACLs } from '@hdriel/aws-utils';
219
+
220
+ // Upload buffer
221
+ await s3.uploadFile('/documents/file.pdf', buffer);
222
+
223
+ // Upload with public access
224
+ await s3.uploadFile('/public/image.jpg', buffer, ACLs.public_read);
225
+
226
+ // Upload with version tag
227
+ await s3.uploadFile('/docs/v2.pdf', buffer, ACLs.private, '2.0.0');
228
+ ```
229
+
230
+ #### Check File Exists
231
+ ```typescript
232
+ const exists = await s3.fileExists('/documents/file.pdf');
233
+ ```
234
+
235
+ #### Get File Content
236
+ ```typescript
237
+ // As buffer
238
+ const buffer = await s3.fileContent('/documents/file.pdf');
239
+
240
+ // As base64 string
241
+ const base64 = await s3.fileContent('/image.jpg', 'base64');
242
+
243
+ // As UTF-8 string
244
+ const text = await s3.fileContent('/data.json', 'utf8');
245
+ ```
246
+
247
+ #### File Information
248
+ ```typescript
249
+ const info = await s3.fileInfo('/documents/file.pdf');
250
+ console.log(info.ContentLength);
251
+ console.log(info.ContentType);
252
+ console.log(info.LastModified);
253
+ ```
254
+
255
+ #### List Files
256
+ ```typescript
257
+ // List all files in directory
258
+ const files = await s3.fileListInfo('/documents');
259
+
260
+ // List files with prefix
261
+ const pdfFiles = await s3.fileListInfo('/documents', 'report-');
262
+
263
+ // Paginated file listing
264
+ const { files, totalFetched } = await s3.fileListInfoPaginated('/documents', {
265
+ fileNamePrefix: 'invoice-',
266
+ pageSize: 100,
267
+ pageNumber: 0
268
+ });
269
+ ```
270
+
271
+ #### File Size
272
+ ```typescript
273
+ const bytes = await s3.sizeOf('/large-file.zip');
274
+ const kb = await s3.sizeOf('/large-file.zip', 'KB');
275
+ const mb = await s3.sizeOf('/large-file.zip', 'MB');
276
+ const gb = await s3.sizeOf('/large-file.zip', 'GB');
277
+ ```
278
+
279
+ #### Delete File
280
+ ```typescript
281
+ await s3.deleteFile('/documents/old-file.pdf');
282
+ ```
283
+
284
+ #### Generate Presigned URL
285
+ ```typescript
286
+ // Expires in 15 minutes (default)
287
+ const url = await s3.fileUrl('/private/document.pdf');
288
+
289
+ // Custom expiration
290
+ const urlOneHour = await s3.fileUrl('/private/document.pdf', '1h');
291
+ const urlSeconds = await s3.fileUrl('/private/document.pdf', 3600); // seconds
292
+ ```
293
+
294
+ #### File Tagging
295
+ ```typescript
296
+ // Tag file with version
297
+ await s3.taggingFile('/documents/file.pdf', '1.0.0');
298
+
299
+ // Get file version
300
+ const version = await s3.fileVersion('/documents/file.pdf');
301
+ ```
302
+
303
+ ### 🎬 Streaming & Express.js Integration
304
+
305
+ #### Stream File Download
306
+ ```typescript
307
+ import express from 'express';
308
+
309
+ const app = express();
310
+
311
+ // Stream single file
312
+ app.get('/download/:file',
313
+ await s3.getStreamFileCtrl({
314
+ filePath: '/documents/file.pdf',
315
+ filename: 'download.pdf',
316
+ forDownloading: true
317
+ })
318
+ );
319
+ ```
320
+
321
+ #### Stream Zip Archive
322
+ ```typescript
323
+ // Download multiple files as zip
324
+ app.get('/download-all',
325
+ await s3.getStreamZipFileCtr({
326
+ filePath: [
327
+ '/documents/file1.pdf',
328
+ '/documents/file2.pdf',
329
+ '/images/photo.jpg'
330
+ ],
331
+ filename: 'archive.zip',
332
+ compressionLevel: 5 // 0-9, lower = faster
333
+ })
334
+ );
335
+ ```
336
+
337
+ #### Stream Video with Range Support
338
+ ```typescript
339
+ // Video streaming with range requests
340
+ app.get('/video/:id',
341
+ await s3.getStreamVideoFileCtrl({
342
+ fileKey: '/videos/movie.mp4',
343
+ contentType: 'video/mp4',
344
+ bufferMB: 5,
345
+ streamTimeoutMS: 30000,
346
+ allowedWhitelist: ['https://myapp.com']
347
+ })
348
+ );
349
+ ```
350
+
351
+ #### View Image
352
+ ```typescript
353
+ // Serve image with caching
354
+ app.get('/image',
355
+ s3.getImageFileViewCtrl({
356
+ queryField: 'path', // ?path=/images/photo.jpg
357
+ cachingAge: 31536000 // 1 year
358
+ })
359
+ );
360
+
361
+ // With fixed file path
362
+ app.get('/logo',
363
+ s3.getImageFileViewCtrl({
364
+ fileKey: '/public/logo.png'
365
+ })
366
+ );
367
+ ```
368
+
369
+ #### View PDF
370
+ ```typescript
371
+ app.get('/pdf',
372
+ s3.getPdfFileViewCtrl({
373
+ queryField: 'document',
374
+ cachingAge: 86400 // 1 day
375
+ })
376
+ );
377
+ ```
378
+
379
+ ### 📤 File Upload Middleware
380
+
381
+ #### Single File Upload
382
+ ```typescript
383
+ import express from 'express';
384
+
385
+ const app = express();
386
+
387
+ app.post('/upload',
388
+ s3.uploadSingleFile('file', '/uploads', {
389
+ maxFileSize: '5MB',
390
+ fileType: ['image', 'application'],
391
+ fileExt: ['jpg', 'png', 'pdf']
392
+ }),
393
+ (req, res) => {
394
+ console.log(req.s3File);
395
+ // {
396
+ // key: '/uploads/photo.jpg',
397
+ // location: 'https://...',
398
+ // size: 12345,
399
+ // mimetype: 'image/jpeg',
400
+ // ...
401
+ // }
402
+ res.json({ file: req.s3File });
403
+ }
404
+ );
405
+ ```
406
+
407
+ #### Multiple Files Upload
408
+ ```typescript
409
+ app.post('/upload-multiple',
410
+ s3.uploadMultipleFiles('photos', '/uploads/gallery', {
411
+ maxFileSize: '10MB',
412
+ maxFilesCount: 5,
413
+ fileType: ['image']
414
+ }),
415
+ (req, res) => {
416
+ console.log(req.s3Files); // Array of uploaded files
417
+ res.json({ files: req.s3Files });
418
+ }
419
+ );
420
+ ```
421
+
422
+ #### Upload with Custom Filename
423
+ ```typescript
424
+ app.post('/upload',
425
+ s3.uploadSingleFile('file', '/uploads', {
426
+ filename: async (req, file) => {
427
+ const timestamp = Date.now();
428
+ const ext = path.extname(file.originalname);
429
+ return `${req.user.id}-${timestamp}${ext}`;
430
+ }
431
+ }),
432
+ (req, res) => {
433
+ res.json({ file: req.s3File });
434
+ }
435
+ );
436
+ ```
437
+
438
+ #### Upload with Custom Metadata
439
+ ```typescript
440
+ app.post('/upload',
441
+ s3.uploadSingleFile('file', '/uploads', {
442
+ metadata: async (req, file) => ({
443
+ userId: req.user.id,
444
+ uploadDate: new Date().toISOString(),
445
+ originalName: file.originalname
446
+ })
447
+ }),
448
+ (req, res) => {
449
+ res.json({ file: req.s3File });
450
+ }
451
+ );
452
+ ```
453
+
454
+ #### Upload Any Files (Mixed Fields)
455
+ ```typescript
456
+ app.post('/upload-any',
457
+ s3.uploadAnyFiles('/uploads', 10, {
458
+ maxFileSize: '20MB'
459
+ }),
460
+ (req, res) => {
461
+ console.log(req.s3AllFiles); // All uploaded files
462
+ res.json({ files: req.s3AllFiles });
463
+ }
464
+ );
465
+ ```
466
+
467
+ ### Upload Options
468
+
469
+ ```typescript
470
+ interface S3UploadOptions {
471
+ acl?: 'private' | 'public-read' | 'public-read-write';
472
+ maxFileSize?: string | number; // '5MB', '1GB', or bytes
473
+ maxFilesCount?: number; // For multiple file uploads
474
+ filename?: string | ((req, file) => string | Promise<string>);
475
+ fileType?: Array<'image' | 'video' | 'audio' | 'application' | 'text'>;
476
+ fileExt?: string[]; // ['jpg', 'png', 'pdf']
477
+ metadata?: object | ((req, file) => object | Promise<object>);
478
+ }
479
+ ```
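+
+ These options can be combined. Below is a sketch of a single-file upload route that uses most of them, assuming `app` and `s3` are set up as in the earlier examples (the route path, field name, and metadata values are illustrative):
+
+ ```typescript
+ app.post('/upload-avatar',
+     s3.uploadSingleFile('avatar', '/uploads/avatars', {
+         acl: 'private',
+         maxFileSize: '2MB',
+         fileType: ['image'],
+         fileExt: ['jpg', 'png'],
+         filename: async (req, file) => `${Date.now()}-${file.originalname}`,
+         metadata: { source: 'avatar-upload' },
+     }),
+     (req, res) => {
+         res.json({ file: req.s3File });
+     }
+ );
+ ```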
480
+
481
+ ## 🧪 LocalStack Support
482
+
483
+ For local development and testing with LocalStack:
484
+
485
+ ```typescript
486
+ import { S3LocalstackUtil } from '@hdriel/aws-utils';
487
+
488
+ const s3 = new S3LocalstackUtil({
489
+ bucket: 'test-bucket',
490
+ endpoint: 'http://localhost:4566',
491
+ region: 'us-east-1',
492
+ accessKeyId: 'test',
493
+ secretAccessKey: 'test',
494
+ s3ForcePathStyle: true
495
+ });
496
+
497
+ // Use same API as S3Util
498
+ await s3.initBucket();
499
+ await s3.uploadFile('/test.txt', Buffer.from('Hello LocalStack!'));
500
+ ```
501
+
502
+ ### LocalStack Docker Setup
503
+
504
+ ```yaml
505
+ # docker-compose.yml
506
+ services:
507
+ localstack:
508
+ image: localstack/localstack
509
+ ports:
510
+ - "127.0.0.1:4566:4566" # LocalStack Gateway
511
+ - "127.0.0.1:4510-4559:4510-4559" # external services port range
512
+ environment:
513
+ # LocalStack configuration: https://docs.localstack.cloud/references/configuration/
514
+ - CLEAR_TMP_FOLDER=0
515
+ - DEBUG=${DEBUG:-1}
516
+ - PERSISTENCE=${PERSISTENCE:-1}
517
+ - LAMBDA_EXECUTOR=${LAMBDA_EXECUTOR:-}
518
+ - LOCALSTACK_API_KEY=${LOCALSTACK_API_KEY:-} # only required for Pro
519
+ - SERVICES=s3,lambda,sns,sqs,iam
520
+ - DATA_DIR=/tmp/localstack/data
521
+ - START_WEB=1
522
+ - DOCKER_HOST=unix:///var/run/docker.sock
523
+ - DEFAULT_REGION=us-east-1
524
+ - AWS_DEFAULT_REGION=us-east-1
525
+ - AWS_EXECUTION_ENV=True
526
+ - ENV=${NODE_ENV}
527
+ - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-xxxxxxxxx}
528
+ - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-xxxxxxxxxxxxxxxxxxxxx}
529
+ - HOSTNAME_EXTERNAL=localhost
530
+ volumes:
531
+ - "/var/run/docker.sock:/var/run/docker.sock"
532
+ - "${VOLUME_DIR_LOCALSTACK:-./docker-data/aws-localstack}:/var/lib/localstack"
533
+ - "${VOLUME_DIR_LOCALSTACK:-./docker-data/aws-localstack}/aws-s3:/tmp/localstack"
534
+ - "${VOLUME_DIR_LOCALSTACK:-./docker-data/aws-localstack}/aws-bootstrap:/opt/bootstrap/"
535
+ networks:
536
+ - app-network
537
+ ```
538
+
539
+ ## 🔧 Advanced Usage
540
+
541
+ ### Dynamic Bucket Switching
542
+
543
+ ```typescript
544
+ const s3 = new S3Util({
545
+ bucket: 'default-bucket',
546
+ // ... other config
547
+ });
548
+
549
+ // Switch to different bucket
550
+ s3.changeBucket('another-bucket');
551
+
552
+ // Operations now use 'another-bucket'
553
+ await s3.fileExists('/file.txt');
554
+ ```
555
+
556
+ ### Custom Logger Integration
557
+
558
+ ```typescript
559
+ import { Logger } from 'stack-trace-logger';
560
+
561
+ const logger = new Logger('S3Service');
562
+
563
+ const s3 = new S3Util({
564
+ bucket: 'my-bucket',
565
+ logger,
566
+ reqId: 'request-123'
567
+ });
568
+
569
+ // All operations will log with your logger
570
+ await s3.uploadFile('/test.txt', buffer);
571
+ ```
572
+
573
+ ### Connection Pooling Configuration
574
+
575
+ The utility includes optimized HTTP/HTTPS agents:
576
+
577
+ ```typescript
578
+ // Default configuration (already included):
579
+ // - keepAlive: true
580
+ // - maxSockets: 300
581
+ // - connectionTimeout: 3000ms
582
+ // - socketTimeout: 30000ms
583
+ ```
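+
+ For reference only, those defaults correspond roughly to the raw AWS SDK v3 setup sketched below. You do not need to configure this when using the package; it just illustrates what the built-in agents do (`requestTimeout` is the SDK's name for the socket timeout, and in older SDK versions the handler is imported from `@aws-sdk/node-http-handler`):
+
+ ```typescript
+ import { Agent } from 'node:https';
+ import { S3Client } from '@aws-sdk/client-s3';
+ import { NodeHttpHandler } from '@smithy/node-http-handler';
+
+ // Hand-rolled equivalent: keep-alive agent with a large socket pool
+ const client = new S3Client({
+     requestHandler: new NodeHttpHandler({
+         httpsAgent: new Agent({ keepAlive: true, maxSockets: 300 }),
+         connectionTimeout: 3000, // ms to establish a connection
+         requestTimeout: 30000,   // ms of socket inactivity per request
+     }),
+ });
+ ```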
584
+
585
+ ### Batch Operations
586
+
587
+ ```typescript
588
+ // Upload multiple files in parallel
589
+ const files = [
590
+ { path: '/docs/file1.pdf', data: buffer1 },
591
+ { path: '/docs/file2.pdf', data: buffer2 },
592
+ { path: '/docs/file3.pdf', data: buffer3 }
593
+ ];
594
+
595
+ await Promise.all(
596
+ files.map(file => s3.uploadFile(file.path, file.data))
597
+ );
598
+
599
+ // Delete multiple files
600
+ const filesToDelete = ['/old/file1.txt', '/old/file2.txt'];
601
+ await Promise.all(
602
+ filesToDelete.map(path => s3.deleteFile(path))
603
+ );
604
+ ```
605
+
606
+ ## 📋 Complete Express.js Example
607
+
608
+ ```typescript
609
+ import express from 'express';
610
+ import { S3Util, ACLs } from '@hdriel/aws-utils';
611
+
612
+ const app = express();
613
+ const s3 = new S3Util({
614
+ bucket: process.env.S3_BUCKET!,
615
+ region: process.env.AWS_REGION,
616
+ accessKeyId: process.env.AWS_ACCESS_KEY_ID,
617
+ secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
618
+ });
619
+
620
+ // Initialize bucket on startup
621
+ (async () => {
622
+ await s3.initBucket();
623
+ console.log('S3 bucket initialized');
624
+ })();
625
+
626
+ // Upload endpoint
627
+ app.post('/api/upload',
628
+ s3.uploadSingleFile('file', '/uploads', {
629
+ maxFileSize: '10MB',
630
+ fileType: ['image', 'application'],
631
+ filename: async (req, file) => {
632
+ const timestamp = Date.now();
633
+ const sanitized = file.originalname.replace(/[^a-zA-Z0-9.-]/g, '_');
634
+ return `${timestamp}-${sanitized}`;
635
+ }
636
+ }),
637
+ async (req, res) => {
638
+ const { key, location, size } = req.s3File!;
639
+
640
+ // Generate temporary URL
641
+ const url = await s3.fileUrl(key, '1h');
642
+
643
+ res.json({ key, location, size, temporaryUrl: url });
644
+ }
645
+ );
646
+
647
+ // Download endpoint
648
+ app.get('/api/download/:key(*)',
649
+ async (req, res, next) => {
650
+ const key = decodeURIComponent(req.params.key);
651
+ const ctrl = await s3.getStreamFileCtrl({
652
+ filePath: key,
653
+ forDownloading: true
654
+ });
655
+ ctrl(req, res, next);
656
+ }
657
+ );
658
+
659
+ // List files endpoint
660
+ app.get('/api/files', async (req, res) => {
661
+ const { page = '0', size = '50' } = req.query;
662
+
663
+ const result = await s3.directoryListPaginated('/uploads', {
664
+ pageNumber: parseInt(page as string),
665
+ pageSize: parseInt(size as string)
666
+ });
667
+
668
+ res.json(result);
669
+ });
670
+
671
+ // Delete file endpoint
672
+ app.delete('/api/files/:key(*)', async (req, res) => {
673
+ const key = decodeURIComponent(req.params.key);
674
+ await s3.deleteFile(key);
675
+ res.json({ success: true });
676
+ });
677
+
678
+ // Video streaming endpoint
679
+ app.get('/api/video/:id',
680
+ async (req, res, next) => {
681
+ const videoPath = `/videos/${req.params.id}.mp4`;
682
+ const ctrl = await s3.getStreamVideoFileCtrl({
683
+ fileKey: videoPath,
684
+ contentType: 'video/mp4',
685
+ bufferMB: 5
686
+ });
687
+ ctrl(req, res, next);
688
+ }
689
+ );
690
+
691
+ app.listen(3000, () => {
692
+ console.log('Server running on port 3000');
693
+ });
694
+ ```
695
+
696
+ ## 🚀 Performance Tips
697
+
698
+ 1. **Use Pagination**: For large directories, always use the paginated methods (see the sketch after this list)
699
+ 2. **Stream Large Files**: Use streaming methods instead of loading entire files into memory
700
+ 3. **Connection Pooling**: The built-in connection pooling is optimized for concurrent requests
701
+ 4. **Batch Operations**: Use `Promise.all()` for parallel operations when possible
702
+ 5. **Presigned URLs**: Generate presigned URLs for direct client uploads/downloads when appropriate
703
+
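+ For tip 1, here is a minimal sketch of walking a large directory page by page; it assumes a page that returns fewer than `pageSize` items is the last one:
+
+ ```typescript
+ const pageSize = 100;
+ let pageNumber = 0;
+ let totalFetched = 0;
+
+ do {
+     const page = await s3.directoryListPaginated('/uploads', { pageSize, pageNumber });
+     totalFetched = page.totalFetched;
+     pageNumber += 1;
+
+     // Process the current page before fetching the next one
+     for (const file of page.files) {
+         console.log(file.Key, file.Size);
+     }
+ } while (totalFetched === pageSize);
+ ```
+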
704
+ ## 🛡️ Error Handling
705
+
706
+ ```typescript
707
+ try {
708
+ await s3.uploadFile('/docs/file.pdf', buffer);
709
+ } catch (error) {
710
+ if (error.name === 'NotFound' || error.$metadata?.httpStatusCode === 404) {
711
+ console.error('File not found');
712
+ } else {
713
+ console.error('Upload failed:', error);
714
+ }
715
+ }
716
+ ```
717
+
718
+ ## 📝 TypeScript Support
719
+
720
+ This package is written in TypeScript and includes full type definitions:
721
+
722
+ ```typescript
723
+ import type {
724
+ ContentFile,
725
+ FileUploadResponse,
726
+ TreeDirectoryItem,
727
+ UploadedS3File,
728
+ S3UploadOptions
729
+ } from '@hdriel/aws-utils';
730
+ ```
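+
+ For example, the exported types can annotate your own helpers. A small sketch, assuming the field names shown in the earlier examples (`key`/`size` on upload results, `Size` on listing entries):
+
+ ```typescript
+ import type { ContentFile, UploadedS3File } from '@hdriel/aws-utils';
+
+ // Build a human-readable summary from an upload result
+ function describeUpload(file: UploadedS3File): string {
+     return `${file.key} (${file.size} bytes)`;
+ }
+
+ // Sort a directory listing by size, largest first
+ function sortBySize(files: ContentFile[]): ContentFile[] {
+     return [...files].sort((a, b) => (b.Size ?? 0) - (a.Size ?? 0));
+ }
+ ```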
731
+
732
+ ## 👤 Author
733
+
734
+ [Hadriel Benjo](https://github.com/hdriel)
735
+
736
+ ## 🔗 Links
737
+
738
+ - [AWS S3 Documentation](https://docs.aws.amazon.com/s3/)
739
+ - [LocalStack Documentation](https://docs.localstack.cloud/user-guide/aws/s3/)
740
+ - [GitHub Repository](#)
741
+
742
+ ---
743
+
744
+ Made with ❤️ for developers who want powerful S3 utilities without the complexity.
package/dist/index.cjs CHANGED
@@ -882,49 +882,6 @@ var S3Directory = class extends S3Bucket {
882
882
  };
883
883
  });
884
884
  }
885
- /**
886
- * Get all files recursively (example for search/indexing)
887
- * @param directoryPath
888
- */
889
- directoryListRecursive(directoryPath) {
890
- return __async(this, null, function* () {
891
- var _a2;
892
- let normalizedPath = getNormalizedPath(directoryPath);
893
- if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
894
- else normalizedPath = "/";
895
- const allDirectories = [];
896
- const allFiles = [];
897
- let ContinuationToken = void 0;
898
- do {
899
- const result = yield this.execute(
900
- new import_client_s33.ListObjectsV2Command({
901
- Bucket: this.bucket,
902
- Prefix: normalizedPath,
903
- ContinuationToken
904
- })
905
- );
906
- if (result.Contents) {
907
- for (const content of result.Contents) {
908
- const fullPath = content.Key;
909
- const relativePath = fullPath.replace(normalizedPath, "");
910
- const filename = fullPath.split("/").pop();
911
- if (fullPath.endsWith("/")) {
912
- allDirectories.push(relativePath.slice(0, -1));
913
- } else {
914
- allFiles.push(__spreadProps(__spreadValues({}, content), {
915
- Name: filename,
916
- Path: fullPath,
917
- Location: content.Key ? `${this.link}${(_a2 = content.Key) == null ? void 0 : _a2.replace(/^\//, "")}` : "",
918
- LastModified: content.LastModified ? new Date(content.LastModified) : null
919
- }));
920
- }
921
- }
922
- }
923
- ContinuationToken = result.NextContinuationToken;
924
- } while (ContinuationToken);
925
- return { directories: allDirectories, files: allFiles };
926
- });
927
- }
928
885
  /**
929
886
  * Get tree files recursively (example for build file explorer UI)
930
887
  * @param directoryPath - the directory start from
package/dist/index.d.cts CHANGED
@@ -207,16 +207,6 @@ declare class S3Directory extends S3Bucket {
207
207
  files: ContentFile[];
208
208
  totalFetched: number;
209
209
  }>;
210
- /**
211
- * Get all files recursively (example for search/indexing)
212
- * @param directoryPath
213
- */
214
- directoryListRecursive(directoryPath?: string): Promise<{
215
- directories: string[];
216
- files: Array<ContentFile & {
217
- Name: string;
218
- }>;
219
- }>;
220
210
  /**
221
211
  * Get tree files recursively (example for build file explorer UI)
222
212
  * @param directoryPath - the directory start from
package/dist/index.d.ts CHANGED
@@ -207,16 +207,6 @@ declare class S3Directory extends S3Bucket {
207
207
  files: ContentFile[];
208
208
  totalFetched: number;
209
209
  }>;
210
- /**
211
- * Get all files recursively (example for search/indexing)
212
- * @param directoryPath
213
- */
214
- directoryListRecursive(directoryPath?: string): Promise<{
215
- directories: string[];
216
- files: Array<ContentFile & {
217
- Name: string;
218
- }>;
219
- }>;
220
210
  /**
221
211
  * Get tree files recursively (example for build file explorer UI)
222
212
  * @param directoryPath - the directory start from
package/dist/index.js CHANGED
@@ -871,49 +871,6 @@ var S3Directory = class extends S3Bucket {
871
871
  };
872
872
  });
873
873
  }
874
- /**
875
- * Get all files recursively (example for search/indexing)
876
- * @param directoryPath
877
- */
878
- directoryListRecursive(directoryPath) {
879
- return __async(this, null, function* () {
880
- var _a2;
881
- let normalizedPath = getNormalizedPath(directoryPath);
882
- if (normalizedPath !== "/" && directoryPath !== "" && directoryPath !== void 0) normalizedPath += "/";
883
- else normalizedPath = "/";
884
- const allDirectories = [];
885
- const allFiles = [];
886
- let ContinuationToken = void 0;
887
- do {
888
- const result = yield this.execute(
889
- new ListObjectsV2Command2({
890
- Bucket: this.bucket,
891
- Prefix: normalizedPath,
892
- ContinuationToken
893
- })
894
- );
895
- if (result.Contents) {
896
- for (const content of result.Contents) {
897
- const fullPath = content.Key;
898
- const relativePath = fullPath.replace(normalizedPath, "");
899
- const filename = fullPath.split("/").pop();
900
- if (fullPath.endsWith("/")) {
901
- allDirectories.push(relativePath.slice(0, -1));
902
- } else {
903
- allFiles.push(__spreadProps(__spreadValues({}, content), {
904
- Name: filename,
905
- Path: fullPath,
906
- Location: content.Key ? `${this.link}${(_a2 = content.Key) == null ? void 0 : _a2.replace(/^\//, "")}` : "",
907
- LastModified: content.LastModified ? new Date(content.LastModified) : null
908
- }));
909
- }
910
- }
911
- }
912
- ContinuationToken = result.NextContinuationToken;
913
- } while (ContinuationToken);
914
- return { directories: allDirectories, files: allFiles };
915
- });
916
- }
917
874
  /**
918
875
  * Get tree files recursively (example for build file explorer UI)
919
876
  * @param directoryPath - the directory start from
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hdriel/aws-utils",
3
- "version": "1.1.2",
3
+ "version": "1.1.3",
4
4
  "description": "Simplified AWS SDK (v3) utilities for S3 (upload, download, streaming) with TypeScript support",
5
5
  "author": "Hadriel Benjo (https://github.com/hdriel)",
6
6
  "type": "module",