@uploadista/data-store-s3 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-check.log +5 -0
  3. package/LICENSE +21 -0
  4. package/README.md +588 -0
  5. package/dist/index.d.ts +2 -0
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +1 -0
  8. package/dist/observability.d.ts +45 -0
  9. package/dist/observability.d.ts.map +1 -0
  10. package/dist/observability.js +155 -0
  11. package/dist/s3-store-old.d.ts +51 -0
  12. package/dist/s3-store-old.d.ts.map +1 -0
  13. package/dist/s3-store-old.js +765 -0
  14. package/dist/s3-store.d.ts +9 -0
  15. package/dist/s3-store.d.ts.map +1 -0
  16. package/dist/s3-store.js +666 -0
  17. package/dist/services/__mocks__/s3-client-mock.service.d.ts +44 -0
  18. package/dist/services/__mocks__/s3-client-mock.service.d.ts.map +1 -0
  19. package/dist/services/__mocks__/s3-client-mock.service.js +379 -0
  20. package/dist/services/index.d.ts +2 -0
  21. package/dist/services/index.d.ts.map +1 -0
  22. package/dist/services/index.js +1 -0
  23. package/dist/services/s3-client.service.d.ts +68 -0
  24. package/dist/services/s3-client.service.d.ts.map +1 -0
  25. package/dist/services/s3-client.service.js +209 -0
  26. package/dist/test-observability.d.ts +6 -0
  27. package/dist/test-observability.d.ts.map +1 -0
  28. package/dist/test-observability.js +62 -0
  29. package/dist/types.d.ts +81 -0
  30. package/dist/types.d.ts.map +1 -0
  31. package/dist/types.js +1 -0
  32. package/dist/utils/calculations.d.ts +7 -0
  33. package/dist/utils/calculations.d.ts.map +1 -0
  34. package/dist/utils/calculations.js +41 -0
  35. package/dist/utils/error-handling.d.ts +7 -0
  36. package/dist/utils/error-handling.d.ts.map +1 -0
  37. package/dist/utils/error-handling.js +29 -0
  38. package/dist/utils/index.d.ts +4 -0
  39. package/dist/utils/index.d.ts.map +1 -0
  40. package/dist/utils/index.js +3 -0
  41. package/dist/utils/stream-adapter.d.ts +14 -0
  42. package/dist/utils/stream-adapter.d.ts.map +1 -0
  43. package/dist/utils/stream-adapter.js +41 -0
  44. package/package.json +36 -0
  45. package/src/__tests__/integration/s3-store.integration.test.ts +548 -0
  46. package/src/__tests__/multipart-logic.test.ts +395 -0
  47. package/src/__tests__/s3-store.edge-cases.test.ts +681 -0
  48. package/src/__tests__/s3-store.performance.test.ts +622 -0
  49. package/src/__tests__/s3-store.test.ts +662 -0
  50. package/src/__tests__/utils/performance-helpers.ts +459 -0
  51. package/src/__tests__/utils/test-data-generator.ts +331 -0
  52. package/src/__tests__/utils/test-setup.ts +256 -0
  53. package/src/index.ts +1 -0
  54. package/src/s3-store.ts +1059 -0
  55. package/src/services/__mocks__/s3-client-mock.service.ts +604 -0
  56. package/src/services/index.ts +1 -0
  57. package/src/services/s3-client.service.ts +359 -0
  58. package/src/types.ts +96 -0
  59. package/src/utils/calculations.ts +61 -0
  60. package/src/utils/error-handling.ts +52 -0
  61. package/src/utils/index.ts +3 -0
  62. package/src/utils/stream-adapter.ts +50 -0
  63. package/tsconfig.json +19 -0
  64. package/tsconfig.tsbuildinfo +1 -0
  65. package/vitest.config.ts +15 -0
package/.turbo/turbo-build.log ADDED
@@ -0,0 +1,5 @@
1
+
2
+ 
3
+ > @uploadista/data-store-s3@0.0.2 build /Users/denislaboureyras/Documents/uploadista/dev/uploadista-workspace/uploadista-sdk/packages/data-stores/s3
4
+ > tsc -b
5
+
package/.turbo/turbo-check.log ADDED
@@ -0,0 +1,5 @@
1
+
2
+ > @uploadista/data-store-s3@ check /Users/denislaboureyras/Documents/uploadista/dev/uploadista/packages/uploadista/data-stores/s3
3
+ > biome check --write ./src
4
+
5
+ Checked 18 files in 285ms. No fixes applied.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 uploadista
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,588 @@
1
+ # @uploadista/data-store-s3
2
+
3
+ AWS S3 data store for Uploadista - Store files in Amazon S3.
4
+
5
+ Provides S3-based file storage with multipart upload support, intelligent part size optimization, resumable uploads, and comprehensive error handling. Handles S3-specific constraints (10,000 parts limit, 5TB max file size) transparently.
6
+
7
+ ## Features
8
+
9
+ - **Multipart Uploads** - Configurable part sizes (5MiB to 5GiB)
10
+ - **Intelligent Optimization** - Automatic part size calculation for optimal performance
11
+ - **Resumable Uploads** - Resume failed uploads without re-uploading
12
+ - **File Tags** - Attach metadata tags to S3 objects
13
+ - **Expiration Support** - Automatic cleanup of old incomplete uploads
14
+ - **Full Observability** - Metrics, logging, and distributed tracing
15
+ - **Error Recovery** - Automatic retry with exponential backoff
16
+ - **TypeScript** - Full type safety with comprehensive JSDoc
17
+
18
+ ## Installation
19
+
20
+ ```bash
21
+ npm install @uploadista/data-store-s3 @aws-sdk/client-s3 @uploadista/core
22
+ # or
23
+ pnpm add @uploadista/data-store-s3 @aws-sdk/client-s3 @uploadista/core
24
+ ```
25
+
26
+ ## Requirements
27
+
28
+ - Node.js 18+
29
+ - AWS account with S3 bucket
30
+ - AWS credentials (via environment variables, IAM role, or credentials file)
31
+ - TypeScript 5.0+ (optional but recommended)
32
+
33
+ ## Quick Start
34
+
35
+ ### 1. Create S3 Data Store
36
+
37
+ ```typescript
38
+ import { createS3Store } from "@uploadista/data-store-s3";
39
+ import { createUploadServerLayer } from "@uploadista/server";
40
+ import { memoryKvStore } from "@uploadista/kv-store-memory";
41
+ import { webSocketEventEmitter } from "@uploadista/event-emitter-websocket";
42
+ import { Effect } from "effect";
43
+
44
+ // Create S3 store
45
+ const s3Store = createS3Store({
46
+ deliveryUrl: "https://my-bucket.s3.amazonaws.com",
47
+ s3ClientConfig: {
48
+ region: "us-east-1",
49
+ bucket: "my-bucket",
50
+ },
51
+ kvStore: memoryKvStore,
52
+ });
53
+
54
+ // Use in upload server
55
+ const uploadLayer = createUploadServerLayer({
56
+ dataStore: s3Store,
57
+ kvStore: memoryKvStore,
58
+ eventEmitter: webSocketEventEmitter,
59
+ });
60
+ ```
61
+
62
+ ### 2. Configure AWS Credentials
63
+
64
+ ```bash
65
+ # Option 1: Environment variables
66
+ export AWS_ACCESS_KEY_ID=your-access-key
67
+ export AWS_SECRET_ACCESS_KEY=your-secret-key
68
+ export AWS_REGION=us-east-1
69
+
70
+ # Option 2: AWS credentials file (~/.aws/credentials)
71
+ [default]
72
+ aws_access_key_id = your-access-key
73
+ aws_secret_access_key = your-secret-key
74
+
75
+ # Option 3: IAM role (when running on EC2, Lambda, ECS, etc.)
76
+ # Automatically used - no configuration needed
77
+ ```
78
+
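+ If you prefer to pass credentials explicitly instead of relying on the environment, the AWS SDK v3 `S3ClientConfig` accepts a `credentials` object, which can be forwarded through `s3ClientConfig`. A minimal sketch (bucket name and URL are placeholders):
+
+ ```typescript
+ import { createS3Store } from "@uploadista/data-store-s3";
+ import { memoryKvStore } from "@uploadista/kv-store-memory";
+
+ // Explicit credentials via the standard AWS SDK v3 `credentials` option
+ const s3Store = createS3Store({
+   deliveryUrl: "https://my-bucket.s3.amazonaws.com",
+   s3ClientConfig: {
+     region: process.env.AWS_REGION ?? "us-east-1",
+     bucket: "my-bucket",
+     credentials: {
+       accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
+       secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
+     },
+   },
+   kvStore: memoryKvStore,
+ });
+ ```
+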
79
+ ### 3. Upload Files
80
+
81
+ ```typescript
82
+ import { createHonoUploadistaAdapter } from "@uploadista/adapters-hono";
83
+ import { createFlowsEffect } from "./flows";
84
+
85
+ const adapter = await createHonoUploadistaAdapter({
86
+ baseUrl: "uploadista",
87
+ dataStore: s3Store,
88
+ kvStore: memoryKvStore,
89
+ flows: createFlowsEffect,
90
+ });
91
+
92
+ // Files now upload to S3 automatically
93
+ ```
94
+
95
+ ## Configuration
96
+
97
+ ### `S3StoreOptions`
98
+
99
+ ```typescript
100
+ type S3StoreOptions = {
101
+ // Required
102
+ deliveryUrl: string; // URL for accessing uploaded files
103
+ s3ClientConfig: S3ClientConfig & {
104
+ bucket: string; // S3 bucket name
105
+ };
106
+ kvStore: KvStore<UploadFile>; // Metadata store
107
+
108
+ // Optional - Multipart Configuration
109
+ partSize?: number; // Preferred part size (5MiB-5GiB)
110
+ minPartSize?: number; // Minimum part size (default: 5MiB)
111
+ maxMultipartParts?: number; // Default: 10,000 (S3 limit)
112
+ maxConcurrentPartUploads?: number; // Default: 60
113
+
114
+ // Optional - Management
115
+ useTags?: boolean; // Add tags to S3 objects
116
+ expirationPeriodInMilliseconds?: number; // Default: 1 week (7 days)
117
+ };
118
+ ```
119
+
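+ For reference, a configuration that also sets the optional management fields might look like the following sketch; it reuses the `kvStore` binding from the Quick Start, and the values are illustrative rather than recommendations:
+
+ ```typescript
+ const s3Store = createS3Store({
+   deliveryUrl: "https://my-bucket.s3.amazonaws.com",
+   s3ClientConfig: { region: "us-east-1", bucket: "my-bucket" },
+   kvStore,
+
+   // Management options
+   useTags: true, // attach tags to uploaded S3 objects
+   expirationPeriodInMilliseconds: 3 * 24 * 60 * 60 * 1000, // clean up incomplete uploads after 3 days
+ });
+ ```
+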
120
+ ### Part Size Strategy
121
+
122
+ S3 limits a multipart upload to 10,000 parts, so the store automatically calculates an optimal part size:
123
+
124
+ ```typescript
125
+ // For a 5TB file with default 5MiB parts:
126
+ // Parts needed = 5TB / 5MiB ≈ 1,048,576 parts
127
+ // This exceeds S3's 10K limit, so part size is automatically increased
128
+
129
+ const s3Store = createS3Store({
130
+ deliveryUrl: "https://bucket.s3.amazonaws.com",
131
+ s3ClientConfig: {
132
+ region: "us-east-1",
133
+ bucket: "my-bucket",
134
+ },
135
+ kvStore: kvStore,
136
+
137
+ // For typical files (<100GB), use default 5MiB parts
138
+ // For large files (>100GB), increase part size
139
+ partSize: 100 * 1024 * 1024, // 100MiB for faster large uploads
140
+ maxConcurrentPartUploads: 10, // Tune based on network
141
+ });
142
+ ```
143
+
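+ The package ships its own calculation utilities (`src/utils/calculations.ts`); the sketch below only illustrates the constraint, not that implementation. The part size has to grow once `fileSize / partSize` would exceed the 10,000-part limit:
+
+ ```typescript
+ // Illustrative only: choose a part size that keeps an upload within
+ // S3's 10,000-part limit while respecting the 5MiB minimum.
+ const MIN_PART_SIZE = 5 * 1024 * 1024;
+ const MAX_PARTS = 10_000;
+
+ function optimalPartSize(fileSize: number, preferredPartSize = MIN_PART_SIZE): number {
+   const partSize = Math.max(preferredPartSize, MIN_PART_SIZE);
+   if (Math.ceil(fileSize / partSize) <= MAX_PARTS) return partSize;
+   // Too many parts at the preferred size: scale the part size up instead.
+   return Math.ceil(fileSize / MAX_PARTS);
+ }
+
+ optimalPartSize(5 * 1024 ** 4); // 5TB file → parts of roughly 525MiB instead of 5MiB
+ ```
+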
144
+ ### Delivery URL Configuration
145
+
146
+ The `deliveryUrl` is used to construct file URLs in responses:
147
+
148
+ ```typescript
149
+ // For public bucket with CloudFront
150
+ const s3Store = createS3Store({
151
+ deliveryUrl: "https://d123456.cloudfront.net",
152
+ // URLs will be: https://d123456.cloudfront.net/upload-123
153
+
154
+ s3ClientConfig: {
155
+ region: "us-east-1",
156
+ bucket: "my-uploads",
157
+ },
158
+ kvStore,
159
+ });
160
+
161
+ // For direct S3 access
162
+ const s3Store = createS3Store({
163
+ deliveryUrl: "https://my-uploads.s3.amazonaws.com",
164
+ // URLs will be: https://my-uploads.s3.amazonaws.com/upload-123
165
+
166
+ s3ClientConfig: {
167
+ region: "us-east-1",
168
+ bucket: "my-uploads",
169
+ },
170
+ kvStore,
171
+ });
172
+
173
+ // For S3 Transfer Acceleration
174
+ const s3Store = createS3Store({
175
+ deliveryUrl: "https://my-uploads.s3-accelerate.amazonaws.com",
176
+ s3ClientConfig: {
177
+ region: "us-east-1",
178
+ bucket: "my-uploads",
179
+ },
180
+ kvStore,
181
+ });
182
+ ```
183
+
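+ In other words, the public URL of an upload is the delivery URL joined with the upload id, assuming the id is used as the object key (which is what the comments above suggest). A tiny sketch:
+
+ ```typescript
+ // Illustrative: how a delivery URL and an upload id combine into a file URL
+ const fileUrl = (deliveryUrl: string, uploadId: string) =>
+   `${deliveryUrl.replace(/\/+$/, "")}/${uploadId}`;
+
+ fileUrl("https://d123456.cloudfront.net", "upload-123");
+ // => "https://d123456.cloudfront.net/upload-123"
+ ```
+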
184
+ ## AWS Setup Guide
185
+
186
+ ### 1. Create S3 Bucket
187
+
188
+ ```bash
189
+ aws s3 mb s3://my-uploads-prod --region us-east-1
190
+ ```
191
+
192
+ ### 2. Configure Bucket Policy (Public Read)
193
+
194
+ ```json
195
+ {
196
+ "Version": "2012-10-17",
197
+ "Statement": [
198
+ {
199
+ "Effect": "Allow",
200
+ "Principal": "*",
201
+ "Action": "s3:GetObject",
202
+ "Resource": "arn:aws:s3:::my-uploads-prod/*"
203
+ }
204
+ ]
205
+ }
206
+ ```
207
+
208
+ ### 3. Create IAM User for Uploads
209
+
210
+ ```bash
211
+ # Create user
212
+ aws iam create-user --user-name uploadista-service
213
+
214
+ # Attach S3 policy
215
+ aws iam attach-user-policy \
216
+ --user-name uploadista-service \
217
+ --policy-arn arn:aws:iam::aws:policy/AmazonS3FullAccess
218
+
219
+ # Create access keys
220
+ aws iam create-access-key --user-name uploadista-service
221
+ ```
222
+
223
+ ### 4. Optional: Enable CORS
224
+
225
+ ```bash
226
+ aws s3api put-bucket-cors \
227
+ --bucket my-uploads-prod \
228
+ --cors-configuration '{
229
+ "CORSRules": [
230
+ {
231
+ "AllowedOrigins": ["https://myapp.com"],
232
+ "AllowedMethods": ["PUT", "POST"],
233
+ "AllowedHeaders": ["*"]
234
+ }
235
+ ]
236
+ }'
237
+ ```
238
+
239
+ ### 5. Optional: Enable Versioning
240
+
241
+ ```bash
242
+ aws s3api put-bucket-versioning \
243
+ --bucket my-uploads-prod \
244
+ --versioning-configuration Status=Enabled
245
+ ```
246
+
247
+ ### 6. Optional: Enable Transfer Acceleration
248
+
249
+ ```bash
250
+ aws s3api put-bucket-accelerate-configuration \
251
+ --bucket my-uploads-prod \
252
+ --accelerate-configuration Status=Enabled
253
+ ```
254
+
255
+ ## Complete Server Example
256
+
257
+ ```typescript
258
+ import Fastify from "fastify";
259
+ import WebSocket from "@fastify/websocket";
260
+ import JwT from "@fastify/jwt";
261
+ import { createFastifyUploadistaAdapter } from "@uploadista/adapters-fastify";
262
+ import { createS3Store } from "@uploadista/data-store-s3";
263
+ import { redisKvStore } from "@uploadista/kv-store-redis";
264
+ import { webSocketEventEmitter } from "@uploadista/event-emitter-websocket";
265
+ import { memoryEventBroadcaster } from "@uploadista/event-broadcaster-memory";
+ import { createFlowsEffect } from "./flows";
266
+
267
+ const fastify = Fastify({ logger: true });
268
+
269
+ await fastify.register(JwT, { secret: process.env.JWT_SECRET! });
270
+ await fastify.register(WebSocket);
271
+
272
+ // Configure S3
273
+ const s3Store = createS3Store({
274
+ deliveryUrl: process.env.S3_DELIVERY_URL!,
275
+ s3ClientConfig: {
276
+ region: process.env.AWS_REGION || "us-east-1",
277
+ bucket: process.env.S3_BUCKET!,
278
+ },
279
+ kvStore: redisKvStore,
280
+ partSize: parseInt(process.env.S3_PART_SIZE || "5242880", 10), // 5MiB default
281
+ });
282
+
283
+ // Create adapter
284
+ const adapter = await createFastifyUploadistaAdapter({
285
+ baseUrl: "uploadista",
286
+ dataStore: s3Store,
287
+ kvStore: redisKvStore,
288
+ eventEmitter: webSocketEventEmitter,
289
+ eventBroadcaster: memoryEventBroadcaster,
290
+ flows: createFlowsEffect,
291
+ authMiddleware: async (req, reply) => {
292
+ try {
293
+ await req.jwtVerify();
294
+ return {
295
+ clientId: (req.user as any).sub,
296
+ permissions: ["upload:create"],
297
+ };
298
+ } catch {
299
+ return null;
300
+ }
301
+ },
302
+ });
303
+
304
+ // Routes
305
+ fastify.all(`/${adapter.baseUrl}/*`, (req, res) => adapter.handler(req, res));
306
+ fastify.get("/ws", { websocket: true }, (socket, req) => {
307
+ adapter.websocketHandler(socket, req);
308
+ });
309
+
310
+ // Start
311
+ await fastify.listen({ port: 3000 });
312
+ console.log("Server running with S3 storage");
313
+ ```
314
+
315
+ ## Performance Tuning
316
+
317
+ ### For Small Files (<10MB)
318
+
319
+ ```typescript
320
+ const s3Store = createS3Store({
321
+ deliveryUrl,
322
+ s3ClientConfig: { region, bucket },
323
+ kvStore,
324
+ partSize: 5 * 1024 * 1024, // 5MB (minimum)
325
+ maxConcurrentPartUploads: 20, // Higher concurrency
326
+ });
327
+ ```
328
+
329
+ ### For Medium Files (10MB - 1GB)
330
+
331
+ ```typescript
332
+ const s3Store = createS3Store({
333
+ deliveryUrl,
334
+ s3ClientConfig: { region, bucket },
335
+ kvStore,
336
+ partSize: 10 * 1024 * 1024, // 10MB
337
+ maxConcurrentPartUploads: 10,
338
+ });
339
+ ```
340
+
341
+ ### For Large Files (>1GB)
342
+
343
+ ```typescript
344
+ const s3Store = createS3Store({
345
+ deliveryUrl,
346
+ s3ClientConfig: { region, bucket },
347
+ kvStore,
348
+ partSize: 100 * 1024 * 1024, // 100MB
349
+ maxConcurrentPartUploads: 5, // Lower concurrency for stability
350
+ });
351
+ ```
352
+
353
+ ### For Edge Locations (Using Transfer Acceleration)
354
+
355
+ ```typescript
356
+ const s3Store = createS3Store({
357
+ deliveryUrl: "https://bucket.s3-accelerate.amazonaws.com",
358
+ s3ClientConfig: {
359
+ region: "us-east-1",
360
+ bucket: "my-bucket",
361
+ useAccelerateEndpoint: true,
362
+ },
363
+ kvStore,
364
+ });
365
+ ```
366
+
367
+ ## Environment Configuration
368
+
369
+ ### .env File
370
+
371
+ ```env
372
+ # AWS Configuration
373
+ AWS_ACCESS_KEY_ID=your-access-key
374
+ AWS_SECRET_ACCESS_KEY=your-secret-key
375
+ AWS_REGION=us-east-1
376
+
377
+ # S3 Configuration
378
+ S3_BUCKET=my-uploads-prod
379
+ S3_DELIVERY_URL=https://my-uploads.s3.amazonaws.com
380
+ S3_PART_SIZE=5242880
381
+
382
+ # Optional: Transfer Acceleration
383
+ S3_USE_ACCELERATE=true
384
+
385
+ # Optional: Tags
386
+ S3_USE_TAGS=true
387
+ ```
388
+
389
+ ### Programmatic Configuration
390
+
391
+ ```typescript
+ import https from "node:https";
+ import { NodeHttpHandler } from "@smithy/node-http-handler"; // dependency of the AWS SDK v3
+ import { createS3Store } from "@uploadista/data-store-s3";
+
+ // Advanced S3 client configuration
+ const customS3Store = createS3Store({
+   deliveryUrl: process.env.S3_DELIVERY_URL!,
+   s3ClientConfig: {
+     region: process.env.AWS_REGION!,
+     bucket: process.env.S3_BUCKET!,
+
+     // Optional: advanced S3 client options
+     maxAttempts: 5,
+     requestHandler: new NodeHttpHandler({
+       httpsAgent: new https.Agent({ keepAlive: true }),
+       requestTimeout: 30_000,
+     }),
+   },
+   kvStore,
+   partSize: 50 * 1024 * 1024, // 50MiB
+ });
+ ```
415
+
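+ The `S3_USE_ACCELERATE` and `S3_USE_TAGS` flags from the `.env` example can be wired through explicitly; a sketch, continuing from the imports above:
+
+ ```typescript
+ const s3Store = createS3Store({
+   deliveryUrl: process.env.S3_DELIVERY_URL!,
+   s3ClientConfig: {
+     region: process.env.AWS_REGION!,
+     bucket: process.env.S3_BUCKET!,
+     useAccelerateEndpoint: process.env.S3_USE_ACCELERATE === "true",
+   },
+   kvStore,
+   useTags: process.env.S3_USE_TAGS === "true",
+ });
+ ```
+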
416
+ ## Error Handling
417
+
418
+ Common S3 errors and their causes (a small classification sketch follows the table):
419
+
420
+ | Error | Cause | Solution |
421
+ |-------|-------|----------|
422
+ | NoSuchBucket | Bucket doesn't exist | Verify bucket name and region |
423
+ | AccessDenied | Insufficient IAM permissions | Check IAM policy for PutObject, GetObject |
424
+ | InvalidBucketName | Invalid bucket name | Use lowercase, 3-63 chars, no special chars |
425
+ | EntityTooLarge | File exceeds S3 limits | Max 5TB per file |
426
+ | InvalidPartOrder | Part list not in ascending order when completing the upload | Complete the upload with parts listed in ascending part-number order |
427
+ | NoSuchUpload | Multipart upload doesn't exist | Session expired - restart upload |
428
+
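+ A minimal classification sketch using the AWS SDK v3 client: `S3ServiceException` and the error `name` values come from `@aws-sdk/client-s3`; the messages returned here are illustrative.
+
+ ```typescript
+ import { S3ServiceException } from "@aws-sdk/client-s3";
+
+ // Illustrative: map a thrown S3 error onto the table above
+ function describeS3Error(error: unknown): string {
+   if (error instanceof S3ServiceException) {
+     switch (error.name) {
+       case "NoSuchBucket":
+         return "Bucket does not exist - verify bucket name and region";
+       case "AccessDenied":
+         return "Insufficient IAM permissions - check the IAM policy";
+       case "NoSuchUpload":
+         return "Multipart session expired - restart the upload";
+       default:
+         return `S3 error: ${error.name}`;
+     }
+   }
+   return "Unknown error";
+ }
+ ```
+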
429
+ ## Monitoring & Observability
430
+
431
+ S3 store includes built-in observability:
432
+
433
+ ```typescript
434
+ import { MetricsClient } from "@uploadista/observability";
435
+
436
+ // Metrics automatically tracked:
437
+ // - s3.upload.started
438
+ // - s3.upload.progress
439
+ // - s3.upload.completed
440
+ // - s3.upload.failed
441
+ // - s3.part.uploaded
442
+ // - s3.metadata.operations
443
+ ```
444
+
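+ The instrumentation is built on `effect` metrics (see `dist/observability.d.ts` for the exported counters and histograms). As a rough sketch of the same pattern, counters can be defined and attached to an upload effect like this; the names are taken from the list above and the wiring is illustrative:
+
+ ```typescript
+ import { Effect, Metric } from "effect";
+
+ // Illustrative counters, comparable to the ones the store tracks
+ const uploadCompleted = Metric.counter("s3.upload.completed");
+ const uploadFailed = Metric.counter("s3.upload.failed");
+
+ // Wrap an upload effect so its outcome is counted
+ const withUploadOutcome = <A, E, R>(upload: Effect.Effect<A, E, R>) =>
+   upload.pipe(
+     Effect.tap(() => Metric.increment(uploadCompleted)),
+     Effect.tapError(() => Metric.increment(uploadFailed)),
+   );
+ ```
+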
445
+ ## CloudFront Integration (Optional)
446
+
447
+ For better performance, distribute files through CloudFront:
448
+
449
+ ```typescript
450
+ // Create CloudFront distribution pointing to S3 bucket
451
+ const s3Store = createS3Store({
452
+ deliveryUrl: "https://d123456789.cloudfront.net", // CloudFront URL
453
+ s3ClientConfig: {
454
+ region: "us-east-1",
455
+ bucket: "my-bucket",
456
+ },
457
+ kvStore,
458
+ });
459
+
460
+ // Files are now served through CloudFront edge locations globally
461
+ ```
462
+
463
+ ## Deployment Examples
464
+
465
+ ### Docker
466
+
467
+ ```dockerfile
468
+ FROM node:20-alpine
469
+ WORKDIR /app
470
+ COPY package*.json ./
471
+ RUN npm ci --omit=dev
472
+ COPY dist ./dist
473
+
474
+ ENV NODE_ENV=production
475
+ ENV AWS_REGION=us-east-1
476
+
477
+ EXPOSE 3000
478
+ CMD ["node", "dist/server.js"]
479
+ ```
480
+
481
+ ### AWS Lambda (with Serverless Framework)
482
+
483
+ ```yaml
+ service: uploadista-s3-server
+
+ provider:
+   name: aws
+   runtime: nodejs20.x
+   region: us-east-1
+   environment:
+     S3_BUCKET: my-uploads-prod
+
+ functions:
+   api:
+     handler: dist/handler.default
+     events:
+       - http:
+           path: /{proxy+}
+           method: ANY
+     timeout: 300
+     memorySize: 2048
+
+   websocket:
+     handler: dist/websocket.default
+     events:
+       - websocket:
+           route: $default
+
+ resources:
+   Resources:
+     UploadsBucket:
+       Type: AWS::S3::Bucket
+       Properties:
+         BucketName: my-uploads-prod
+ ```
516
+
517
+ ### Heroku
518
+
519
+ ```bash
520
+ # Create and configure app
521
+ heroku create my-uploadista-s3
522
+ heroku addons:create heroku-postgresql:standard-0
523
+ heroku config:set AWS_ACCESS_KEY_ID=your-key
524
+ heroku config:set AWS_SECRET_ACCESS_KEY=your-secret
525
+ heroku config:set S3_BUCKET=my-uploads-prod
526
+
527
+ # Deploy
528
+ git push heroku main
529
+ ```
530
+
531
+ ## Related Packages
532
+
533
+ - **[@uploadista/data-store-azure](../azure/)** - Azure Blob Storage
534
+ - **[@uploadista/data-store-gcs](../gcs/)** - Google Cloud Storage
535
+ - **[@uploadista/data-store-filesystem](../filesystem/)** - Local filesystem
536
+ - **[@uploadista/server](../../servers/server/)** - Core server utilities
537
+ - **[@uploadista/kv-store-redis](../../kv-stores/redis/)** - Redis KV store
538
+ - **[@uploadista/core](../../core/)** - Core engine
539
+
540
+ ## TypeScript Support
541
+
542
+ Full TypeScript support with comprehensive types:
543
+
544
+ ```typescript
545
+ import type { S3StoreOptions, S3Store } from "@uploadista/data-store-s3";
546
+ import { createS3Store } from "@uploadista/data-store-s3";
547
+ ```
548
+
549
+ ## Troubleshooting
550
+
551
+ ### NoSuchBucket Error
552
+
553
+ ```bash
554
+ # Verify bucket exists in correct region
555
+ aws s3 ls --region us-east-1 | grep my-bucket
556
+
557
+ # Create bucket if missing
558
+ aws s3 mb s3://my-bucket --region us-east-1
559
+ ```
560
+
561
+ ### AccessDenied Errors
562
+
563
+ ```bash
564
+ # Check IAM permissions
565
+ aws iam list-attached-user-policies --user-name uploadista-service
566
+
567
+ # Grant S3 permissions
568
+ aws iam attach-user-policy \
569
+ --user-name uploadista-service \
570
+ --policy-arn arn:aws:iam::aws:policy/AmazonS3FullAccess
571
+ ```
572
+
573
+ ### Slow Uploads
574
+
575
+ - Increase `partSize` to reduce per-part overhead on large files
576
+ - Reduce `maxConcurrentPartUploads` if the network is unstable
577
+ - Enable Transfer Acceleration for edge clients
578
+ - Use larger chunk sizes in the client
579
+
580
+ ### Memory Issues
581
+
582
+ - Reduce `maxConcurrentPartUploads`
583
+ - Decrease `partSize`
584
+ - Increase the server's memory allocation (see the sketch below)
585
+
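+ Peak buffer memory is roughly proportional to `partSize × maxConcurrentPartUploads`, so a memory-constrained deployment might look like the following sketch (values are illustrative, reusing the bindings from the tuning examples above):
+
+ ```typescript
+ // Roughly 5MiB × 4 ≈ 20MiB of part buffers in flight at any time
+ const s3Store = createS3Store({
+   deliveryUrl,
+   s3ClientConfig: { region, bucket },
+   kvStore,
+   partSize: 5 * 1024 * 1024, // minimum allowed part size
+   maxConcurrentPartUploads: 4,
+ });
+ ```
+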
586
+ ## License
587
+
588
+ MIT
package/dist/index.d.ts ADDED
@@ -0,0 +1,2 @@
1
+ export * from "./s3-store";
2
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1 @@
1
+ export * from "./s3-store";
package/dist/observability.d.ts ADDED
@@ -0,0 +1,45 @@
1
+ import { Context, Effect, Layer, Metric } from "effect";
2
+ export declare const uploadRequestsTotal: Metric.Metric.Counter<number>;
3
+ export declare const uploadPartsTotal: Metric.Metric.Counter<number>;
4
+ export declare const uploadSuccessTotal: Metric.Metric.Counter<number>;
5
+ export declare const uploadErrorsTotal: Metric.Metric.Counter<number>;
6
+ export declare const s3ApiCallsTotal: Metric.Metric.Counter<number>;
7
+ export declare const uploadDurationHistogram: Metric.Metric<import("effect/MetricKeyType").MetricKeyType.Histogram, number, import("effect/MetricState").MetricState.Histogram>;
8
+ export declare const partUploadDurationHistogram: Metric.Metric<import("effect/MetricKeyType").MetricKeyType.Histogram, number, import("effect/MetricState").MetricState.Histogram>;
9
+ export declare const fileSizeHistogram: Metric.Metric<import("effect/MetricKeyType").MetricKeyType.Histogram, number, import("effect/MetricState").MetricState.Histogram>;
10
+ export declare const partSizeHistogram: Metric.Metric<import("effect/MetricKeyType").MetricKeyType.Histogram, number, import("effect/MetricState").MetricState.Histogram>;
11
+ export declare const activeUploadsGauge: Metric.Metric.Gauge<number>;
12
+ export declare const uploadThroughputGauge: Metric.Metric.Gauge<number>;
13
+ export declare const uploadLatencySummary: Metric.Metric.Summary<number>;
14
+ export declare const TracingService: Context.Tag<{
15
+ serviceName: string;
16
+ }, {
17
+ serviceName: string;
18
+ }>;
19
+ export declare const createTracingLayer: (options?: {
20
+ serviceName?: string;
21
+ }) => Layer.Layer<{
22
+ serviceName: string;
23
+ }, never, never>;
24
+ export declare const TracingLayerLive: Layer.Layer<{
25
+ serviceName: string;
26
+ }, never, never>;
27
+ export declare const ObservabilityLayer: Layer.Layer<{
28
+ serviceName: string;
29
+ }, never, never>;
30
+ export declare const withUploadMetrics: <A, E, R>(uploadId: string, effect: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>;
31
+ export declare const withS3ApiMetrics: <A, E, R>(operation: string, effect: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>;
32
+ export declare const withTimingMetrics: <A, E, R>(metric: Metric.Metric.Histogram<number>, effect: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>;
33
+ export type S3ErrorCategory = "network_error" | "authentication_error" | "authorization_error" | "throttling_error" | "server_error" | "client_error" | "unknown_error";
34
+ export declare const classifyS3Error: (error: unknown) => S3ErrorCategory;
35
+ export declare const trackS3Error: (operation: string, error: unknown, context?: Record<string, unknown>) => Effect.Effect<void, never, never>;
36
+ export declare const logWithContext: (message: string, context: Record<string, unknown>) => Effect.Effect<void, never, never>;
37
+ export declare const logUploadProgress: (uploadId: string, progress: {
38
+ uploadedBytes: number;
39
+ totalBytes: number;
40
+ partNumber?: number;
41
+ speed?: number;
42
+ }) => Effect.Effect<void, never, never>;
43
+ export declare const logS3Operation: (operation: string, uploadId: string, metadata?: Record<string, unknown>) => Effect.Effect<void, never, never>;
44
+ export declare const logS3Error: (operation: string, error: unknown, context?: Record<string, unknown>) => Effect.Effect<void, never, never>;
45
+ //# sourceMappingURL=observability.d.ts.map
package/dist/observability.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"observability.d.ts","sourceRoot":"","sources":["../src/observability.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAoB,MAAM,QAAQ,CAAA;AAOzE,eAAO,MAAM,mBAAmB,+BAE9B,CAAA;AAEF,eAAO,MAAM,gBAAgB,+BAE3B,CAAA;AAEF,eAAO,MAAM,kBAAkB,+BAE7B,CAAA;AAEF,eAAO,MAAM,iBAAiB,+BAE5B,CAAA;AAEF,eAAO,MAAM,eAAe,+BAE1B,CAAA;AAGF,eAAO,MAAM,uBAAuB,mIAQnC,CAAA;AAED,eAAO,MAAM,2BAA2B,mIAQvC,CAAA;AAED,eAAO,MAAM,iBAAiB,mIAQ7B,CAAA;AAED,eAAO,MAAM,iBAAiB,mIAQ7B,CAAA;AAGD,eAAO,MAAM,kBAAkB,6BAE7B,CAAA;AAEF,eAAO,MAAM,qBAAqB,6BAEhC,CAAA;AAGF,eAAO,MAAM,oBAAoB,+BAO/B,CAAA;AAOF,eAAO,MAAM,cAAc;iBAAqC,MAAM;;iBAAN,MAAM;EAAqB,CAAA;AAG3F,eAAO,MAAM,kBAAkB,GAAI,UAAU;IAC3C,WAAW,CAAC,EAAE,MAAM,CAAA;CACrB;iBAL+D,MAAM;gBAUrE,CAAA;AAGD,eAAO,MAAM,gBAAgB;iBAbmC,MAAM;gBAepE,CAAA;AAMF,eAAO,MAAM,kBAAkB;iBArBiC,MAAM;gBAwBrE,CAAA;AAMD,eAAO,MAAM,iBAAiB,GAAI,CAAC,EAAE,CAAC,EAAE,CAAC,EACvC,UAAU,MAAM,EAChB,QAAQ,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,KAC7B,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAWrB,CAAA;AAEH,eAAO,MAAM,gBAAgB,GAAI,CAAC,EAAE,CAAC,EAAE,CAAC,EACtC,WAAW,MAAM,EACjB,QAAQ,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,KAC7B,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAKrB,CAAA;AAEH,eAAO,MAAM,iBAAiB,GAAI,CAAC,EAAE,CAAC,EAAE,CAAC,EACvC,QAAQ,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,EACvC,QAAQ,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,KAC7B,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAUpB,CAAA;AAMJ,MAAM,MAAM,eAAe,GACvB,eAAe,GACf,sBAAsB,GACtB,qBAAqB,GACrB,kBAAkB,GAClB,cAAc,GACd,cAAc,GACd,eAAe,CAAA;AAEnB,eAAO,MAAM,eAAe,GAAI,OAAO,OAAO,KAAG,eAgChD,CAAA;AAED,eAAO,MAAM,YAAY,GACvB,WAAW,MAAM,EACjB,OAAO,OAAO,EACd,UAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAM,sCAqBrC,CAAA;AAMF,eAAO,MAAM,cAAc,GACzB,SAAS,MAAM,EACf,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,sCAGjC,CAAA;AAED,eAAO,MAAM,iBAAiB,GAC5B,UAAU,MAAM,EAChB,UAAU;IACR,aAAa,EAAE,MAAM,CAAA;IACrB,UAAU,EAAE,MAAM,CAAA;IAClB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,sCAWF,CAAA;AAED,eAAO,MAAM,cAAc,GACzB,WAAW,MAAM,EACjB,UAAU,MAAM,EAChB,WAAW,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,sCAQnC,CAAA;AAED,eAAO,MAAM,UAAU,GACrB,WAAW,MAAM,EACjB,OAAO,OAAO,EACd,UAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAM,sCACK,CAAA"}