@uploadista/data-store-s3 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-check.log +5 -0
  3. package/LICENSE +21 -0
  4. package/README.md +588 -0
  5. package/dist/index.d.ts +2 -0
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +1 -0
  8. package/dist/observability.d.ts +45 -0
  9. package/dist/observability.d.ts.map +1 -0
  10. package/dist/observability.js +155 -0
  11. package/dist/s3-store-old.d.ts +51 -0
  12. package/dist/s3-store-old.d.ts.map +1 -0
  13. package/dist/s3-store-old.js +765 -0
  14. package/dist/s3-store.d.ts +9 -0
  15. package/dist/s3-store.d.ts.map +1 -0
  16. package/dist/s3-store.js +666 -0
  17. package/dist/services/__mocks__/s3-client-mock.service.d.ts +44 -0
  18. package/dist/services/__mocks__/s3-client-mock.service.d.ts.map +1 -0
  19. package/dist/services/__mocks__/s3-client-mock.service.js +379 -0
  20. package/dist/services/index.d.ts +2 -0
  21. package/dist/services/index.d.ts.map +1 -0
  22. package/dist/services/index.js +1 -0
  23. package/dist/services/s3-client.service.d.ts +68 -0
  24. package/dist/services/s3-client.service.d.ts.map +1 -0
  25. package/dist/services/s3-client.service.js +209 -0
  26. package/dist/test-observability.d.ts +6 -0
  27. package/dist/test-observability.d.ts.map +1 -0
  28. package/dist/test-observability.js +62 -0
  29. package/dist/types.d.ts +81 -0
  30. package/dist/types.d.ts.map +1 -0
  31. package/dist/types.js +1 -0
  32. package/dist/utils/calculations.d.ts +7 -0
  33. package/dist/utils/calculations.d.ts.map +1 -0
  34. package/dist/utils/calculations.js +41 -0
  35. package/dist/utils/error-handling.d.ts +7 -0
  36. package/dist/utils/error-handling.d.ts.map +1 -0
  37. package/dist/utils/error-handling.js +29 -0
  38. package/dist/utils/index.d.ts +4 -0
  39. package/dist/utils/index.d.ts.map +1 -0
  40. package/dist/utils/index.js +3 -0
  41. package/dist/utils/stream-adapter.d.ts +14 -0
  42. package/dist/utils/stream-adapter.d.ts.map +1 -0
  43. package/dist/utils/stream-adapter.js +41 -0
  44. package/package.json +36 -0
  45. package/src/__tests__/integration/s3-store.integration.test.ts +548 -0
  46. package/src/__tests__/multipart-logic.test.ts +395 -0
  47. package/src/__tests__/s3-store.edge-cases.test.ts +681 -0
  48. package/src/__tests__/s3-store.performance.test.ts +622 -0
  49. package/src/__tests__/s3-store.test.ts +662 -0
  50. package/src/__tests__/utils/performance-helpers.ts +459 -0
  51. package/src/__tests__/utils/test-data-generator.ts +331 -0
  52. package/src/__tests__/utils/test-setup.ts +256 -0
  53. package/src/index.ts +1 -0
  54. package/src/s3-store.ts +1059 -0
  55. package/src/services/__mocks__/s3-client-mock.service.ts +604 -0
  56. package/src/services/index.ts +1 -0
  57. package/src/services/s3-client.service.ts +359 -0
  58. package/src/types.ts +96 -0
  59. package/src/utils/calculations.ts +61 -0
  60. package/src/utils/error-handling.ts +52 -0
  61. package/src/utils/index.ts +3 -0
  62. package/src/utils/stream-adapter.ts +50 -0
  63. package/tsconfig.json +19 -0
  64. package/tsconfig.tsbuildinfo +1 -0
  65. package/vitest.config.ts +15 -0
package/src/__tests__/s3-store.performance.test.ts
@@ -0,0 +1,622 @@
+ import { UploadFileKVStore } from "@uploadista/core/types";
+ import { Effect } from "effect";
+ import { afterEach, beforeEach, describe, expect, it } from "vitest";
+ import { createS3StoreImplementation } from "../s3-store";
+ import type { S3Store } from "../types";
+ import {
+   benchmarkUpload,
+   createPerformanceBenchmarks,
+   formatConcurrentMetrics,
+   formatMemoryMetrics,
+   formatMetrics,
+   measureConcurrentOps,
+   measureMemory,
+   measurePerformance,
+   ProgressTracker,
+   runStressTest,
+ } from "./utils/performance-helpers";
+ import {
+   createTestDataStream,
+   TEST_FILE_SIZES,
+ } from "./utils/test-data-generator";
+ import {
+   createTestS3StoreConfig,
+   createTestUploadFile,
+   type MockS3TestMethods,
+   runTestWithTimeout,
+   setupTestEnvironment,
+   TestLayersWithMockS3,
+ } from "./utils/test-setup";
+
+ describe("S3Store - Performance Tests", () => {
+   let s3Store: S3Store;
+   let mockService: MockS3TestMethods;
+   const benchmarks = createPerformanceBenchmarks();
+
+   beforeEach(async () => {
+     await runTestWithTimeout(
+       Effect.gen(function* () {
+         mockService = yield* setupTestEnvironment();
+
+         const kvStore = yield* UploadFileKVStore;
+         const config = createTestS3StoreConfig();
+
+         s3Store = yield* createS3StoreImplementation({
+           ...config,
+           kvStore,
+         });
+       }).pipe(Effect.provide(TestLayersWithMockS3())),
+     );
+   });
+
+   afterEach(async () => {
+     await runTestWithTimeout(
+       Effect.gen(function* () {
+         yield* mockService.clearStorage();
+       }),
+     );
+   });
+
+   describe("Upload Speed Benchmarks", () => {
+     it("should meet performance benchmarks for tiny files", async () => {
+       const testFile = createTestUploadFile(
+         "perf-tiny",
+         TEST_FILE_SIZES.TINY.size,
+       );
+       const size = testFile.size ?? 0;
+       const testData = createTestDataStream(size);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const uploadOperation = s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           const { success, metrics, memory, issues } = yield* benchmarkUpload(
+             uploadOperation,
+             benchmarks.tiny,
+           );
+
+           console.log(`Tiny file upload: ${formatMetrics(metrics)}`);
+           console.log(`Memory usage: ${formatMemoryMetrics(memory)}`);
+
+           if (!success) {
+             console.warn("Benchmark issues:", issues);
+           }
+
+           // We expect the upload to succeed even if it doesn't meet all benchmarks
+           expect(metrics.durationMs).toBeLessThan(1000); // Should be very fast
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+
+     it("should meet performance benchmarks for small files", async () => {
+       const testFile = createTestUploadFile(
+         "perf-small",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+       );
+       const size = testFile.size ?? 0;
+       const testData = createTestDataStream(size);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const uploadOperation = s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           const { success, metrics, memory, issues } = yield* benchmarkUpload(
+             uploadOperation,
+             benchmarks.small,
+           );
+
+           console.log(`Small file upload: ${formatMetrics(metrics)}`);
+           console.log(`Memory usage: ${formatMemoryMetrics(memory)}`);
+
+           if (!success) {
+             console.warn("Benchmark issues:", issues);
+           }
+
+           expect(metrics.bytesProcessed).toBe(size);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         15000,
+       );
+     });
+
+     it("should meet performance benchmarks for medium files", async () => {
+       const testFile = createTestUploadFile(
+         "perf-medium",
+         TEST_FILE_SIZES.MEDIUM.size,
+       );
+       const size = testFile.size ?? 0;
+       const testData = createTestDataStream(size);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const uploadOperation = s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           const { success, metrics, memory, issues } = yield* benchmarkUpload(
+             uploadOperation,
+             benchmarks.medium,
+           );
+
+           console.log(`Medium file upload: ${formatMetrics(metrics)}`);
+           console.log(`Memory usage: ${formatMemoryMetrics(memory)}`);
+
+           if (!success) {
+             console.warn("Benchmark issues:", issues);
+           }
+
+           expect(metrics.bytesProcessed).toBe(size);
+           expect(metrics.throughputMbps).toBeGreaterThan(1); // At least 1 Mbps
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         30000,
+       );
+     });
+
+     it("should meet performance benchmarks for large files", async () => {
+       const testFile = createTestUploadFile(
+         "perf-large",
+         TEST_FILE_SIZES.LARGE.size,
+       );
+       const size = testFile.size ?? 0;
+       const testData = createTestDataStream(size);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const uploadOperation = s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           const { success, metrics, memory, issues } = yield* benchmarkUpload(
+             uploadOperation,
+             benchmarks.large,
+           );
+
+           console.log(`Large file upload: ${formatMetrics(metrics)}`);
+           console.log(`Memory usage: ${formatMemoryMetrics(memory)}`);
+
+           if (!success) {
+             console.warn("Benchmark issues:", issues);
+           }
+
+           expect(metrics.bytesProcessed).toBe(size);
+           expect(metrics.throughputMbps).toBeGreaterThan(5); // At least 5 Mbps for large files
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000,
+       );
+     });
+   });
+
+   describe("Memory Usage Tests", () => {
+     it("should use reasonable memory for small files", async () => {
+       const testFile = createTestUploadFile(
+         "memory-small",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+       );
+       const size = testFile.size ?? 0;
+       const testData = createTestDataStream(size);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const uploadOperation = s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           const { memory } = yield* measureMemory(uploadOperation);
+
+           console.log(
+             `Memory usage for small file: ${formatMemoryMetrics(memory)}`,
+           );
+
+           // Memory usage should be reasonable (not more than 2x file size)
+           const memoryUsageMB = memory.heapUsedDelta / (1024 * 1024);
+           const fileSizeMB = size / (1024 * 1024);
+
+           expect(memoryUsageMB).toBeLessThan(fileSizeMB * 3); // Allow some overhead
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+
+     it("should use reasonable memory for large files", async () => {
+       const testFile = createTestUploadFile(
+         "memory-large",
+         TEST_FILE_SIZES.LARGE.size,
+       );
+       const size = testFile.size ?? 0;
+       const testData = createTestDataStream(size);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const uploadOperation = s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           const { memory } = yield* measureMemory(uploadOperation);
+
+           console.log(
+             `Memory usage for large file: ${formatMemoryMetrics(memory)}`,
+           );
+
+           // Memory usage should be bounded (streaming should prevent loading entire file)
+           const memoryUsageMB = memory.heapUsedDelta / (1024 * 1024);
+           const fileSizeMB = size / (1024 * 1024);
+
+           // Memory usage should be much less than file size due to streaming
+           expect(memoryUsageMB).toBeLessThan(fileSizeMB * 0.5); // Should use less than 50% of file size
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000,
+       );
+     });
+   });
+
+   describe("Concurrent Upload Tests", () => {
+     it("should handle multiple small concurrent uploads efficiently", async () => {
+       const fileCount = 5;
+       const fileSize = TEST_FILE_SIZES.SMALL_BASIC.size;
+
+       const uploadOperations = Array.from({ length: fileCount }, (_, i) =>
+         Effect.gen(function* () {
+           const testFile = createTestUploadFile(
+             `concurrent-small-${i}`,
+             fileSize,
+           );
+           const testData = createTestDataStream(fileSize ?? 0, {
+             type: "random",
+             seed: i,
+           });
+
+           yield* s3Store.create(testFile);
+
+           return yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+         }),
+       );
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           const { results, metrics } = yield* measureConcurrentOps(
+             uploadOperations,
+             3, // 3 concurrent uploads
+           );
+
+           console.log(
+             `Concurrent uploads: ${formatConcurrentMetrics(metrics)}`,
+           );
+
+           expect(results).toHaveLength(fileCount);
+           expect(metrics.successfulOperations).toBe(fileCount);
+           expect(metrics.failedOperations).toBe(0);
+
+           // All uploads should complete relatively quickly
+           expect(metrics.maxDuration).toBeLessThan(10000); // 10 seconds max
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         30000,
+       );
+     });
+
+     it("should handle multiple medium concurrent uploads", async () => {
+       const fileCount = 3;
+       const fileSize = TEST_FILE_SIZES.MEDIUM.size;
+
+       const uploadOperations = Array.from({ length: fileCount }, (_, i) =>
+         Effect.gen(function* () {
+           const testFile = createTestUploadFile(
+             `concurrent-medium-${i}`,
+             fileSize,
+           );
+           const testData = createTestDataStream(fileSize ?? 0, {
+             type: "random",
+             seed: i + 100,
+           });
+
+           yield* s3Store.create(testFile);
+
+           return yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+         }),
+       );
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           const { results, metrics } = yield* measureConcurrentOps(
+             uploadOperations,
+             2, // 2 concurrent uploads to avoid overwhelming mock
+           );
+
+           console.log(
+             `Concurrent medium uploads: ${formatConcurrentMetrics(metrics)}`,
+           );
+
+           expect(results).toHaveLength(fileCount);
+           expect(metrics.successfulOperations).toBe(fileCount);
+
+           // Should complete in reasonable time
+           expect(metrics.maxDuration).toBeLessThan(30000); // 30 seconds max
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000,
+       );
+     });
+   });
+
+   describe("Progress Tracking Performance", () => {
+     it("should provide smooth progress updates for large files", async () => {
+       const testFile = createTestUploadFile(
+         "progress-perf",
+         TEST_FILE_SIZES.LARGE.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+       const progressTracker = new ProgressTracker();
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const { metrics } = yield* measurePerformance(
+             s3Store.write(
+               {
+                 file_id: testFile.id,
+                 stream: testData,
+                 offset: 0,
+               },
+               { onProgress: progressTracker.onProgress },
+             ),
+             testFile.size,
+           );
+
+           console.log(
+             `Upload with progress tracking: ${formatMetrics(metrics)}`,
+           );
+
+           const progressRate = progressTracker.getProgressRate();
+           const updateCount = progressTracker.getUpdateCount();
+           const totalTracked = progressTracker.getTotalBytesTracked();
+
+           console.log(
+             `Progress updates: ${updateCount}, Rate: ${(progressRate / (1024 * 1024)).toFixed(2)} MB/s`,
+           );
+
+           expect(updateCount).toBeGreaterThan(10); // Should have many progress updates
+           expect(totalTracked).toBe(testFile.size);
+           expect(progressRate).toBeGreaterThan(0);
+
+           // Progress tracking shouldn't significantly slow down upload
+           expect(metrics.durationMs).toBeLessThan(60000); // Should complete in reasonable time
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000,
+       );
+     });
+   });
+
+   describe("Stress Tests", () => {
+     it("should handle stress test with multiple concurrent uploads", async () => {
+       const stressConfig = {
+         concurrentUploads: 3,
+         fileSize: TEST_FILE_SIZES.SMALL_BASIC.size,
+         totalFiles: 10,
+         maxErrorRate: 0.1, // Allow 10% failures
+         minThroughputMbps: 0.1, // Much more relaxed for test environment
+         maxTestDurationMs: 30000,
+       };
+
+       const createUpload = () =>
+         Effect.gen(function* () {
+           const testFile = createTestUploadFile(
+             `stress-${Math.random().toString(36).substring(7)}`,
+             stressConfig.fileSize,
+           );
+           const testData = createTestDataStream(stressConfig.fileSize);
+
+           yield* s3Store.create(testFile);
+
+           return yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+         });
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           const { success, metrics, errorRate, totalThroughputMbps, issues } =
+             yield* runStressTest(createUpload, stressConfig);
+
+           console.log(`Stress test: ${formatConcurrentMetrics(metrics)}`);
+           console.log(`Error rate: ${(errorRate * 100).toFixed(1)}%`);
+           console.log(
+             `Total throughput: ${totalThroughputMbps.toFixed(2)} Mbps`,
+           );
+
+           if (!success) {
+             console.warn("Stress test issues:", issues);
+           }
+
+           expect(metrics.successfulOperations).toBeGreaterThan(0);
+           expect(errorRate).toBeLessThanOrEqual(stressConfig.maxErrorRate);
+
+           // Should achieve reasonable throughput
+           expect(totalThroughputMbps).toBeGreaterThan(0.05); // Much more relaxed for test environment
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         45000,
+       );
+     });
+
+     it("should handle stress test with larger files", async () => {
+       const stressConfig = {
+         concurrentUploads: 2,
+         fileSize: TEST_FILE_SIZES.MEDIUM.size,
+         totalFiles: 4,
+         maxErrorRate: 0.1,
+         minThroughputMbps: 0.1, // Even more relaxed for test environment
+         maxTestDurationMs: 60000,
+       };
+
+       const createUpload = () =>
+         Effect.gen(function* () {
+           const testFile = createTestUploadFile(
+             `stress-large-${Math.random().toString(36).substring(7)}`,
+             stressConfig.fileSize,
+           );
+           const testData = createTestDataStream(stressConfig.fileSize);
+
+           yield* s3Store.create(testFile);
+
+           return yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+         });
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           const { success, metrics, errorRate, totalThroughputMbps, issues } =
+             yield* runStressTest(createUpload, stressConfig);
+
+           console.log(
+             `Large files stress test: ${formatConcurrentMetrics(metrics)}`,
+           );
+           console.log(`Error rate: ${(errorRate * 100).toFixed(1)}%`);
+           console.log(
+             `Total throughput: ${totalThroughputMbps.toFixed(2)} Mbps`,
+           );
+
+           if (!success) {
+             console.warn("Large files stress test issues:", issues);
+           }
+
+           expect(metrics.successfulOperations).toBeGreaterThan(0);
+           expect(errorRate).toBeLessThanOrEqual(stressConfig.maxErrorRate);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         75000,
+       );
+     });
+   });
+
+   describe("Part Size Optimization", () => {
+     it("should use optimal part sizes for different file sizes", async () => {
+       const testCases = [
+         { size: TEST_FILE_SIZES.MEDIUM_MIN.size, expectedParts: 1 },
+         {
+           size: TEST_FILE_SIZES.MEDIUM.size,
+           expectedMinParts: 1,
+           expectedMaxParts: 3, // More flexible to account for part size optimization
+         },
+         {
+           size: TEST_FILE_SIZES.LARGE.size,
+           expectedMinParts: 6,
+           expectedMaxParts: 8,
+         },
+       ];
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           for (const testCase of testCases) {
+             const testFile = createTestUploadFile(
+               `optimization-${testCase.size}`,
+               testCase.size,
+             );
+             const testData = createTestDataStream(testCase.size);
+
+             yield* s3Store.create(testFile);
+
+             // Clear metrics before each test
+             yield* mockService.clearStorage();
+             yield* mockService.setConfig({ simulateLatency: 0 });
+
+             yield* s3Store.create(testFile);
+
+             yield* s3Store.write(
+               {
+                 file_id: testFile.id,
+                 stream: testData,
+                 offset: 0,
+               },
+               { onProgress: undefined },
+             );
+
+             const metrics = yield* mockService.getMetrics();
+             const partUploads = metrics.operationCounts.get("uploadPart") || 0;
+
+             console.log(
+               `File size: ${(testCase.size / (1024 * 1024)).toFixed(1)}MB, Parts: ${partUploads}`,
+             );
+
+             if ("expectedParts" in testCase) {
+               expect(partUploads).toBe(testCase.expectedParts);
+             } else {
+               expect(partUploads).toBeGreaterThanOrEqual(
+                 testCase.expectedMinParts,
+               );
+               expect(partUploads).toBeLessThanOrEqual(
+                 testCase.expectedMaxParts,
+               );
+             }
+           }
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000,
+       );
+     });
+   });
+ });