@uploadista/data-store-s3 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-check.log +5 -0
  3. package/LICENSE +21 -0
  4. package/README.md +588 -0
  5. package/dist/index.d.ts +2 -0
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +1 -0
  8. package/dist/observability.d.ts +45 -0
  9. package/dist/observability.d.ts.map +1 -0
  10. package/dist/observability.js +155 -0
  11. package/dist/s3-store-old.d.ts +51 -0
  12. package/dist/s3-store-old.d.ts.map +1 -0
  13. package/dist/s3-store-old.js +765 -0
  14. package/dist/s3-store.d.ts +9 -0
  15. package/dist/s3-store.d.ts.map +1 -0
  16. package/dist/s3-store.js +666 -0
  17. package/dist/services/__mocks__/s3-client-mock.service.d.ts +44 -0
  18. package/dist/services/__mocks__/s3-client-mock.service.d.ts.map +1 -0
  19. package/dist/services/__mocks__/s3-client-mock.service.js +379 -0
  20. package/dist/services/index.d.ts +2 -0
  21. package/dist/services/index.d.ts.map +1 -0
  22. package/dist/services/index.js +1 -0
  23. package/dist/services/s3-client.service.d.ts +68 -0
  24. package/dist/services/s3-client.service.d.ts.map +1 -0
  25. package/dist/services/s3-client.service.js +209 -0
  26. package/dist/test-observability.d.ts +6 -0
  27. package/dist/test-observability.d.ts.map +1 -0
  28. package/dist/test-observability.js +62 -0
  29. package/dist/types.d.ts +81 -0
  30. package/dist/types.d.ts.map +1 -0
  31. package/dist/types.js +1 -0
  32. package/dist/utils/calculations.d.ts +7 -0
  33. package/dist/utils/calculations.d.ts.map +1 -0
  34. package/dist/utils/calculations.js +41 -0
  35. package/dist/utils/error-handling.d.ts +7 -0
  36. package/dist/utils/error-handling.d.ts.map +1 -0
  37. package/dist/utils/error-handling.js +29 -0
  38. package/dist/utils/index.d.ts +4 -0
  39. package/dist/utils/index.d.ts.map +1 -0
  40. package/dist/utils/index.js +3 -0
  41. package/dist/utils/stream-adapter.d.ts +14 -0
  42. package/dist/utils/stream-adapter.d.ts.map +1 -0
  43. package/dist/utils/stream-adapter.js +41 -0
  44. package/package.json +36 -0
  45. package/src/__tests__/integration/s3-store.integration.test.ts +548 -0
  46. package/src/__tests__/multipart-logic.test.ts +395 -0
  47. package/src/__tests__/s3-store.edge-cases.test.ts +681 -0
  48. package/src/__tests__/s3-store.performance.test.ts +622 -0
  49. package/src/__tests__/s3-store.test.ts +662 -0
  50. package/src/__tests__/utils/performance-helpers.ts +459 -0
  51. package/src/__tests__/utils/test-data-generator.ts +331 -0
  52. package/src/__tests__/utils/test-setup.ts +256 -0
  53. package/src/index.ts +1 -0
  54. package/src/s3-store.ts +1059 -0
  55. package/src/services/__mocks__/s3-client-mock.service.ts +604 -0
  56. package/src/services/index.ts +1 -0
  57. package/src/services/s3-client.service.ts +359 -0
  58. package/src/types.ts +96 -0
  59. package/src/utils/calculations.ts +61 -0
  60. package/src/utils/error-handling.ts +52 -0
  61. package/src/utils/index.ts +3 -0
  62. package/src/utils/stream-adapter.ts +50 -0
  63. package/tsconfig.json +19 -0
  64. package/tsconfig.tsbuildinfo +1 -0
  65. package/vitest.config.ts +15 -0
package/src/__tests__/s3-store.test.ts
@@ -0,0 +1,662 @@
+ import { UploadFileKVStore } from "@uploadista/core/types";
+ import { Effect } from "effect";
+ import { afterEach, beforeEach, describe, expect, it } from "vitest";
+ import { createS3StoreImplementation } from "../s3-store";
+ import type { S3ClientService } from "../services";
+ import type { S3Store } from "../types";
+ import {
+   compareArrays,
+   createStandardTestFiles,
+   createTestDataStream,
+   generateData,
+   streamToArray,
+   TEST_FILE_SIZES,
+ } from "./utils/test-data-generator";
+ import {
+   assertFileUploaded,
+   assertMetricsRecorded,
+   createTestS3StoreConfig,
+   createTestUploadFile,
+   type MockS3TestMethods,
+   runTestWithTimeout,
+   setupTestEnvironment,
+   TestLayersWithMockS3,
+ } from "./utils/test-setup";
+
+ describe("S3Store - Basic Upload Tests", () => {
+   let s3Store: S3Store;
+   let mockService: S3ClientService["Type"] & MockS3TestMethods;
+
+   beforeEach(async () => {
+     await runTestWithTimeout(
+       Effect.gen(function* () {
+         // Setup test environment with mock S3
+         mockService = yield* setupTestEnvironment();
+
+         // Create S3 store with test configuration
+         const kvStore = yield* UploadFileKVStore;
+         const config = createTestS3StoreConfig();
+
+         s3Store = yield* createS3StoreImplementation({
+           ...config,
+           kvStore,
+         });
+       }).pipe(Effect.provide(TestLayersWithMockS3())),
+     );
+   });
+
+   afterEach(async () => {
+     await runTestWithTimeout(
+       Effect.gen(function* () {
+         // Clear both S3 mock storage and KV store
+         yield* mockService.clearStorage();
+
+         // Clear all entries from KV store
+         const kvStore = yield* UploadFileKVStore;
+         if (!kvStore.list) {
+           return;
+         }
+         const keys = yield* kvStore.list();
+         if (keys.length > 0) {
+           yield* Effect.all(
+             keys.map((key) => kvStore.delete(key)),
+             { concurrency: "unbounded" },
+           );
+         }
+       }).pipe(Effect.provide(TestLayersWithMockS3())),
+     );
+   });
+
+   describe("Small File Uploads (< 5MB)", () => {
+     it("should upload tiny files successfully", async () => {
+       const testFile = createTestUploadFile(
+         "tiny-test",
+         TEST_FILE_SIZES.TINY.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           // Create upload
+           const createdFile = yield* s3Store.create(testFile);
+           expect(createdFile.id).toBe(testFile.id);
+           expect(createdFile.size).toBe(testFile.size);
+
+           // Write data
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+
+           // Verify file was uploaded
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           // Verify metrics
+           yield* assertMetricsRecorded(mockService, "createMultipartUpload", 1);
+           yield* assertMetricsRecorded(
+             mockService,
+             "completeMultipartUpload",
+             1,
+           );
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+
+     it("should upload small files (1MB) successfully", async () => {
+       const testFile = createTestUploadFile(
+         "small-test",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0, {
+         type: "random",
+         seed: 12345,
+       });
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+
+           // Verify the uploaded data matches the original
+           const uploadedData = yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+           const originalData = generateData(testFile.size ?? 0, {
+             type: "random",
+             seed: 12345,
+           });
+
+           expect(compareArrays(uploadedData, originalData)).toBe(true);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+
+     it("should upload files just under multipart threshold (4.9MB)", async () => {
+       const testFile = createTestUploadFile(
+         "large-small-test",
+         TEST_FILE_SIZES.SMALL_LARGE.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+
+     it("should handle metadata for small files", async () => {
+       const testFile = createTestUploadFile(
+         "metadata-test",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+         {
+           metadata: {
+             contentType: "text/plain",
+             cacheControl: "no-cache, max-age=0",
+           },
+         },
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+
+           // Verify metadata was stored (check multipart upload creation)
+           const storage = yield* mockService.getStorage();
+           const uploads = Array.from(storage.multipartUploads.values());
+           expect(uploads).toHaveLength(0); // Should be completed and removed
+
+           // File should exist with correct size
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+   });
+
+   describe("Medium File Uploads (5MB - 50MB)", () => {
+     it("should upload files at multipart threshold (5MB)", async () => {
+       const testFile = createTestUploadFile(
+         "medium-min-test",
+         TEST_FILE_SIZES.MEDIUM_MIN.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           // Should use multipart upload
+           yield* assertMetricsRecorded(mockService, "uploadPart", 1);
+           yield* assertMetricsRecorded(
+             mockService,
+             "completeMultipartUpload",
+             1,
+           );
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         15000, // Longer timeout for larger files
+       );
+     });
+
+     it("should upload medium files (10MB) with multiple parts", async () => {
+       const testFile = createTestUploadFile(
+         "medium-test",
+         TEST_FILE_SIZES.MEDIUM.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0, {
+         type: "pattern",
+         pattern: new Uint8Array([0xab, 0xcd, 0xef]),
+       });
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+
+           const uploadedData = yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           // Verify the data integrity
+           const originalData = generateData(testFile.size ?? 0, {
+             type: "pattern",
+             pattern: new Uint8Array([0xab, 0xcd, 0xef]),
+           });
+           expect(compareArrays(uploadedData, originalData)).toBe(true);
+
+           // Should have used multiple parts for 10MB with 8MB part size
+           const metrics = yield* mockService.getMetrics();
+           const partUploads = metrics.operationCounts.get("uploadPart") || 0;
+           expect(partUploads).toBeGreaterThan(1);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         20000,
+       );
+     });
+
+     it("should upload large medium files (49MB)", async () => {
+       const testFile = createTestUploadFile(
+         "large-medium-test",
+         TEST_FILE_SIZES.MEDIUM_LARGE.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           // Should have used multiple parts
+           const metrics = yield* mockService.getMetrics();
+           const partUploads = metrics.operationCounts.get("uploadPart") || 0;
+           expect(partUploads).toBeGreaterThanOrEqual(6); // ~6 parts for 49MB with 8MB parts
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         30000,
+       );
+     });
+   });
+
+   describe("Large File Uploads (50MB+)", () => {
+     it("should upload large files (50MB) efficiently", async () => {
+       const testFile = createTestUploadFile(
+         "large-test",
+         TEST_FILE_SIZES.LARGE.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           // Should have used multiple parts optimally
+           const metrics = yield* mockService.getMetrics();
+           const partUploads = metrics.operationCounts.get("uploadPart") || 0;
+           expect(partUploads).toBeGreaterThanOrEqual(6); // ~7 parts for 50MB with 8MB parts
+           expect(partUploads).toBeLessThanOrEqual(8);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         45000,
+       );
+     });
+
+     it("should upload extra large files (100MB) with optimal part sizing", async () => {
+       const testFile = createTestUploadFile(
+         "xl-test",
+         TEST_FILE_SIZES.LARGE_XL.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           const metrics = yield* mockService.getMetrics();
+           const partUploads = metrics.operationCounts.get("uploadPart") || 0;
+           expect(partUploads).toBeGreaterThanOrEqual(12); // ~13 parts for 100MB with 8MB parts
+           expect(partUploads).toBeLessThanOrEqual(15);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000,
+       );
+     });
+   });
+
+   describe("Upload Progress Tracking", () => {
+     it("should track progress for small files", async () => {
+       const testFile = createTestUploadFile(
+         "progress-small",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+       const progressUpdates: number[] = [];
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             {
+               onProgress: (offset) => progressUpdates.push(offset),
+             },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           expect(progressUpdates.length).toBeGreaterThan(0);
+           expect(progressUpdates[progressUpdates.length - 1]).toBe(
+             testFile.size,
+           );
+
+           // Progress should be monotonically increasing
+           for (let i = 1; i < progressUpdates.length; i++) {
+             expect(progressUpdates[i]).toBeGreaterThanOrEqual(
+               progressUpdates[i - 1],
+             );
+           }
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+
+     it("should track progress for large files", async () => {
+       const testFile = createTestUploadFile(
+         "progress-large",
+         TEST_FILE_SIZES.LARGE.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+       const progressUpdates: number[] = [];
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           const finalOffset = yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             {
+               onProgress: (offset) => progressUpdates.push(offset),
+             },
+           );
+
+           expect(finalOffset).toBe(testFile.size);
+           expect(progressUpdates.length).toBeGreaterThan(10); // Should have many progress updates
+           expect(progressUpdates[progressUpdates.length - 1]).toBe(
+             testFile.size,
+           );
+
+           // Progress should be monotonically increasing
+           for (let i = 1; i < progressUpdates.length; i++) {
+             expect(progressUpdates[i]).toBeGreaterThanOrEqual(
+               progressUpdates[i - 1],
+             );
+           }
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         45000,
+       );
+     });
+   });
+
+   describe("Upload Retrieval", () => {
+     it("should retrieve upload information accurately", async () => {
+       const testFile = createTestUploadFile(
+         "retrieve-test",
+         TEST_FILE_SIZES.MEDIUM.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           // Upload file
+           yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           // Retrieve upload info
+           const uploadInfo = yield* s3Store.getUpload(testFile.id);
+
+           expect(uploadInfo.id).toBe(testFile.id);
+           expect(uploadInfo.size).toBe(testFile.size);
+           expect(uploadInfo.offset).toBe(testFile.size); // Should be fully uploaded
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         20000,
+       );
+     });
+
+     it("should read uploaded file data", async () => {
+       const testFile = createTestUploadFile(
+         "read-test",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+       );
+       const originalData = generateData(testFile.size ?? 0, {
+         type: "text",
+       });
+       const testData = createTestDataStream(testFile.size ?? 0, {
+         type: "text",
+       });
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           // Upload file
+           yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           // Read file data
+           const readStream = yield* s3Store.read(testFile.id);
+           const readData = yield* Effect.promise(
+             async () => await streamToArray(readStream),
+           );
+
+           expect(compareArrays(readData, originalData)).toBe(true);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+   });
+
+   describe("File Deletion", () => {
+     it("should remove uploaded files", async () => {
+       const testFile = createTestUploadFile(
+         "delete-test",
+         TEST_FILE_SIZES.SMALL_BASIC.size,
+       );
+       const testData = createTestDataStream(testFile.size ?? 0);
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           yield* s3Store.create(testFile);
+
+           // Upload file
+           yield* s3Store.write(
+             {
+               file_id: testFile.id,
+               stream: testData,
+               offset: 0,
+             },
+             { onProgress: undefined },
+           );
+
+           // Verify file exists
+           yield* assertFileUploaded(
+             mockService,
+             testFile.id,
+             testFile.size ?? 0,
+           );
+
+           // Remove file
+           yield* s3Store.remove(testFile.id);
+
+           // Verify file is deleted
+           const storage = yield* mockService.getStorage();
+           expect(storage.objects.has(testFile.id)).toBe(false);
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+       );
+     });
+   });
+
+   describe("Standard Test Files", () => {
+     it("should handle all standard test file types", async () => {
+       const testFiles = createStandardTestFiles();
+
+       await runTestWithTimeout(
+         Effect.gen(function* () {
+           for (const testFileData of testFiles.slice(0, 5)) {
+             // Test first 5 to keep test time reasonable
+             const testFile = createTestUploadFile(
+               testFileData.id,
+               testFileData.size,
+               {
+                 metadata: testFileData.metadata,
+               },
+             );
+
+             yield* s3Store.create(testFile);
+
+             const finalOffset = yield* s3Store.write(
+               {
+                 file_id: testFile.id,
+                 stream: testFileData.stream,
+                 offset: 0,
+               },
+               { onProgress: undefined },
+             );
+
+             expect(finalOffset).toBe(testFile.size);
+
+             const uploadedData = yield* assertFileUploaded(
+               mockService,
+               testFile.id,
+               testFile.size ?? 0,
+             );
+             expect(compareArrays(uploadedData, testFileData.data)).toBe(true);
+           }
+         }).pipe(Effect.provide(TestLayersWithMockS3())),
+         60000, // Longer timeout for multiple files
+       );
+     });
+   });
+ });