@uploadista/data-store-filesystem 0.0.13-beta.4 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +15 -9
- package/tests/filesystem-store.test.ts +668 -0
- package/tests/utils/test-data-generator.ts +246 -0
- package/tests/utils/test-setup.ts +144 -0
- package/vitest.config.ts +39 -0
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@uploadista/data-store-filesystem",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "0.0.13
|
|
4
|
+
"version": "0.0.13",
|
|
5
5
|
"description": "File system data store for Uploadista",
|
|
6
6
|
"license": "MIT",
|
|
7
7
|
"author": "Uploadista",
|
|
@@ -14,20 +14,26 @@
|
|
|
14
14
|
}
|
|
15
15
|
},
|
|
16
16
|
"dependencies": {
|
|
17
|
-
"effect": "3.19.
|
|
18
|
-
"@uploadista/core": "0.0.13
|
|
19
|
-
"@uploadista/observability": "0.0.13
|
|
17
|
+
"effect": "3.19.3",
|
|
18
|
+
"@uploadista/core": "0.0.13",
|
|
19
|
+
"@uploadista/observability": "0.0.13"
|
|
20
20
|
},
|
|
21
21
|
"devDependencies": {
|
|
22
|
-
"@
|
|
23
|
-
"
|
|
24
|
-
"
|
|
22
|
+
"@effect/vitest": "0.27.0",
|
|
23
|
+
"@types/node": "24.10.1",
|
|
24
|
+
"tsdown": "0.16.3",
|
|
25
|
+
"vitest": "4.0.8",
|
|
26
|
+
"@uploadista/typescript-config": "0.0.13",
|
|
27
|
+
"@uploadista/kv-store-memory": "0.0.13"
|
|
25
28
|
},
|
|
26
29
|
"scripts": {
|
|
27
|
-
"dev": "tsc -b",
|
|
28
30
|
"build": "tsdown",
|
|
31
|
+
"check": "biome check --write ./src",
|
|
32
|
+
"dev": "tsc -b",
|
|
29
33
|
"format": "biome format --write ./src",
|
|
30
34
|
"lint": "biome lint --write ./src",
|
|
31
|
-
"
|
|
35
|
+
"test": "vitest",
|
|
36
|
+
"test:run": "vitest run",
|
|
37
|
+
"test:watch": "vitest --watch"
|
|
32
38
|
}
|
|
33
39
|
}
|
|
@@ -0,0 +1,668 @@
|
|
|
1
|
+
import type { DataStore, UploadFile } from "@uploadista/core/types";
|
|
2
|
+
import { UploadFileKVStore } from "@uploadista/core/types";
|
|
3
|
+
import { Effect } from "effect";
|
|
4
|
+
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
|
5
|
+
import { fileStore } from "../src/file-store";
|
|
6
|
+
import {
|
|
7
|
+
compareArrays,
|
|
8
|
+
createStandardTestFiles,
|
|
9
|
+
createTestDataStream,
|
|
10
|
+
generateData,
|
|
11
|
+
TEST_FILE_SIZES,
|
|
12
|
+
} from "./utils/test-data-generator";
|
|
13
|
+
import {
|
|
14
|
+
assertFileUploaded,
|
|
15
|
+
cleanupTestDirectory,
|
|
16
|
+
createTestDirectory,
|
|
17
|
+
createTestFilesystemStoreConfig,
|
|
18
|
+
createTestUploadFile,
|
|
19
|
+
getFileSize,
|
|
20
|
+
listFiles,
|
|
21
|
+
runTestWithTimeout,
|
|
22
|
+
TestLayersWithMemoryKV,
|
|
23
|
+
} from "./utils/test-setup";
|
|
24
|
+
|
|
25
|
+
describe("FilesystemStore - Basic Upload Tests", () => {
|
|
26
|
+
let filesystemStore: DataStore<UploadFile>;
|
|
27
|
+
let testDirectory: string;
|
|
28
|
+
|
|
29
|
+
beforeEach(async () => {
|
|
30
|
+
await runTestWithTimeout(
|
|
31
|
+
Effect.gen(function* () {
|
|
32
|
+
// Create a temporary test directory
|
|
33
|
+
testDirectory = yield* createTestDirectory();
|
|
34
|
+
|
|
35
|
+
// Create filesystem store with test configuration
|
|
36
|
+
const config = createTestFilesystemStoreConfig(testDirectory);
|
|
37
|
+
|
|
38
|
+
filesystemStore = (yield* fileStore(config)) as DataStore<UploadFile>;
|
|
39
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
40
|
+
);
|
|
41
|
+
});
|
|
42
|
+
|
|
43
|
+
afterEach(async () => {
|
|
44
|
+
await runTestWithTimeout(
|
|
45
|
+
Effect.gen(function* () {
|
|
46
|
+
// Clear all entries from KV store
|
|
47
|
+
const kvStore = yield* UploadFileKVStore;
|
|
48
|
+
if (!kvStore.list) {
|
|
49
|
+
return;
|
|
50
|
+
}
|
|
51
|
+
const keys = yield* kvStore.list();
|
|
52
|
+
if (keys.length > 0) {
|
|
53
|
+
yield* Effect.all(
|
|
54
|
+
keys.map((key) => kvStore.delete(key)),
|
|
55
|
+
{ concurrency: "unbounded" },
|
|
56
|
+
);
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
// Clean up test directory
|
|
60
|
+
yield* cleanupTestDirectory(testDirectory);
|
|
61
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
62
|
+
);
|
|
63
|
+
});
|
|
64
|
+
|
|
65
|
+
describe("Store Creation", () => {
|
|
66
|
+
it("should create filesystem store successfully", async () => {
|
|
67
|
+
await runTestWithTimeout(
|
|
68
|
+
Effect.sync(() => {
|
|
69
|
+
expect(filesystemStore).toBeDefined();
|
|
70
|
+
expect(filesystemStore.bucket).toBeDefined();
|
|
71
|
+
expect(filesystemStore.create).toBeDefined();
|
|
72
|
+
expect(filesystemStore.write).toBeDefined();
|
|
73
|
+
expect(filesystemStore.read).toBeDefined();
|
|
74
|
+
expect(filesystemStore.remove).toBeDefined();
|
|
75
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
76
|
+
);
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
it("should have correct capabilities", async () => {
|
|
80
|
+
await runTestWithTimeout(
|
|
81
|
+
Effect.sync(() => {
|
|
82
|
+
const capabilities = filesystemStore.getCapabilities();
|
|
83
|
+
|
|
84
|
+
expect(capabilities.supportsParallelUploads).toBe(false);
|
|
85
|
+
expect(capabilities.supportsConcatenation).toBe(false);
|
|
86
|
+
expect(capabilities.supportsDeferredLength).toBe(false);
|
|
87
|
+
expect(capabilities.supportsResumableUploads).toBe(true);
|
|
88
|
+
expect(capabilities.requiresOrderedChunks).toBe(true);
|
|
89
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
90
|
+
);
|
|
91
|
+
});
|
|
92
|
+
|
|
93
|
+
it("should validate upload strategies correctly", async () => {
|
|
94
|
+
await runTestWithTimeout(
|
|
95
|
+
Effect.gen(function* () {
|
|
96
|
+
const singleStrategy = yield* filesystemStore.validateUploadStrategy(
|
|
97
|
+
"single",
|
|
98
|
+
);
|
|
99
|
+
expect(singleStrategy).toBe(true);
|
|
100
|
+
|
|
101
|
+
const parallelStrategy = yield* filesystemStore.validateUploadStrategy(
|
|
102
|
+
"parallel",
|
|
103
|
+
);
|
|
104
|
+
expect(parallelStrategy).toBe(false);
|
|
105
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
106
|
+
);
|
|
107
|
+
});
|
|
108
|
+
});
|
|
109
|
+
|
|
110
|
+
describe("Small File Uploads", () => {
|
|
111
|
+
it("should upload tiny files successfully", async () => {
|
|
112
|
+
const testFile = createTestUploadFile(
|
|
113
|
+
"tiny-test",
|
|
114
|
+
TEST_FILE_SIZES.TINY.size,
|
|
115
|
+
);
|
|
116
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
117
|
+
|
|
118
|
+
await runTestWithTimeout(
|
|
119
|
+
Effect.gen(function* () {
|
|
120
|
+
// Create upload
|
|
121
|
+
const createdFile = yield* filesystemStore.create(testFile);
|
|
122
|
+
expect(createdFile.id).toBe(testFile.id);
|
|
123
|
+
expect(createdFile.size).toBe(testFile.size);
|
|
124
|
+
|
|
125
|
+
// Write data
|
|
126
|
+
const finalOffset = yield* filesystemStore.write(
|
|
127
|
+
{
|
|
128
|
+
file_id: testFile.id,
|
|
129
|
+
stream: testData,
|
|
130
|
+
offset: 0,
|
|
131
|
+
},
|
|
132
|
+
{ onProgress: undefined },
|
|
133
|
+
);
|
|
134
|
+
|
|
135
|
+
expect(finalOffset).toBe(testFile.size);
|
|
136
|
+
|
|
137
|
+
// Verify file was uploaded
|
|
138
|
+
yield* assertFileUploaded(
|
|
139
|
+
testDirectory,
|
|
140
|
+
testFile.id,
|
|
141
|
+
testFile.size ?? 0,
|
|
142
|
+
);
|
|
143
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
144
|
+
);
|
|
145
|
+
});
|
|
146
|
+
|
|
147
|
+
it("should upload small files (1MB) successfully", async () => {
|
|
148
|
+
const testFile = createTestUploadFile(
|
|
149
|
+
"small-test",
|
|
150
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
151
|
+
);
|
|
152
|
+
const testData = createTestDataStream(testFile.size ?? 0, {
|
|
153
|
+
type: "random",
|
|
154
|
+
seed: 12345,
|
|
155
|
+
});
|
|
156
|
+
|
|
157
|
+
await runTestWithTimeout(
|
|
158
|
+
Effect.gen(function* () {
|
|
159
|
+
yield* filesystemStore.create(testFile);
|
|
160
|
+
|
|
161
|
+
const finalOffset = yield* filesystemStore.write(
|
|
162
|
+
{
|
|
163
|
+
file_id: testFile.id,
|
|
164
|
+
stream: testData,
|
|
165
|
+
offset: 0,
|
|
166
|
+
},
|
|
167
|
+
{ onProgress: undefined },
|
|
168
|
+
);
|
|
169
|
+
|
|
170
|
+
expect(finalOffset).toBe(testFile.size);
|
|
171
|
+
|
|
172
|
+
// Verify the uploaded data matches the original
|
|
173
|
+
const uploadedData = yield* assertFileUploaded(
|
|
174
|
+
testDirectory,
|
|
175
|
+
testFile.id,
|
|
176
|
+
testFile.size ?? 0,
|
|
177
|
+
);
|
|
178
|
+
const originalData = generateData(testFile.size ?? 0, {
|
|
179
|
+
type: "random",
|
|
180
|
+
seed: 12345,
|
|
181
|
+
});
|
|
182
|
+
|
|
183
|
+
expect(compareArrays(uploadedData, originalData)).toBe(true);
|
|
184
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
185
|
+
);
|
|
186
|
+
});
|
|
187
|
+
|
|
188
|
+
it("should upload files with file extensions", async () => {
|
|
189
|
+
const testFile = createTestUploadFile(
|
|
190
|
+
"test-file",
|
|
191
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
192
|
+
{
|
|
193
|
+
metadata: {
|
|
194
|
+
fileName: "document.pdf",
|
|
195
|
+
},
|
|
196
|
+
},
|
|
197
|
+
);
|
|
198
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
199
|
+
|
|
200
|
+
await runTestWithTimeout(
|
|
201
|
+
Effect.gen(function* () {
|
|
202
|
+
const createdFile = yield* filesystemStore.create(testFile);
|
|
203
|
+
|
|
204
|
+
yield* filesystemStore.write(
|
|
205
|
+
{
|
|
206
|
+
file_id: testFile.id,
|
|
207
|
+
stream: testData,
|
|
208
|
+
offset: 0,
|
|
209
|
+
},
|
|
210
|
+
{ onProgress: undefined },
|
|
211
|
+
);
|
|
212
|
+
|
|
213
|
+
// Should create file with extension
|
|
214
|
+
yield* assertFileUploaded(
|
|
215
|
+
testDirectory,
|
|
216
|
+
"test-file.pdf",
|
|
217
|
+
testFile.size ?? 0,
|
|
218
|
+
);
|
|
219
|
+
|
|
220
|
+
// Verify storage path includes extension
|
|
221
|
+
expect(createdFile.storage.id).toContain(".pdf");
|
|
222
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
223
|
+
);
|
|
224
|
+
});
|
|
225
|
+
|
|
226
|
+
it("should create subdirectories as needed", async () => {
|
|
227
|
+
const testFile = createTestUploadFile(
|
|
228
|
+
"subdir/nested/file",
|
|
229
|
+
TEST_FILE_SIZES.TINY.size,
|
|
230
|
+
);
|
|
231
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
232
|
+
|
|
233
|
+
await runTestWithTimeout(
|
|
234
|
+
Effect.gen(function* () {
|
|
235
|
+
yield* filesystemStore.create(testFile);
|
|
236
|
+
|
|
237
|
+
yield* filesystemStore.write(
|
|
238
|
+
{
|
|
239
|
+
file_id: testFile.id,
|
|
240
|
+
stream: testData,
|
|
241
|
+
offset: 0,
|
|
242
|
+
},
|
|
243
|
+
{ onProgress: undefined },
|
|
244
|
+
);
|
|
245
|
+
|
|
246
|
+
// Verify file was created in subdirectory
|
|
247
|
+
yield* assertFileUploaded(
|
|
248
|
+
testDirectory,
|
|
249
|
+
"subdir/nested/file",
|
|
250
|
+
testFile.size ?? 0,
|
|
251
|
+
);
|
|
252
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
253
|
+
);
|
|
254
|
+
});
|
|
255
|
+
});
|
|
256
|
+
|
|
257
|
+
describe("Medium File Uploads", () => {
|
|
258
|
+
it("should upload medium files (10MB)", async () => {
|
|
259
|
+
const testFile = createTestUploadFile(
|
|
260
|
+
"medium-test",
|
|
261
|
+
TEST_FILE_SIZES.MEDIUM.size,
|
|
262
|
+
);
|
|
263
|
+
const testData = createTestDataStream(testFile.size ?? 0, {
|
|
264
|
+
type: "pattern",
|
|
265
|
+
pattern: new Uint8Array([0xab, 0xcd, 0xef]),
|
|
266
|
+
});
|
|
267
|
+
|
|
268
|
+
await runTestWithTimeout(
|
|
269
|
+
Effect.gen(function* () {
|
|
270
|
+
yield* filesystemStore.create(testFile);
|
|
271
|
+
|
|
272
|
+
const finalOffset = yield* filesystemStore.write(
|
|
273
|
+
{
|
|
274
|
+
file_id: testFile.id,
|
|
275
|
+
stream: testData,
|
|
276
|
+
offset: 0,
|
|
277
|
+
},
|
|
278
|
+
{ onProgress: undefined },
|
|
279
|
+
);
|
|
280
|
+
|
|
281
|
+
expect(finalOffset).toBe(testFile.size);
|
|
282
|
+
|
|
283
|
+
const uploadedData = yield* assertFileUploaded(
|
|
284
|
+
testDirectory,
|
|
285
|
+
testFile.id,
|
|
286
|
+
testFile.size ?? 0,
|
|
287
|
+
);
|
|
288
|
+
|
|
289
|
+
// Verify the data integrity
|
|
290
|
+
const originalData = generateData(testFile.size ?? 0, {
|
|
291
|
+
type: "pattern",
|
|
292
|
+
pattern: new Uint8Array([0xab, 0xcd, 0xef]),
|
|
293
|
+
});
|
|
294
|
+
expect(compareArrays(uploadedData, originalData)).toBe(true);
|
|
295
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
296
|
+
20000,
|
|
297
|
+
);
|
|
298
|
+
});
|
|
299
|
+
|
|
300
|
+
it("should upload large medium files (49MB)", async () => {
|
|
301
|
+
const testFile = createTestUploadFile(
|
|
302
|
+
"large-medium-test",
|
|
303
|
+
TEST_FILE_SIZES.MEDIUM_LARGE.size,
|
|
304
|
+
);
|
|
305
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
306
|
+
|
|
307
|
+
await runTestWithTimeout(
|
|
308
|
+
Effect.gen(function* () {
|
|
309
|
+
yield* filesystemStore.create(testFile);
|
|
310
|
+
|
|
311
|
+
const finalOffset = yield* filesystemStore.write(
|
|
312
|
+
{
|
|
313
|
+
file_id: testFile.id,
|
|
314
|
+
stream: testData,
|
|
315
|
+
offset: 0,
|
|
316
|
+
},
|
|
317
|
+
{ onProgress: undefined },
|
|
318
|
+
);
|
|
319
|
+
|
|
320
|
+
expect(finalOffset).toBe(testFile.size);
|
|
321
|
+
yield* assertFileUploaded(
|
|
322
|
+
testDirectory,
|
|
323
|
+
testFile.id,
|
|
324
|
+
testFile.size ?? 0,
|
|
325
|
+
);
|
|
326
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
327
|
+
30000,
|
|
328
|
+
);
|
|
329
|
+
});
|
|
330
|
+
});
|
|
331
|
+
|
|
332
|
+
describe("Large File Uploads", () => {
|
|
333
|
+
it("should upload large files (50MB) efficiently", async () => {
|
|
334
|
+
const testFile = createTestUploadFile(
|
|
335
|
+
"large-test",
|
|
336
|
+
TEST_FILE_SIZES.LARGE.size,
|
|
337
|
+
);
|
|
338
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
339
|
+
|
|
340
|
+
await runTestWithTimeout(
|
|
341
|
+
Effect.gen(function* () {
|
|
342
|
+
yield* filesystemStore.create(testFile);
|
|
343
|
+
|
|
344
|
+
const finalOffset = yield* filesystemStore.write(
|
|
345
|
+
{
|
|
346
|
+
file_id: testFile.id,
|
|
347
|
+
stream: testData,
|
|
348
|
+
offset: 0,
|
|
349
|
+
},
|
|
350
|
+
{ onProgress: undefined },
|
|
351
|
+
);
|
|
352
|
+
|
|
353
|
+
expect(finalOffset).toBe(testFile.size);
|
|
354
|
+
yield* assertFileUploaded(
|
|
355
|
+
testDirectory,
|
|
356
|
+
testFile.id,
|
|
357
|
+
testFile.size ?? 0,
|
|
358
|
+
);
|
|
359
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
360
|
+
45000,
|
|
361
|
+
);
|
|
362
|
+
});
|
|
363
|
+
});
|
|
364
|
+
|
|
365
|
+
describe("Upload Progress Tracking", () => {
|
|
366
|
+
it("should track progress for small files", async () => {
|
|
367
|
+
const testFile = createTestUploadFile(
|
|
368
|
+
"progress-small",
|
|
369
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
370
|
+
);
|
|
371
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
372
|
+
const progressUpdates: number[] = [];
|
|
373
|
+
|
|
374
|
+
await runTestWithTimeout(
|
|
375
|
+
Effect.gen(function* () {
|
|
376
|
+
yield* filesystemStore.create(testFile);
|
|
377
|
+
|
|
378
|
+
const finalOffset = yield* filesystemStore.write(
|
|
379
|
+
{
|
|
380
|
+
file_id: testFile.id,
|
|
381
|
+
stream: testData,
|
|
382
|
+
offset: 0,
|
|
383
|
+
},
|
|
384
|
+
{
|
|
385
|
+
onProgress: (chunkSize) => progressUpdates.push(chunkSize),
|
|
386
|
+
},
|
|
387
|
+
);
|
|
388
|
+
|
|
389
|
+
expect(finalOffset).toBe(testFile.size);
|
|
390
|
+
expect(progressUpdates.length).toBeGreaterThan(0);
|
|
391
|
+
|
|
392
|
+
// Sum of all chunks should equal file size
|
|
393
|
+
const totalBytes = progressUpdates.reduce((sum, size) => sum + size, 0);
|
|
394
|
+
expect(totalBytes).toBe(testFile.size);
|
|
395
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
396
|
+
);
|
|
397
|
+
});
|
|
398
|
+
});
|
|
399
|
+
|
|
400
|
+
describe("Resumable Uploads", () => {
|
|
401
|
+
it.skip("should support resumable uploads with offset", { timeout: 30000 }, async () => {
|
|
402
|
+
const testFile = createTestUploadFile(
|
|
403
|
+
"resumable-test",
|
|
404
|
+
TEST_FILE_SIZES.MEDIUM.size,
|
|
405
|
+
);
|
|
406
|
+
|
|
407
|
+
// Split the upload into two parts
|
|
408
|
+
const fileSize = testFile.size ?? 0;
|
|
409
|
+
const part1Size = Math.floor(fileSize / 2);
|
|
410
|
+
const part2Size = fileSize - part1Size;
|
|
411
|
+
|
|
412
|
+
const part1Data = createTestDataStream(part1Size, { type: "zeros" });
|
|
413
|
+
const part2Data = createTestDataStream(part2Size, { type: "ones" });
|
|
414
|
+
|
|
415
|
+
await runTestWithTimeout(
|
|
416
|
+
Effect.gen(function* () {
|
|
417
|
+
yield* filesystemStore.create(testFile);
|
|
418
|
+
|
|
419
|
+
// Upload first part
|
|
420
|
+
const offset1 = yield* filesystemStore.write(
|
|
421
|
+
{
|
|
422
|
+
file_id: testFile.id,
|
|
423
|
+
stream: part1Data,
|
|
424
|
+
offset: 0,
|
|
425
|
+
},
|
|
426
|
+
{ onProgress: undefined },
|
|
427
|
+
);
|
|
428
|
+
|
|
429
|
+
expect(offset1).toBe(part1Size);
|
|
430
|
+
|
|
431
|
+
// Verify partial upload
|
|
432
|
+
const partialSize = yield* getFileSize(testDirectory, testFile.id);
|
|
433
|
+
expect(partialSize).toBe(part1Size);
|
|
434
|
+
|
|
435
|
+
// Upload second part (resume)
|
|
436
|
+
const offset2 = yield* filesystemStore.write(
|
|
437
|
+
{
|
|
438
|
+
file_id: testFile.id,
|
|
439
|
+
stream: part2Data,
|
|
440
|
+
offset: offset1,
|
|
441
|
+
},
|
|
442
|
+
{ onProgress: undefined },
|
|
443
|
+
);
|
|
444
|
+
|
|
445
|
+
expect(offset2).toBe(fileSize);
|
|
446
|
+
|
|
447
|
+
// Verify complete file was uploaded
|
|
448
|
+
const uploadedData = yield* assertFileUploaded(
|
|
449
|
+
testDirectory,
|
|
450
|
+
testFile.id,
|
|
451
|
+
fileSize,
|
|
452
|
+
);
|
|
453
|
+
|
|
454
|
+
// Verify first part is zeros and second part is ones
|
|
455
|
+
for (let i = 0; i < part1Size; i++) {
|
|
456
|
+
expect(uploadedData[i]).toBe(0);
|
|
457
|
+
}
|
|
458
|
+
for (let i = part1Size; i < fileSize; i++) {
|
|
459
|
+
expect(uploadedData[i]).toBe(0xff);
|
|
460
|
+
}
|
|
461
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
462
|
+
25000,
|
|
463
|
+
);
|
|
464
|
+
});
|
|
465
|
+
});
|
|
466
|
+
|
|
467
|
+
describe("File Read Operations", () => {
|
|
468
|
+
it("should read uploaded file data", async () => {
|
|
469
|
+
const testFile = createTestUploadFile(
|
|
470
|
+
"read-test",
|
|
471
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
472
|
+
);
|
|
473
|
+
const originalData = generateData(testFile.size ?? 0, {
|
|
474
|
+
type: "text",
|
|
475
|
+
});
|
|
476
|
+
const testData = createTestDataStream(testFile.size ?? 0, {
|
|
477
|
+
type: "text",
|
|
478
|
+
});
|
|
479
|
+
|
|
480
|
+
await runTestWithTimeout(
|
|
481
|
+
Effect.gen(function* () {
|
|
482
|
+
yield* filesystemStore.create(testFile);
|
|
483
|
+
|
|
484
|
+
// Upload file
|
|
485
|
+
yield* filesystemStore.write(
|
|
486
|
+
{
|
|
487
|
+
file_id: testFile.id,
|
|
488
|
+
stream: testData,
|
|
489
|
+
offset: 0,
|
|
490
|
+
},
|
|
491
|
+
{ onProgress: undefined },
|
|
492
|
+
);
|
|
493
|
+
|
|
494
|
+
// Read file data
|
|
495
|
+
const readData = yield* filesystemStore.read(testFile.id);
|
|
496
|
+
|
|
497
|
+
expect(compareArrays(readData, originalData)).toBe(true);
|
|
498
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
499
|
+
);
|
|
500
|
+
});
|
|
501
|
+
|
|
502
|
+
it("should retrieve upload information accurately", async () => {
|
|
503
|
+
const testFile = createTestUploadFile(
|
|
504
|
+
"getupload-test",
|
|
505
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
506
|
+
);
|
|
507
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
508
|
+
|
|
509
|
+
await runTestWithTimeout(
|
|
510
|
+
Effect.gen(function* () {
|
|
511
|
+
yield* filesystemStore.create(testFile);
|
|
512
|
+
|
|
513
|
+
// Upload file
|
|
514
|
+
yield* filesystemStore.write(
|
|
515
|
+
{
|
|
516
|
+
file_id: testFile.id,
|
|
517
|
+
stream: testData,
|
|
518
|
+
offset: 0,
|
|
519
|
+
},
|
|
520
|
+
{ onProgress: undefined },
|
|
521
|
+
);
|
|
522
|
+
|
|
523
|
+
// Verify file size matches expected
|
|
524
|
+
const fileSize = yield* getFileSize(testDirectory, testFile.id);
|
|
525
|
+
expect(fileSize).toBe(testFile.size); // Should be fully uploaded
|
|
526
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
527
|
+
);
|
|
528
|
+
});
|
|
529
|
+
});
|
|
530
|
+
|
|
531
|
+
describe("File Deletion", () => {
|
|
532
|
+
it("should remove uploaded files", async () => {
|
|
533
|
+
const testFile = createTestUploadFile(
|
|
534
|
+
"delete-test",
|
|
535
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
536
|
+
);
|
|
537
|
+
const testData = createTestDataStream(testFile.size ?? 0);
|
|
538
|
+
|
|
539
|
+
await runTestWithTimeout(
|
|
540
|
+
Effect.gen(function* () {
|
|
541
|
+
yield* filesystemStore.create(testFile);
|
|
542
|
+
|
|
543
|
+
// Upload file
|
|
544
|
+
yield* filesystemStore.write(
|
|
545
|
+
{
|
|
546
|
+
file_id: testFile.id,
|
|
547
|
+
stream: testData,
|
|
548
|
+
offset: 0,
|
|
549
|
+
},
|
|
550
|
+
{ onProgress: undefined },
|
|
551
|
+
);
|
|
552
|
+
|
|
553
|
+
// Verify file exists
|
|
554
|
+
yield* assertFileUploaded(
|
|
555
|
+
testDirectory,
|
|
556
|
+
testFile.id,
|
|
557
|
+
testFile.size ?? 0,
|
|
558
|
+
);
|
|
559
|
+
|
|
560
|
+
// Remove file
|
|
561
|
+
yield* filesystemStore.remove(testFile.id);
|
|
562
|
+
|
|
563
|
+
// Verify file is deleted from KV store
|
|
564
|
+
const kvStore = yield* UploadFileKVStore;
|
|
565
|
+
const result = yield* Effect.either(kvStore.get(testFile.id));
|
|
566
|
+
expect(result._tag).toBe("Left");
|
|
567
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
568
|
+
);
|
|
569
|
+
});
|
|
570
|
+
});
|
|
571
|
+
|
|
572
|
+
describe("Standard Test Files", () => {
|
|
573
|
+
it("should handle all standard test file types", async () => {
|
|
574
|
+
const testFiles = createStandardTestFiles();
|
|
575
|
+
|
|
576
|
+
await runTestWithTimeout(
|
|
577
|
+
Effect.gen(function* () {
|
|
578
|
+
for (const testFileData of testFiles) {
|
|
579
|
+
const testFile = createTestUploadFile(
|
|
580
|
+
testFileData.id,
|
|
581
|
+
testFileData.size,
|
|
582
|
+
{
|
|
583
|
+
metadata: testFileData.metadata,
|
|
584
|
+
},
|
|
585
|
+
);
|
|
586
|
+
|
|
587
|
+
yield* filesystemStore.create(testFile);
|
|
588
|
+
|
|
589
|
+
const finalOffset = yield* filesystemStore.write(
|
|
590
|
+
{
|
|
591
|
+
file_id: testFile.id,
|
|
592
|
+
stream: testFileData.stream,
|
|
593
|
+
offset: 0,
|
|
594
|
+
},
|
|
595
|
+
{ onProgress: undefined },
|
|
596
|
+
);
|
|
597
|
+
|
|
598
|
+
expect(finalOffset).toBe(testFile.size);
|
|
599
|
+
|
|
600
|
+
const uploadedData = yield* assertFileUploaded(
|
|
601
|
+
testDirectory,
|
|
602
|
+
testFile.id,
|
|
603
|
+
testFile.size ?? 0,
|
|
604
|
+
);
|
|
605
|
+
expect(compareArrays(uploadedData, testFileData.data)).toBe(true);
|
|
606
|
+
}
|
|
607
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
608
|
+
60000,
|
|
609
|
+
);
|
|
610
|
+
});
|
|
611
|
+
});
|
|
612
|
+
|
|
613
|
+
describe("Error Handling", () => {
|
|
614
|
+
it("should handle file not found errors", async () => {
|
|
615
|
+
await runTestWithTimeout(
|
|
616
|
+
Effect.gen(function* () {
|
|
617
|
+
const result = yield* Effect.either(
|
|
618
|
+
filesystemStore.read("non-existent-file"),
|
|
619
|
+
);
|
|
620
|
+
|
|
621
|
+
expect(result._tag).toBe("Left");
|
|
622
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
623
|
+
);
|
|
624
|
+
});
|
|
625
|
+
|
|
626
|
+
it("should handle remove errors for non-existent files", async () => {
|
|
627
|
+
await runTestWithTimeout(
|
|
628
|
+
Effect.gen(function* () {
|
|
629
|
+
const result = yield* Effect.either(
|
|
630
|
+
filesystemStore.remove("non-existent-file"),
|
|
631
|
+
);
|
|
632
|
+
|
|
633
|
+
expect(result._tag).toBe("Left");
|
|
634
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
635
|
+
);
|
|
636
|
+
});
|
|
637
|
+
});
|
|
638
|
+
|
|
639
|
+
describe("Filesystem-Specific Features", () => {
|
|
640
|
+
it("should list all uploaded files", async () => {
|
|
641
|
+
const file1 = createTestUploadFile("file1", TEST_FILE_SIZES.TINY.size);
|
|
642
|
+
const file2 = createTestUploadFile("file2", TEST_FILE_SIZES.TINY.size);
|
|
643
|
+
const data1 = createTestDataStream(file1.size ?? 0);
|
|
644
|
+
const data2 = createTestDataStream(file2.size ?? 0);
|
|
645
|
+
|
|
646
|
+
await runTestWithTimeout(
|
|
647
|
+
Effect.gen(function* () {
|
|
648
|
+
yield* filesystemStore.create(file1);
|
|
649
|
+
yield* filesystemStore.write(
|
|
650
|
+
{ file_id: file1.id, stream: data1, offset: 0 },
|
|
651
|
+
{ onProgress: undefined },
|
|
652
|
+
);
|
|
653
|
+
|
|
654
|
+
yield* filesystemStore.create(file2);
|
|
655
|
+
yield* filesystemStore.write(
|
|
656
|
+
{ file_id: file2.id, stream: data2, offset: 0 },
|
|
657
|
+
{ onProgress: undefined },
|
|
658
|
+
);
|
|
659
|
+
|
|
660
|
+
const files = yield* listFiles(testDirectory);
|
|
661
|
+
expect(files.length).toBe(2);
|
|
662
|
+
expect(files).toContain("file1");
|
|
663
|
+
expect(files).toContain("file2");
|
|
664
|
+
}).pipe(Effect.provide(TestLayersWithMemoryKV())),
|
|
665
|
+
);
|
|
666
|
+
});
|
|
667
|
+
});
|
|
668
|
+
});
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
import { Stream } from "effect";
|
|
2
|
+
|
|
3
|
+
export interface TestFileSize {
|
|
4
|
+
name: string;
|
|
5
|
+
size: number;
|
|
6
|
+
description: string;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
export const TEST_FILE_SIZES: Record<string, TestFileSize> = {
|
|
10
|
+
TINY: {
|
|
11
|
+
name: "tiny",
|
|
12
|
+
size: 1024, // 1KB
|
|
13
|
+
description: "Tiny file for edge cases",
|
|
14
|
+
},
|
|
15
|
+
SMALL_BASIC: {
|
|
16
|
+
name: "small-basic",
|
|
17
|
+
size: 1024 * 1024, // 1MB
|
|
18
|
+
description: "Basic small file",
|
|
19
|
+
},
|
|
20
|
+
SMALL_LARGE: {
|
|
21
|
+
name: "small-large",
|
|
22
|
+
size: Math.floor(4.9 * 1024 * 1024), // 4.9MB
|
|
23
|
+
description: "Large small file",
|
|
24
|
+
},
|
|
25
|
+
MEDIUM: {
|
|
26
|
+
name: "medium",
|
|
27
|
+
size: 10 * 1024 * 1024, // 10MB
|
|
28
|
+
description: "Standard medium file",
|
|
29
|
+
},
|
|
30
|
+
MEDIUM_LARGE: {
|
|
31
|
+
name: "medium-large",
|
|
32
|
+
size: 49 * 1024 * 1024, // 49MB
|
|
33
|
+
description: "Large medium file",
|
|
34
|
+
},
|
|
35
|
+
LARGE: {
|
|
36
|
+
name: "large",
|
|
37
|
+
size: 50 * 1024 * 1024, // 50MB
|
|
38
|
+
description: "Standard large file",
|
|
39
|
+
},
|
|
40
|
+
} as const;
|
|
41
|
+
|
|
42
|
+
export interface TestFilePattern {
|
|
43
|
+
type: "random" | "zeros" | "ones" | "pattern" | "text";
|
|
44
|
+
pattern?: Uint8Array;
|
|
45
|
+
seed?: number;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Generate random data with optional seed for reproducibility
|
|
50
|
+
*/
|
|
51
|
+
export function generateRandomData(size: number, seed?: number): Uint8Array {
|
|
52
|
+
const data = new Uint8Array(size);
|
|
53
|
+
|
|
54
|
+
if (seed !== undefined) {
|
|
55
|
+
// Simple LCG for reproducible randomness
|
|
56
|
+
let rng = seed;
|
|
57
|
+
for (let i = 0; i < size; i++) {
|
|
58
|
+
rng = (rng * 1664525 + 1013904223) >>> 0;
|
|
59
|
+
data[i] = (rng >>> 24) & 0xff;
|
|
60
|
+
}
|
|
61
|
+
} else {
|
|
62
|
+
// crypto.getRandomValues has a 65,536 byte limit, so we need to generate in chunks
|
|
63
|
+
const maxChunkSize = 65536;
|
|
64
|
+
let offset = 0;
|
|
65
|
+
|
|
66
|
+
while (offset < size) {
|
|
67
|
+
const chunkSize = Math.min(maxChunkSize, size - offset);
|
|
68
|
+
const chunk = data.subarray(offset, offset + chunkSize);
|
|
69
|
+
crypto.getRandomValues(chunk);
|
|
70
|
+
offset += chunkSize;
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
return data;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Generate data filled with zeros
|
|
79
|
+
*/
|
|
80
|
+
export function generateZeroData(size: number): Uint8Array {
|
|
81
|
+
return new Uint8Array(size);
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Generate data filled with ones
|
|
86
|
+
*/
|
|
87
|
+
export function generateOnesData(size: number): Uint8Array {
|
|
88
|
+
const data = new Uint8Array(size);
|
|
89
|
+
data.fill(0xff);
|
|
90
|
+
return data;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
/**
|
|
94
|
+
* Generate data with repeating pattern
|
|
95
|
+
*/
|
|
96
|
+
export function generatePatternData(
|
|
97
|
+
size: number,
|
|
98
|
+
pattern: Uint8Array,
|
|
99
|
+
): Uint8Array {
|
|
100
|
+
const data = new Uint8Array(size);
|
|
101
|
+
for (let i = 0; i < size; i++) {
|
|
102
|
+
data[i] = pattern[i % pattern.length];
|
|
103
|
+
}
|
|
104
|
+
return data;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Generate text data
|
|
109
|
+
*/
|
|
110
|
+
export function generateTextData(size: number): Uint8Array {
|
|
111
|
+
const text = "The quick brown fox jumps over the lazy dog. ";
|
|
112
|
+
const encoder = new TextEncoder();
|
|
113
|
+
const pattern = encoder.encode(text);
|
|
114
|
+
return generatePatternData(size, pattern);
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
/**
|
|
118
|
+
* Generate test data based on pattern specification
|
|
119
|
+
*/
|
|
120
|
+
export function generateData(
|
|
121
|
+
size: number,
|
|
122
|
+
pattern?: TestFilePattern,
|
|
123
|
+
): Uint8Array {
|
|
124
|
+
if (!pattern) {
|
|
125
|
+
return generateRandomData(size);
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
switch (pattern.type) {
|
|
129
|
+
case "random":
|
|
130
|
+
return generateRandomData(size, pattern.seed);
|
|
131
|
+
case "zeros":
|
|
132
|
+
return generateZeroData(size);
|
|
133
|
+
case "ones":
|
|
134
|
+
return generateOnesData(size);
|
|
135
|
+
case "pattern":
|
|
136
|
+
return generatePatternData(size, pattern.pattern || new Uint8Array([0]));
|
|
137
|
+
case "text":
|
|
138
|
+
return generateTextData(size);
|
|
139
|
+
default:
|
|
140
|
+
return generateRandomData(size);
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
/**
|
|
145
|
+
* Create an Effect Stream from test data
|
|
146
|
+
*/
|
|
147
|
+
export function createTestDataStream(
|
|
148
|
+
size: number,
|
|
149
|
+
pattern?: TestFilePattern,
|
|
150
|
+
chunkSize = 64 * 1024, // 64KB chunks by default
|
|
151
|
+
): Stream.Stream<Uint8Array, never, never> {
|
|
152
|
+
const data = generateData(size, pattern);
|
|
153
|
+
|
|
154
|
+
return Stream.make(...chunkData(data, chunkSize));
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Split data into chunks
|
|
159
|
+
*/
|
|
160
|
+
function chunkData(data: Uint8Array, chunkSize: number): Uint8Array[] {
|
|
161
|
+
const chunks: Uint8Array[] = [];
|
|
162
|
+
let offset = 0;
|
|
163
|
+
|
|
164
|
+
while (offset < data.byteLength) {
|
|
165
|
+
const size = Math.min(chunkSize, data.byteLength - offset);
|
|
166
|
+
chunks.push(data.subarray(offset, offset + size));
|
|
167
|
+
offset += size;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
return chunks;
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
/**
|
|
174
|
+
* Compare two Uint8Arrays
|
|
175
|
+
*/
|
|
176
|
+
export function compareArrays(a: Uint8Array, b: Uint8Array): boolean {
|
|
177
|
+
if (a.byteLength !== b.byteLength) {
|
|
178
|
+
return false;
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
for (let i = 0; i < a.byteLength; i++) {
|
|
182
|
+
if (a[i] !== b[i]) {
|
|
183
|
+
return false;
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
return true;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
/**
|
|
191
|
+
* Create standard test files with different patterns
|
|
192
|
+
*/
|
|
193
|
+
export function createStandardTestFiles() {
|
|
194
|
+
return [
|
|
195
|
+
{
|
|
196
|
+
id: "test-zeros",
|
|
197
|
+
size: TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
198
|
+
stream: createTestDataStream(TEST_FILE_SIZES.SMALL_BASIC.size, {
|
|
199
|
+
type: "zeros",
|
|
200
|
+
}),
|
|
201
|
+
data: generateZeroData(TEST_FILE_SIZES.SMALL_BASIC.size),
|
|
202
|
+
metadata: { contentType: "application/octet-stream" },
|
|
203
|
+
},
|
|
204
|
+
{
|
|
205
|
+
id: "test-ones",
|
|
206
|
+
size: TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
207
|
+
stream: createTestDataStream(TEST_FILE_SIZES.SMALL_BASIC.size, {
|
|
208
|
+
type: "ones",
|
|
209
|
+
}),
|
|
210
|
+
data: generateOnesData(TEST_FILE_SIZES.SMALL_BASIC.size),
|
|
211
|
+
metadata: { contentType: "application/octet-stream" },
|
|
212
|
+
},
|
|
213
|
+
{
|
|
214
|
+
id: "test-pattern",
|
|
215
|
+
size: TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
216
|
+
stream: createTestDataStream(TEST_FILE_SIZES.SMALL_BASIC.size, {
|
|
217
|
+
type: "pattern",
|
|
218
|
+
pattern: new Uint8Array([0xaa, 0xbb, 0xcc]),
|
|
219
|
+
}),
|
|
220
|
+
data: generatePatternData(
|
|
221
|
+
TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
222
|
+
new Uint8Array([0xaa, 0xbb, 0xcc]),
|
|
223
|
+
),
|
|
224
|
+
metadata: { contentType: "application/octet-stream" },
|
|
225
|
+
},
|
|
226
|
+
{
|
|
227
|
+
id: "test-text",
|
|
228
|
+
size: TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
229
|
+
stream: createTestDataStream(TEST_FILE_SIZES.SMALL_BASIC.size, {
|
|
230
|
+
type: "text",
|
|
231
|
+
}),
|
|
232
|
+
data: generateTextData(TEST_FILE_SIZES.SMALL_BASIC.size),
|
|
233
|
+
metadata: { contentType: "text/plain" },
|
|
234
|
+
},
|
|
235
|
+
{
|
|
236
|
+
id: "test-random-seeded",
|
|
237
|
+
size: TEST_FILE_SIZES.SMALL_BASIC.size,
|
|
238
|
+
stream: createTestDataStream(TEST_FILE_SIZES.SMALL_BASIC.size, {
|
|
239
|
+
type: "random",
|
|
240
|
+
seed: 42,
|
|
241
|
+
}),
|
|
242
|
+
data: generateRandomData(TEST_FILE_SIZES.SMALL_BASIC.size, 42),
|
|
243
|
+
metadata: { contentType: "application/octet-stream" },
|
|
244
|
+
},
|
|
245
|
+
];
|
|
246
|
+
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import type { UploadFile } from "@uploadista/core/types";
|
|
5
|
+
import { uploadFileKvStore } from "@uploadista/core/types";
|
|
6
|
+
import { memoryKvStore } from "@uploadista/kv-store-memory";
|
|
7
|
+
import { Effect, Layer } from "effect";
|
|
8
|
+
import type { FileStoreOptions } from "../../src/file-store";
|
|
9
|
+
|
|
10
|
+
// Delivery URL shared by test store configs (see createTestFilesystemStoreConfig)
export const TEST_DELIVERY_URL = "http://localhost:3000/files";
|
|
12
|
+
|
|
13
|
+
// Helper to create a temporary test directory
|
|
14
|
+
export const createTestDirectory = (): Effect.Effect<string, never> =>
|
|
15
|
+
Effect.promise(async () => {
|
|
16
|
+
const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "uploadista-test-"));
|
|
17
|
+
return tmpDir;
|
|
18
|
+
});
|
|
19
|
+
|
|
20
|
+
// Helper to clean up test directory
|
|
21
|
+
export const cleanupTestDirectory = (
|
|
22
|
+
directory: string,
|
|
23
|
+
): Effect.Effect<void, never> =>
|
|
24
|
+
Effect.promise(async () => {
|
|
25
|
+
try {
|
|
26
|
+
await fs.rm(directory, { recursive: true, force: true });
|
|
27
|
+
} catch (error) {
|
|
28
|
+
// Ignore errors during cleanup
|
|
29
|
+
console.warn(`Failed to cleanup test directory ${directory}:`, error);
|
|
30
|
+
}
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
// Common filesystem store configuration for tests
|
|
34
|
+
export const createTestFilesystemStoreConfig = (
|
|
35
|
+
directory: string,
|
|
36
|
+
overrides: Partial<FileStoreOptions> = {},
|
|
37
|
+
): FileStoreOptions => ({
|
|
38
|
+
directory,
|
|
39
|
+
deliveryUrl: TEST_DELIVERY_URL,
|
|
40
|
+
...overrides,
|
|
41
|
+
});
|
|
42
|
+
|
|
43
|
+
// Layer that provides KV store for testing
|
|
44
|
+
export const TestLayersWithMemoryKV = () => {
|
|
45
|
+
return uploadFileKvStore.pipe(Layer.provide(memoryKvStore));
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
// Helper to run tests with timeout
|
|
49
|
+
export const runTestWithTimeout = async <E>(
|
|
50
|
+
effect: Effect.Effect<void, E>,
|
|
51
|
+
timeout = 10000,
|
|
52
|
+
) => {
|
|
53
|
+
await Effect.runPromise(Effect.timeout(effect, `${timeout} millis`));
|
|
54
|
+
};
|
|
55
|
+
|
|
56
|
+
// Helper to create test upload file
|
|
57
|
+
export const createTestUploadFile = (
|
|
58
|
+
id: string,
|
|
59
|
+
size: number,
|
|
60
|
+
options: {
|
|
61
|
+
metadata?: Record<string, string | number | boolean>;
|
|
62
|
+
} = {},
|
|
63
|
+
): UploadFile => ({
|
|
64
|
+
id,
|
|
65
|
+
size,
|
|
66
|
+
offset: 0,
|
|
67
|
+
metadata: options.metadata,
|
|
68
|
+
storage: {
|
|
69
|
+
id: "test-storage",
|
|
70
|
+
type: "filesystem",
|
|
71
|
+
path: id,
|
|
72
|
+
bucket: "test-directory",
|
|
73
|
+
},
|
|
74
|
+
sizeIsDeferred: false,
|
|
75
|
+
});
|
|
76
|
+
|
|
77
|
+
// Helper to assert file exists on filesystem
|
|
78
|
+
export const assertFileExists = (
|
|
79
|
+
directory: string,
|
|
80
|
+
fileId: string,
|
|
81
|
+
): Effect.Effect<void, never> =>
|
|
82
|
+
Effect.promise(async () => {
|
|
83
|
+
const filePath = path.join(directory, fileId);
|
|
84
|
+
try {
|
|
85
|
+
await fs.access(filePath);
|
|
86
|
+
} catch (_error) {
|
|
87
|
+
throw new Error(`File ${fileId} not found at ${filePath}`);
|
|
88
|
+
}
|
|
89
|
+
});
|
|
90
|
+
|
|
91
|
+
// Helper to read file from filesystem
|
|
92
|
+
export const readFileFromFilesystem = (
|
|
93
|
+
directory: string,
|
|
94
|
+
fileId: string,
|
|
95
|
+
): Effect.Effect<Uint8Array, never> =>
|
|
96
|
+
Effect.promise(async () => {
|
|
97
|
+
const filePath = path.join(directory, fileId);
|
|
98
|
+
const buffer = await fs.readFile(filePath);
|
|
99
|
+
return new Uint8Array(buffer);
|
|
100
|
+
});
|
|
101
|
+
|
|
102
|
+
// Helper to get file size from filesystem
|
|
103
|
+
export const getFileSize = (
|
|
104
|
+
directory: string,
|
|
105
|
+
fileId: string,
|
|
106
|
+
): Effect.Effect<number, never> =>
|
|
107
|
+
Effect.promise(async () => {
|
|
108
|
+
const filePath = path.join(directory, fileId);
|
|
109
|
+
const stats = await fs.stat(filePath);
|
|
110
|
+
return stats.size;
|
|
111
|
+
});
|
|
112
|
+
|
|
113
|
+
// Helper to list files in directory
|
|
114
|
+
export const listFiles = (directory: string): Effect.Effect<string[], never> =>
|
|
115
|
+
Effect.promise(async () => {
|
|
116
|
+
try {
|
|
117
|
+
const files = await fs.readdir(directory, { recursive: true });
|
|
118
|
+
return files.filter((file) => {
|
|
119
|
+
// Filter out directories
|
|
120
|
+
return !file.endsWith("/");
|
|
121
|
+
});
|
|
122
|
+
} catch (_error) {
|
|
123
|
+
return [];
|
|
124
|
+
}
|
|
125
|
+
});
|
|
126
|
+
|
|
127
|
+
// Helper to assert file was uploaded correctly
|
|
128
|
+
export const assertFileUploaded = (
|
|
129
|
+
directory: string,
|
|
130
|
+
fileId: string,
|
|
131
|
+
expectedSize: number,
|
|
132
|
+
): Effect.Effect<Uint8Array, never> =>
|
|
133
|
+
Effect.gen(function* () {
|
|
134
|
+
yield* assertFileExists(directory, fileId);
|
|
135
|
+
|
|
136
|
+
const size = yield* getFileSize(directory, fileId);
|
|
137
|
+
if (size !== expectedSize) {
|
|
138
|
+
throw new Error(
|
|
139
|
+
`File size mismatch: expected ${expectedSize}, got ${size}`,
|
|
140
|
+
);
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
return yield* readFileFromFilesystem(directory, fileId);
|
|
144
|
+
});
|
package/vitest.config.ts
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { defineConfig } from "vitest/config";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Shared vitest configuration template for uploadista-sdk packages
|
|
5
|
+
*
|
|
6
|
+
* This template should be used by all SDK packages to ensure consistent
|
|
7
|
+
* testing configuration across the monorepo.
|
|
8
|
+
*
|
|
9
|
+
* Key features:
|
|
10
|
+
* - Tests in dedicated `tests/` directories (not colocated with src)
|
|
11
|
+
* - Node environment for server-side code
|
|
12
|
+
* - V8 coverage provider
|
|
13
|
+
* - Global test functions available
|
|
14
|
+
* - Effect testing support via @effect/vitest
|
|
15
|
+
*
|
|
16
|
+
* Usage:
|
|
17
|
+
* Copy this file to your package root as `vitest.config.ts` and customize
|
|
18
|
+
* if needed (though most packages should use this as-is).
|
|
19
|
+
*/
|
|
20
|
+
export default defineConfig({
|
|
21
|
+
test: {
|
|
22
|
+
globals: true,
|
|
23
|
+
environment: "node",
|
|
24
|
+
include: ["tests/**/*.test.ts"],
|
|
25
|
+
exclude: ["node_modules", "dist"],
|
|
26
|
+
coverage: {
|
|
27
|
+
provider: "v8",
|
|
28
|
+
reporter: ["text", "json", "html"],
|
|
29
|
+
exclude: [
|
|
30
|
+
"node_modules/",
|
|
31
|
+
"dist/",
|
|
32
|
+
"**/*.d.ts",
|
|
33
|
+
"**/*.test.ts",
|
|
34
|
+
"**/*.spec.ts",
|
|
35
|
+
"tests/",
|
|
36
|
+
],
|
|
37
|
+
},
|
|
38
|
+
},
|
|
39
|
+
});
|