@uploadista/data-store-s3 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +5 -0
- package/LICENSE +21 -0
- package/README.md +588 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1 -0
- package/dist/observability.d.ts +45 -0
- package/dist/observability.d.ts.map +1 -0
- package/dist/observability.js +155 -0
- package/dist/s3-store-old.d.ts +51 -0
- package/dist/s3-store-old.d.ts.map +1 -0
- package/dist/s3-store-old.js +765 -0
- package/dist/s3-store.d.ts +9 -0
- package/dist/s3-store.d.ts.map +1 -0
- package/dist/s3-store.js +666 -0
- package/dist/services/__mocks__/s3-client-mock.service.d.ts +44 -0
- package/dist/services/__mocks__/s3-client-mock.service.d.ts.map +1 -0
- package/dist/services/__mocks__/s3-client-mock.service.js +379 -0
- package/dist/services/index.d.ts +2 -0
- package/dist/services/index.d.ts.map +1 -0
- package/dist/services/index.js +1 -0
- package/dist/services/s3-client.service.d.ts +68 -0
- package/dist/services/s3-client.service.d.ts.map +1 -0
- package/dist/services/s3-client.service.js +209 -0
- package/dist/test-observability.d.ts +6 -0
- package/dist/test-observability.d.ts.map +1 -0
- package/dist/test-observability.js +62 -0
- package/dist/types.d.ts +81 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +1 -0
- package/dist/utils/calculations.d.ts +7 -0
- package/dist/utils/calculations.d.ts.map +1 -0
- package/dist/utils/calculations.js +41 -0
- package/dist/utils/error-handling.d.ts +7 -0
- package/dist/utils/error-handling.d.ts.map +1 -0
- package/dist/utils/error-handling.js +29 -0
- package/dist/utils/index.d.ts +4 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +3 -0
- package/dist/utils/stream-adapter.d.ts +14 -0
- package/dist/utils/stream-adapter.d.ts.map +1 -0
- package/dist/utils/stream-adapter.js +41 -0
- package/package.json +36 -0
- package/src/__tests__/integration/s3-store.integration.test.ts +548 -0
- package/src/__tests__/multipart-logic.test.ts +395 -0
- package/src/__tests__/s3-store.edge-cases.test.ts +681 -0
- package/src/__tests__/s3-store.performance.test.ts +622 -0
- package/src/__tests__/s3-store.test.ts +662 -0
- package/src/__tests__/utils/performance-helpers.ts +459 -0
- package/src/__tests__/utils/test-data-generator.ts +331 -0
- package/src/__tests__/utils/test-setup.ts +256 -0
- package/src/index.ts +1 -0
- package/src/s3-store.ts +1059 -0
- package/src/services/__mocks__/s3-client-mock.service.ts +604 -0
- package/src/services/index.ts +1 -0
- package/src/services/s3-client.service.ts +359 -0
- package/src/types.ts +96 -0
- package/src/utils/calculations.ts +61 -0
- package/src/utils/error-handling.ts +52 -0
- package/src/utils/index.ts +3 -0
- package/src/utils/stream-adapter.ts +50 -0
- package/tsconfig.json +19 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/vitest.config.ts +15 -0
|
@@ -0,0 +1,604 @@
|
|
|
1
|
+
import type AWS from "@aws-sdk/client-s3";
|
|
2
|
+
import { UploadistaError } from "@uploadista/core/errors";
|
|
3
|
+
import { Effect, Layer, Ref } from "effect";
|
|
4
|
+
import type { S3OperationContext } from "../../types";
|
|
5
|
+
import { S3ClientService } from "../s3-client.service";
|
|
6
|
+
|
|
7
|
+
// Mock configuration for testing scenarios
/**
 * Tunables controlling how the mock S3 service behaves in tests.
 * All fields are optional; an empty config yields a fast,
 * always-successful mock.
 */
export interface MockS3Config {
  /** Artificial delay in milliseconds applied before every operation. */
  simulateLatency?: number; // ms delay for all operations
  /** Probability (0-1) that an operation fails with a random error. */
  errorRate?: number; // 0-1 probability of random errors
  /** Probability (0-1) that an individual part upload fails. */
  uploadFailureRate?: number; // 0-1 probability of upload failures
  /** Maximum allowed object size; larger putObject bodies are rejected. */
  maxObjectSize?: number; // Maximum allowed object size
  // NOTE(review): not read anywhere in the visible mock implementation —
  // error injection works via injectError/clearError regardless; confirm
  // whether this flag is still needed.
  enableErrorInjection?: boolean;
}
|
|
15
|
+
|
|
16
|
+
// In-memory storage for mock S3
/**
 * Backing state for the mock service. Everything lives in plain Maps so a
 * test can inspect or reset it without touching any real AWS resources.
 */
interface MockStorage {
  /** Completed objects, keyed by object key. */
  objects: Map<string, Uint8Array>;
  /** In-flight multipart uploads, keyed by upload id. */
  multipartUploads: Map<
    string,
    {
      uploadId: string;
      /** Uploaded parts keyed by part number; data kept for final assembly. */
      parts: Map<number, { etag: string; data: Uint8Array }>;
      /** Request metadata captured at createMultipartUpload time. */
      metadata: {
        contentType?: string;
        cacheControl?: string;
        key: string;
        bucket: string;
      };
    }
  >;
  /** Buffered partial chunks, keyed by `${id}.part`. */
  incompleteParts: Map<string, Uint8Array>;
}
|
|
34
|
+
|
|
35
|
+
// Mock metrics for testing
/**
 * Counters the mock accumulates so tests can assert on call patterns
 * and byte volumes without spying on individual methods.
 */
interface MockMetrics {
  /** Number of invocations per operation name (e.g. "putObject"). */
  operationCounts: Map<string, number>;
  /** Name of the most recently recorded operation, if any. */
  lastOperation?: string;
  /** Total bytes written via putObject/uploadPart/putIncompletePart. */
  totalBytesUploaded: number;
  /** Total bytes read via getObject. */
  totalBytesDownloaded: number;
}
|
|
42
|
+
|
|
43
|
+
// Additional methods for testing that extend the base S3ClientService
/**
 * Test-only control surface exposed alongside the regular service methods.
 */
export interface MockS3TestMethods {
  /** Merge new values into the mock configuration at runtime. */
  readonly setConfig: (config: Partial<MockS3Config>) => Effect.Effect<void>;
  /** Drop all stored objects/uploads, injected errors, and metrics. */
  readonly clearStorage: () => Effect.Effect<void>;
  /**
   * Register an error for a named operation; it fires on every call to
   * that operation until cleared (errors persist across retries).
   */
  readonly injectError: (
    operation: string,
    error: Error,
  ) => Effect.Effect<void>;
  /** Remove a previously injected error for the given operation. */
  readonly clearError: (operation: string) => Effect.Effect<void>;
  /** Snapshot of the accumulated call/byte counters. */
  readonly getMetrics: () => Effect.Effect<MockMetrics>;
  /** Direct snapshot of the in-memory storage for assertions. */
  readonly getStorage: () => Effect.Effect<MockStorage>;
}
|
|
55
|
+
|
|
56
|
+
export const makeMockS3ClientService = (
|
|
57
|
+
bucket: string,
|
|
58
|
+
initialConfig: MockS3Config = {},
|
|
59
|
+
): Effect.Effect<S3ClientService["Type"] & MockS3TestMethods, never> => {
|
|
60
|
+
return Effect.gen(function* () {
|
|
61
|
+
const storageRef = yield* Ref.make<MockStorage>({
|
|
62
|
+
objects: new Map(),
|
|
63
|
+
multipartUploads: new Map(),
|
|
64
|
+
incompleteParts: new Map(),
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
const metricsRef = yield* Ref.make<MockMetrics>({
|
|
68
|
+
operationCounts: new Map(),
|
|
69
|
+
totalBytesUploaded: 0,
|
|
70
|
+
totalBytesDownloaded: 0,
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
const configRef = yield* Ref.make<MockS3Config>(initialConfig);
|
|
74
|
+
const errorInjectionRef = yield* Ref.make<Map<string, Error>>(new Map());
|
|
75
|
+
|
|
76
|
+
const simulateLatency = () =>
|
|
77
|
+
Effect.gen(function* () {
|
|
78
|
+
const config = yield* Ref.get(configRef);
|
|
79
|
+
if (config.simulateLatency && config.simulateLatency > 0) {
|
|
80
|
+
yield* Effect.sleep(`${config.simulateLatency} millis`);
|
|
81
|
+
}
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
const recordOperation = (operation: string) =>
|
|
85
|
+
Effect.gen(function* () {
|
|
86
|
+
yield* Ref.update(metricsRef, (metrics) => ({
|
|
87
|
+
...metrics,
|
|
88
|
+
lastOperation: operation,
|
|
89
|
+
operationCounts: new Map([
|
|
90
|
+
...metrics.operationCounts,
|
|
91
|
+
[operation, (metrics.operationCounts.get(operation) || 0) + 1],
|
|
92
|
+
]),
|
|
93
|
+
}));
|
|
94
|
+
});
|
|
95
|
+
|
|
96
|
+
const checkForInjectedError = (operation: string) =>
|
|
97
|
+
Effect.gen(function* () {
|
|
98
|
+
const errorMap = yield* Ref.get(errorInjectionRef);
|
|
99
|
+
const error = errorMap.get(operation);
|
|
100
|
+
if (error) {
|
|
101
|
+
// Don't remove the error - let it persist for retries
|
|
102
|
+
// Tests should clear errors explicitly when done
|
|
103
|
+
yield* Effect.fail(
|
|
104
|
+
UploadistaError.fromCode("FILE_WRITE_ERROR", error),
|
|
105
|
+
);
|
|
106
|
+
}
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
const maybeInjectRandomError = (operation: string) =>
|
|
110
|
+
Effect.gen(function* () {
|
|
111
|
+
const config = yield* Ref.get(configRef);
|
|
112
|
+
if (config.errorRate && Math.random() < config.errorRate) {
|
|
113
|
+
yield* Effect.fail(
|
|
114
|
+
UploadistaError.fromCode(
|
|
115
|
+
"FILE_WRITE_ERROR",
|
|
116
|
+
new Error(`Random error in ${operation}`),
|
|
117
|
+
),
|
|
118
|
+
);
|
|
119
|
+
}
|
|
120
|
+
});
|
|
121
|
+
|
|
122
|
+
const generateETag = (data: Uint8Array): string => {
|
|
123
|
+
// Simple hash for ETag simulation
|
|
124
|
+
let hash = 0;
|
|
125
|
+
for (let i = 0; i < data.length; i++) {
|
|
126
|
+
hash = ((hash << 5) - hash + data[i]) & 0xffffffff;
|
|
127
|
+
}
|
|
128
|
+
return `"${Math.abs(hash).toString(16)}"`;
|
|
129
|
+
};
|
|
130
|
+
|
|
131
|
+
const generateUploadId = (): string => {
|
|
132
|
+
return `upload-${Date.now()}-${Math.random().toString(36).substring(2)}`;
|
|
133
|
+
};
|
|
134
|
+
|
|
135
|
+
// Implementation of service methods
|
|
136
|
+
const setConfig = (config: Partial<MockS3Config>) =>
|
|
137
|
+
Ref.update(configRef, (current) => ({ ...current, ...config }));
|
|
138
|
+
|
|
139
|
+
const clearStorage = () =>
|
|
140
|
+
Effect.gen(function* () {
|
|
141
|
+
yield* Ref.set(storageRef, {
|
|
142
|
+
objects: new Map(),
|
|
143
|
+
multipartUploads: new Map(),
|
|
144
|
+
incompleteParts: new Map(),
|
|
145
|
+
});
|
|
146
|
+
// Also clear injected errors
|
|
147
|
+
yield* Ref.set(errorInjectionRef, new Map());
|
|
148
|
+
// Reset metrics
|
|
149
|
+
yield* Ref.set(metricsRef, {
|
|
150
|
+
operationCounts: new Map(),
|
|
151
|
+
totalBytesUploaded: 0,
|
|
152
|
+
totalBytesDownloaded: 0,
|
|
153
|
+
});
|
|
154
|
+
});
|
|
155
|
+
|
|
156
|
+
const injectError = (operation: string, error: Error) =>
|
|
157
|
+
Ref.update(
|
|
158
|
+
errorInjectionRef,
|
|
159
|
+
(map) => new Map([...map, [operation, error]]),
|
|
160
|
+
);
|
|
161
|
+
|
|
162
|
+
const clearError = (operation: string) =>
|
|
163
|
+
Ref.update(errorInjectionRef, (map) => {
|
|
164
|
+
const newMap = new Map(map);
|
|
165
|
+
newMap.delete(operation);
|
|
166
|
+
return newMap;
|
|
167
|
+
});
|
|
168
|
+
|
|
169
|
+
const getMetrics = () => Ref.get(metricsRef);
|
|
170
|
+
|
|
171
|
+
const getObject = (key: string) =>
|
|
172
|
+
Effect.gen(function* () {
|
|
173
|
+
yield* simulateLatency();
|
|
174
|
+
yield* recordOperation("getObject");
|
|
175
|
+
yield* checkForInjectedError("getObject");
|
|
176
|
+
yield* maybeInjectRandomError("getObject");
|
|
177
|
+
|
|
178
|
+
const storage = yield* Ref.get(storageRef);
|
|
179
|
+
const data = storage.objects.get(key);
|
|
180
|
+
|
|
181
|
+
if (!data) {
|
|
182
|
+
yield* Effect.fail(
|
|
183
|
+
UploadistaError.fromCode(
|
|
184
|
+
"FILE_NOT_FOUND",
|
|
185
|
+
new Error(`Object not found: ${key}`),
|
|
186
|
+
),
|
|
187
|
+
);
|
|
188
|
+
return new ReadableStream(); // Never reached but helps TypeScript
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
yield* Ref.update(metricsRef, (metrics) => ({
|
|
192
|
+
...metrics,
|
|
193
|
+
totalBytesDownloaded: metrics.totalBytesDownloaded + data.length,
|
|
194
|
+
}));
|
|
195
|
+
|
|
196
|
+
// Convert Uint8Array to ReadableStream
|
|
197
|
+
return new ReadableStream({
|
|
198
|
+
start(controller) {
|
|
199
|
+
controller.enqueue(data);
|
|
200
|
+
controller.close();
|
|
201
|
+
},
|
|
202
|
+
});
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
const headObject = (key: string) =>
|
|
206
|
+
Effect.gen(function* () {
|
|
207
|
+
yield* simulateLatency();
|
|
208
|
+
yield* recordOperation("headObject");
|
|
209
|
+
yield* checkForInjectedError("headObject");
|
|
210
|
+
|
|
211
|
+
const storage = yield* Ref.get(storageRef);
|
|
212
|
+
const data = storage.objects.get(key);
|
|
213
|
+
return data?.length;
|
|
214
|
+
});
|
|
215
|
+
|
|
216
|
+
const putObject = (key: string, body: Uint8Array) =>
|
|
217
|
+
Effect.gen(function* () {
|
|
218
|
+
yield* simulateLatency();
|
|
219
|
+
yield* recordOperation("putObject");
|
|
220
|
+
yield* checkForInjectedError("putObject");
|
|
221
|
+
yield* maybeInjectRandomError("putObject");
|
|
222
|
+
|
|
223
|
+
const config = yield* Ref.get(configRef);
|
|
224
|
+
if (config.maxObjectSize && body.length > config.maxObjectSize) {
|
|
225
|
+
yield* Effect.fail(
|
|
226
|
+
UploadistaError.fromCode(
|
|
227
|
+
"FILE_WRITE_ERROR",
|
|
228
|
+
new Error(
|
|
229
|
+
`Object size ${body.length} exceeds maximum ${config.maxObjectSize}`,
|
|
230
|
+
),
|
|
231
|
+
),
|
|
232
|
+
);
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
yield* Ref.update(storageRef, (storage) => ({
|
|
236
|
+
...storage,
|
|
237
|
+
objects: new Map([...storage.objects, [key, body]]),
|
|
238
|
+
}));
|
|
239
|
+
|
|
240
|
+
yield* Ref.update(metricsRef, (metrics) => ({
|
|
241
|
+
...metrics,
|
|
242
|
+
totalBytesUploaded: metrics.totalBytesUploaded + body.length,
|
|
243
|
+
}));
|
|
244
|
+
|
|
245
|
+
return generateETag(body);
|
|
246
|
+
});
|
|
247
|
+
|
|
248
|
+
const deleteObject = (key: string) =>
|
|
249
|
+
Effect.gen(function* () {
|
|
250
|
+
yield* simulateLatency();
|
|
251
|
+
yield* recordOperation("deleteObject");
|
|
252
|
+
yield* checkForInjectedError("deleteObject");
|
|
253
|
+
|
|
254
|
+
yield* Ref.update(storageRef, (storage) => {
|
|
255
|
+
const newObjects = new Map(storage.objects);
|
|
256
|
+
newObjects.delete(key);
|
|
257
|
+
return { ...storage, objects: newObjects };
|
|
258
|
+
});
|
|
259
|
+
});
|
|
260
|
+
|
|
261
|
+
const deleteObjects = (keys: string[]) =>
|
|
262
|
+
Effect.gen(function* () {
|
|
263
|
+
yield* simulateLatency();
|
|
264
|
+
yield* recordOperation("deleteObjects");
|
|
265
|
+
yield* checkForInjectedError("deleteObjects");
|
|
266
|
+
|
|
267
|
+
yield* Ref.update(storageRef, (storage) => {
|
|
268
|
+
const newObjects = new Map(storage.objects);
|
|
269
|
+
for (const key of keys) {
|
|
270
|
+
newObjects.delete(key);
|
|
271
|
+
}
|
|
272
|
+
return { ...storage, objects: newObjects };
|
|
273
|
+
});
|
|
274
|
+
|
|
275
|
+
return {
|
|
276
|
+
$metadata: {},
|
|
277
|
+
Deleted: keys.map((key) => ({ Key: key })),
|
|
278
|
+
Errors: [],
|
|
279
|
+
} as AWS.DeleteObjectsCommandOutput;
|
|
280
|
+
});
|
|
281
|
+
|
|
282
|
+
const createMultipartUpload = (context: S3OperationContext) =>
|
|
283
|
+
Effect.gen(function* () {
|
|
284
|
+
yield* simulateLatency();
|
|
285
|
+
yield* recordOperation("createMultipartUpload");
|
|
286
|
+
yield* checkForInjectedError("createMultipartUpload");
|
|
287
|
+
|
|
288
|
+
const uploadId = generateUploadId();
|
|
289
|
+
|
|
290
|
+
yield* Ref.update(storageRef, (storage) => ({
|
|
291
|
+
...storage,
|
|
292
|
+
multipartUploads: new Map([
|
|
293
|
+
...storage.multipartUploads,
|
|
294
|
+
[
|
|
295
|
+
uploadId,
|
|
296
|
+
{
|
|
297
|
+
uploadId,
|
|
298
|
+
parts: new Map(),
|
|
299
|
+
metadata: {
|
|
300
|
+
contentType: context.contentType,
|
|
301
|
+
cacheControl: context.cacheControl,
|
|
302
|
+
key: context.key,
|
|
303
|
+
bucket: context.bucket,
|
|
304
|
+
},
|
|
305
|
+
},
|
|
306
|
+
],
|
|
307
|
+
]),
|
|
308
|
+
}));
|
|
309
|
+
|
|
310
|
+
return {
|
|
311
|
+
uploadId,
|
|
312
|
+
bucket: context.bucket,
|
|
313
|
+
key: context.key,
|
|
314
|
+
};
|
|
315
|
+
});
|
|
316
|
+
|
|
317
|
+
const uploadPart = (
|
|
318
|
+
context: S3OperationContext & { partNumber: number; data: Uint8Array },
|
|
319
|
+
) =>
|
|
320
|
+
Effect.gen(function* () {
|
|
321
|
+
yield* simulateLatency();
|
|
322
|
+
yield* recordOperation("uploadPart");
|
|
323
|
+
yield* checkForInjectedError("uploadPart");
|
|
324
|
+
|
|
325
|
+
const config = yield* Ref.get(configRef);
|
|
326
|
+
if (
|
|
327
|
+
config.uploadFailureRate &&
|
|
328
|
+
Math.random() < config.uploadFailureRate
|
|
329
|
+
) {
|
|
330
|
+
yield* Effect.fail(
|
|
331
|
+
UploadistaError.fromCode(
|
|
332
|
+
"FILE_WRITE_ERROR",
|
|
333
|
+
new Error(`Upload failed for part ${context.partNumber}`),
|
|
334
|
+
),
|
|
335
|
+
);
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
const storage = yield* Ref.get(storageRef);
|
|
339
|
+
const upload = storage.multipartUploads.get(context.uploadId);
|
|
340
|
+
|
|
341
|
+
if (!upload) {
|
|
342
|
+
return yield* Effect.fail(
|
|
343
|
+
UploadistaError.fromCode(
|
|
344
|
+
"FILE_NOT_FOUND",
|
|
345
|
+
new Error(`Upload not found: ${context.uploadId}`),
|
|
346
|
+
),
|
|
347
|
+
);
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
const etag = generateETag(context.data);
|
|
351
|
+
upload.parts.set(context.partNumber, {
|
|
352
|
+
etag,
|
|
353
|
+
data: context.data,
|
|
354
|
+
});
|
|
355
|
+
|
|
356
|
+
yield* Ref.update(metricsRef, (metrics) => ({
|
|
357
|
+
...metrics,
|
|
358
|
+
totalBytesUploaded: metrics.totalBytesUploaded + context.data.length,
|
|
359
|
+
}));
|
|
360
|
+
|
|
361
|
+
return etag as string;
|
|
362
|
+
});
|
|
363
|
+
|
|
364
|
+
const completeMultipartUpload = (
|
|
365
|
+
context: S3OperationContext,
|
|
366
|
+
parts: Array<AWS.Part>,
|
|
367
|
+
) =>
|
|
368
|
+
Effect.gen(function* () {
|
|
369
|
+
yield* simulateLatency();
|
|
370
|
+
yield* recordOperation("completeMultipartUpload");
|
|
371
|
+
yield* checkForInjectedError("completeMultipartUpload");
|
|
372
|
+
|
|
373
|
+
const storage = yield* Ref.get(storageRef);
|
|
374
|
+
const upload = storage.multipartUploads.get(context.uploadId);
|
|
375
|
+
|
|
376
|
+
if (!upload) {
|
|
377
|
+
yield* Effect.fail(
|
|
378
|
+
UploadistaError.fromCode(
|
|
379
|
+
"FILE_NOT_FOUND",
|
|
380
|
+
new Error(`Upload not found: ${context.uploadId}`),
|
|
381
|
+
),
|
|
382
|
+
);
|
|
383
|
+
return; // This will never execute but helps TypeScript
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
// Validate all parts are present
|
|
387
|
+
for (const part of parts) {
|
|
388
|
+
if (!part.PartNumber || !upload.parts.has(part.PartNumber)) {
|
|
389
|
+
yield* Effect.fail(
|
|
390
|
+
UploadistaError.fromCode(
|
|
391
|
+
"FILE_WRITE_ERROR",
|
|
392
|
+
new Error(`Part ${part.PartNumber} not found`),
|
|
393
|
+
),
|
|
394
|
+
);
|
|
395
|
+
}
|
|
396
|
+
}
|
|
397
|
+
|
|
398
|
+
// Combine all parts into final object
|
|
399
|
+
const sortedParts = parts
|
|
400
|
+
.sort((a, b) => (a.PartNumber || 0) - (b.PartNumber || 0))
|
|
401
|
+
.map((part) => {
|
|
402
|
+
const partData = upload.parts.get(part.PartNumber || 0);
|
|
403
|
+
if (!partData) throw new Error(`Part ${part.PartNumber} not found`);
|
|
404
|
+
return partData.data;
|
|
405
|
+
});
|
|
406
|
+
|
|
407
|
+
const totalLength = sortedParts.reduce(
|
|
408
|
+
(sum, part) => sum + part.length,
|
|
409
|
+
0,
|
|
410
|
+
);
|
|
411
|
+
const combinedData = new Uint8Array(totalLength);
|
|
412
|
+
let offset = 0;
|
|
413
|
+
|
|
414
|
+
for (const part of sortedParts) {
|
|
415
|
+
combinedData.set(part, offset);
|
|
416
|
+
offset += part.length;
|
|
417
|
+
}
|
|
418
|
+
|
|
419
|
+
// Store the final object
|
|
420
|
+
yield* Ref.update(storageRef, (storage) => ({
|
|
421
|
+
...storage,
|
|
422
|
+
objects: new Map([...storage.objects, [context.key, combinedData]]),
|
|
423
|
+
multipartUploads: new Map(
|
|
424
|
+
[...storage.multipartUploads].filter(
|
|
425
|
+
([id]) => id !== context.uploadId,
|
|
426
|
+
),
|
|
427
|
+
),
|
|
428
|
+
}));
|
|
429
|
+
|
|
430
|
+
return `https://${context.bucket}.s3.amazonaws.com/${context.key}`;
|
|
431
|
+
});
|
|
432
|
+
|
|
433
|
+
const abortMultipartUpload = (context: S3OperationContext) =>
|
|
434
|
+
Effect.gen(function* () {
|
|
435
|
+
yield* simulateLatency();
|
|
436
|
+
yield* recordOperation("abortMultipartUpload");
|
|
437
|
+
yield* checkForInjectedError("abortMultipartUpload");
|
|
438
|
+
|
|
439
|
+
yield* Ref.update(storageRef, (storage) => ({
|
|
440
|
+
...storage,
|
|
441
|
+
multipartUploads: new Map(
|
|
442
|
+
[...storage.multipartUploads].filter(
|
|
443
|
+
([id]) => id !== context.uploadId,
|
|
444
|
+
),
|
|
445
|
+
),
|
|
446
|
+
}));
|
|
447
|
+
});
|
|
448
|
+
|
|
449
|
+
const listParts = (
|
|
450
|
+
context: S3OperationContext & { partNumberMarker?: string },
|
|
451
|
+
) =>
|
|
452
|
+
Effect.gen(function* () {
|
|
453
|
+
yield* simulateLatency();
|
|
454
|
+
yield* recordOperation("listParts");
|
|
455
|
+
yield* checkForInjectedError("listParts");
|
|
456
|
+
|
|
457
|
+
const storage = yield* Ref.get(storageRef);
|
|
458
|
+
const upload = storage.multipartUploads.get(context.uploadId);
|
|
459
|
+
|
|
460
|
+
if (!upload) {
|
|
461
|
+
return yield* Effect.fail(
|
|
462
|
+
UploadistaError.fromCode(
|
|
463
|
+
"FILE_NOT_FOUND",
|
|
464
|
+
new Error(`Upload not found: ${context.uploadId}`),
|
|
465
|
+
),
|
|
466
|
+
);
|
|
467
|
+
}
|
|
468
|
+
|
|
469
|
+
const parts: AWS.Part[] = Array.from(upload.parts.entries())
|
|
470
|
+
.map(([partNumber, part]) => ({
|
|
471
|
+
PartNumber: partNumber,
|
|
472
|
+
ETag: part.etag,
|
|
473
|
+
Size: part.data.length,
|
|
474
|
+
}))
|
|
475
|
+
.sort((a, b) => (a.PartNumber || 0) - (b.PartNumber || 0));
|
|
476
|
+
|
|
477
|
+
return {
|
|
478
|
+
parts,
|
|
479
|
+
isTruncated: false,
|
|
480
|
+
nextPartNumberMarker: undefined,
|
|
481
|
+
};
|
|
482
|
+
});
|
|
483
|
+
|
|
484
|
+
const listMultipartUploads = (
|
|
485
|
+
_keyMarker?: string,
|
|
486
|
+
_uploadIdMarker?: string,
|
|
487
|
+
) =>
|
|
488
|
+
Effect.gen(function* () {
|
|
489
|
+
yield* simulateLatency();
|
|
490
|
+
yield* recordOperation("listMultipartUploads");
|
|
491
|
+
yield* checkForInjectedError("listMultipartUploads");
|
|
492
|
+
|
|
493
|
+
const storage = yield* Ref.get(storageRef);
|
|
494
|
+
const uploads = Array.from(storage.multipartUploads.values()).map(
|
|
495
|
+
(upload) => ({
|
|
496
|
+
Key: upload.metadata.key,
|
|
497
|
+
UploadId: upload.uploadId,
|
|
498
|
+
Initiated: new Date(),
|
|
499
|
+
}),
|
|
500
|
+
);
|
|
501
|
+
|
|
502
|
+
return {
|
|
503
|
+
Uploads: uploads,
|
|
504
|
+
IsTruncated: false,
|
|
505
|
+
} as AWS.ListMultipartUploadsCommandOutput;
|
|
506
|
+
});
|
|
507
|
+
|
|
508
|
+
const getIncompletePart = (id: string) =>
|
|
509
|
+
Effect.gen(function* () {
|
|
510
|
+
yield* simulateLatency();
|
|
511
|
+
yield* recordOperation("getIncompletePart");
|
|
512
|
+
|
|
513
|
+
const storage = yield* Ref.get(storageRef);
|
|
514
|
+
const data = storage.incompleteParts.get(`${id}.part`);
|
|
515
|
+
|
|
516
|
+
if (!data) {
|
|
517
|
+
return undefined;
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
return new ReadableStream({
|
|
521
|
+
start(controller) {
|
|
522
|
+
controller.enqueue(data);
|
|
523
|
+
controller.close();
|
|
524
|
+
},
|
|
525
|
+
});
|
|
526
|
+
});
|
|
527
|
+
|
|
528
|
+
const getIncompletePartSize = (id: string) =>
|
|
529
|
+
Effect.gen(function* () {
|
|
530
|
+
yield* simulateLatency();
|
|
531
|
+
yield* recordOperation("getIncompletePartSize");
|
|
532
|
+
|
|
533
|
+
const storage = yield* Ref.get(storageRef);
|
|
534
|
+
const data = storage.incompleteParts.get(`${id}.part`);
|
|
535
|
+
return data?.length;
|
|
536
|
+
});
|
|
537
|
+
|
|
538
|
+
const putIncompletePart = (id: string, data: Uint8Array) =>
|
|
539
|
+
Effect.gen(function* () {
|
|
540
|
+
yield* simulateLatency();
|
|
541
|
+
yield* recordOperation("putIncompletePart");
|
|
542
|
+
yield* checkForInjectedError("putIncompletePart");
|
|
543
|
+
|
|
544
|
+
yield* Ref.update(storageRef, (storage) => ({
|
|
545
|
+
...storage,
|
|
546
|
+
incompleteParts: new Map([
|
|
547
|
+
...storage.incompleteParts,
|
|
548
|
+
[`${id}.part`, data],
|
|
549
|
+
]),
|
|
550
|
+
}));
|
|
551
|
+
|
|
552
|
+
yield* Ref.update(metricsRef, (metrics) => ({
|
|
553
|
+
...metrics,
|
|
554
|
+
totalBytesUploaded: metrics.totalBytesUploaded + data.length,
|
|
555
|
+
}));
|
|
556
|
+
|
|
557
|
+
return generateETag(data);
|
|
558
|
+
});
|
|
559
|
+
|
|
560
|
+
const deleteIncompletePart = (id: string) =>
|
|
561
|
+
Effect.gen(function* () {
|
|
562
|
+
yield* simulateLatency();
|
|
563
|
+
yield* recordOperation("deleteIncompletePart");
|
|
564
|
+
|
|
565
|
+
yield* Ref.update(storageRef, (storage) => {
|
|
566
|
+
const newIncompleteParts = new Map(storage.incompleteParts);
|
|
567
|
+
newIncompleteParts.delete(`${id}.part`);
|
|
568
|
+
return { ...storage, incompleteParts: newIncompleteParts };
|
|
569
|
+
});
|
|
570
|
+
});
|
|
571
|
+
|
|
572
|
+
const getStorage = () => Ref.get(storageRef);
|
|
573
|
+
|
|
574
|
+
return {
|
|
575
|
+
bucket,
|
|
576
|
+
// S3ClientService methods
|
|
577
|
+
getObject,
|
|
578
|
+
headObject,
|
|
579
|
+
putObject,
|
|
580
|
+
deleteObject,
|
|
581
|
+
deleteObjects,
|
|
582
|
+
createMultipartUpload,
|
|
583
|
+
uploadPart,
|
|
584
|
+
completeMultipartUpload,
|
|
585
|
+
abortMultipartUpload,
|
|
586
|
+
listParts,
|
|
587
|
+
listMultipartUploads,
|
|
588
|
+
getIncompletePart,
|
|
589
|
+
getIncompletePartSize,
|
|
590
|
+
putIncompletePart,
|
|
591
|
+
deleteIncompletePart,
|
|
592
|
+
// Mock-specific test methods
|
|
593
|
+
setConfig,
|
|
594
|
+
clearStorage,
|
|
595
|
+
injectError,
|
|
596
|
+
clearError,
|
|
597
|
+
getMetrics,
|
|
598
|
+
getStorage,
|
|
599
|
+
};
|
|
600
|
+
});
|
|
601
|
+
};
|
|
602
|
+
|
|
603
|
+
export const MockS3ClientLayer = (bucket: string, config?: MockS3Config) =>
|
|
604
|
+
Layer.effect(S3ClientService, makeMockS3ClientService(bucket, config));
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Barrel file: re-export everything from the S3 client service module.
export * from "./s3-client.service";
|