@uploadista/data-store-gcs 0.0.13-beta.4 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@uploadista/data-store-gcs",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "0.0.13
|
|
4
|
+
"version": "0.0.13",
|
|
5
5
|
"description": "Google Cloud Storage data store for Uploadista",
|
|
6
6
|
"license": "MIT",
|
|
7
7
|
"author": "Uploadista",
|
|
@@ -15,19 +15,25 @@
|
|
|
15
15
|
},
|
|
16
16
|
"dependencies": {
|
|
17
17
|
"@google-cloud/storage": "7.17.3",
|
|
18
|
-
"effect": "3.19.
|
|
19
|
-
"@uploadista/core": "0.0.13
|
|
20
|
-
"@uploadista/observability": "0.0.13
|
|
18
|
+
"effect": "3.19.3",
|
|
19
|
+
"@uploadista/core": "0.0.13",
|
|
20
|
+
"@uploadista/observability": "0.0.13"
|
|
21
21
|
},
|
|
22
22
|
"devDependencies": {
|
|
23
|
-
"
|
|
24
|
-
"
|
|
23
|
+
"@effect/vitest": "0.27.0",
|
|
24
|
+
"tsdown": "0.16.3",
|
|
25
|
+
"vitest": "4.0.8",
|
|
26
|
+
"@uploadista/kv-store-memory": "0.0.13",
|
|
27
|
+
"@uploadista/typescript-config": "0.0.13"
|
|
25
28
|
},
|
|
26
29
|
"scripts": {
|
|
27
30
|
"build": "tsdown",
|
|
31
|
+
"check": "biome check --write ./src",
|
|
28
32
|
"format": "biome format --write ./src",
|
|
29
33
|
"lint": "biome lint --write ./src",
|
|
30
|
-
"
|
|
34
|
+
"test": "vitest",
|
|
35
|
+
"test:run": "vitest run",
|
|
36
|
+
"test:watch": "vitest --watch",
|
|
31
37
|
"typecheck": "tsc --noEmit"
|
|
32
38
|
}
|
|
33
39
|
}
|
|
@@ -0,0 +1,567 @@
|
|
|
1
|
+
import { UploadistaError } from "@uploadista/core/errors";
|
|
2
|
+
import { Effect, Layer } from "effect";
|
|
3
|
+
import type {
|
|
4
|
+
GCSClient,
|
|
5
|
+
GCSObjectMetadata,
|
|
6
|
+
GCSOperationContext,
|
|
7
|
+
} from "../gcs-client.service";
|
|
8
|
+
import { GCSClientService } from "../gcs-client.service";
|
|
9
|
+
|
|
10
|
+
/**
 * Tuning knobs for the in-memory mock GCS client.
 */
export interface MockGCSConfig {
  /** Artificial delay in milliseconds applied before every operation; 0 disables it. */
  simulateLatency: number;
  /** Probability [0..1] that a non-upload operation fails randomly (when error injection is enabled). */
  errorRate: number;
  /** Probability [0..1] that operations whose name contains "upload" or "put" fail randomly. */
  uploadFailureRate: number;
  /** Master switch for the random-failure simulation driven by the two rates above. */
  enableErrorInjection: boolean;
}
|
|
16
|
+
|
|
17
|
+
// Storage types
|
|
18
|
+
/** A single object stored in the mock bucket. */
export interface MockGCSObject {
  /** Object key (path) within the bucket. */
  key: string;
  /** Raw object payload. */
  data: Uint8Array;
  /** Subset of GCS object metadata captured at write time. */
  metadata: Partial<GCSObjectMetadata>;
  /** MIME type, when supplied by the caller. */
  contentType?: string;
  /** Set when the object is first written; preserved on append-style updates. */
  createdAt: Date;
  /** Refreshed on every write to this key. */
  updatedAt: Date;
}
|
|
26
|
+
|
|
27
|
+
/** Backing state owned by one mock client instance. */
export interface MockGCSStorage {
  /** Completed objects, keyed by object key. */
  objects: Map<string, MockGCSObject>;
  /** In-flight resumable uploads, keyed by upload URL, with their accumulated chunks. */
  resumableUploads: Map<
    string,
    { context: GCSOperationContext; data: Uint8Array[] }
  >;
  /** Per-operation invocation counters, for test assertions via getMetrics(). */
  operationCounts: Map<string, number>;
}
|
|
35
|
+
|
|
36
|
+
/** Extra methods the mock exposes (beyond GCSClient) so tests can inspect and steer it. */
export interface MockGCSTestMethods {
  // Test-only methods to inspect internal state
  /** Returns the live backing storage (not a copy). */
  getStorage: () => Effect.Effect<MockGCSStorage, never>;
  /** Summarizes per-operation call counts and total stored objects/bytes. */
  getMetrics: () => Effect.Effect<
    {
      operationCounts: Map<string, number>;
      totalObjects: number;
      totalBytes: number;
    },
    never
  >;
  /** Removes all objects, in-flight uploads, and counters. */
  clearStorage: () => Effect.Effect<void, never>;
  /** Registers a one-shot error: the next call to `operation` fails with it as cause. */
  injectError: (operation: string, error: Error) => Effect.Effect<void, never>;
}
|
|
50
|
+
|
|
51
|
+
/**
 * Create a mock GCS client that simulates GCS operations in memory.
 *
 * All state lives in a per-instance `MockGCSStorage`; nothing touches the
 * network. Every operation first applies the configured latency, bumps its
 * invocation counter, and (for most operations) gives the one-shot and
 * random error-injection hooks a chance to fail the call.
 *
 * @param bucket - Bucket name echoed back in metadata and resumable upload URLs.
 * @param config - Latency and failure-simulation settings.
 * @returns A `GCSClient` implementation augmented with the test-only
 *          inspection methods of `MockGCSTestMethods`.
 */
export function createMockGCSClient(
  bucket: string,
  config: MockGCSConfig,
): GCSClient & MockGCSTestMethods {
  // In-memory bucket state, private to this client instance.
  const storage: MockGCSStorage = {
    objects: new Map(),
    resumableUploads: new Map(),
    operationCounts: new Map(),
  };

  // One-shot errors registered via injectError(), keyed by operation name.
  const injectedErrors = new Map<string, Error>();

  // Helper: simulate latency (no-op when config.simulateLatency is 0)
  const simulateLatency = () =>
    config.simulateLatency > 0
      ? Effect.sleep(`${config.simulateLatency} millis`)
      : Effect.void;

  // Helper: check for injected errors. The entry is deleted before failing,
  // so each injection fails exactly one subsequent call of that operation.
  const checkInjectedError = (operation: string) =>
    Effect.gen(function* () {
      const error = injectedErrors.get(operation);
      if (error) {
        injectedErrors.delete(operation);
        return yield* Effect.fail(
          UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error }),
        );
      }
    });

  // Helper: simulate random errors. Operations whose name contains "upload"
  // or "put" roll against uploadFailureRate; everything else uses errorRate.
  const maybeInjectError = (operation: string) =>
    Effect.gen(function* () {
      if (config.enableErrorInjection) {
        const errorRate =
          operation.includes("upload") || operation.includes("put")
            ? config.uploadFailureRate
            : config.errorRate;

        if (Math.random() < errorRate) {
          return yield* Effect.fail(
            UploadistaError.fromCode("FILE_WRITE_ERROR", {
              cause: new Error(`Simulated ${operation} failure`),
            }),
          );
        }
      }
    });

  // Helper: track operation invocation counts (surfaced via getMetrics()).
  const trackOperation = (operation: string) =>
    Effect.sync(() => {
      const count = storage.operationCounts.get(operation) || 0;
      storage.operationCounts.set(operation, count + 1);
    });

  // Helper: convert ReadableStream to Uint8Array by draining it fully.
  // NOTE(review): a rejected reader.read() surfaces as an Effect defect, not
  // a typed UploadistaError, because Effect.promise assumes no rejection —
  // confirm that is acceptable for a test mock.
  const streamToUint8Array = (
    stream: ReadableStream<Uint8Array>,
  ): Effect.Effect<Uint8Array, UploadistaError> =>
    Effect.gen(function* () {
      const reader = stream.getReader();
      const chunks: Uint8Array[] = [];
      let totalLength = 0;

      while (true) {
        const { done, value } = yield* Effect.promise(() => reader.read());
        if (done) break;
        chunks.push(value);
        totalLength += value.byteLength;
      }

      // Concatenate collected chunks into one contiguous buffer.
      const result = new Uint8Array(totalLength);
      let offset = 0;
      for (const chunk of chunks) {
        result.set(chunk, offset);
        offset += chunk.byteLength;
      }

      return result;
    });

  return {
    bucket,

    // Basic operations

    // Returns the payload as a single-chunk ReadableStream; fails with
    // FILE_NOT_FOUND when the key is absent.
    getObject: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("getObject");
        yield* checkInjectedError("getObject");
        yield* maybeInjectError("getObject");

        const obj = storage.objects.get(key);
        if (!obj) {
          return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
        }

        // Convert Uint8Array to ReadableStream
        const stream = new ReadableStream({
          start(controller) {
            controller.enqueue(obj.data);
            controller.close();
          },
        });

        return stream;
      }),

    // Builds GCS-shaped metadata from the stored object. The generation is
    // always "1" — the mock does not model object versioning.
    getObjectMetadata: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("getObjectMetadata");
        yield* checkInjectedError("getObjectMetadata");
        yield* maybeInjectError("getObjectMetadata");

        const obj = storage.objects.get(key);
        if (!obj) {
          return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
        }

        return {
          name: key,
          bucket,
          size: obj.data.byteLength,
          contentType: obj.contentType,
          metadata: obj.metadata.metadata as Record<string, string | null>,
          generation: "1",
          timeCreated: obj.createdAt.toISOString(),
          updated: obj.updatedAt.toISOString(),
        };
      }),

    // Returns the raw stored bytes (the live Uint8Array, not a copy).
    getObjectBuffer: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("getObjectBuffer");
        yield* checkInjectedError("getObjectBuffer");
        yield* maybeInjectError("getObjectBuffer");

        const obj = storage.objects.get(key);
        if (!obj) {
          return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
        }

        return obj.data;
      }),

    // Existence check; never subject to error injection.
    objectExists: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("objectExists");

        return storage.objects.has(key);
      }),

    // Stores (or overwrites) an object; returns the key.
    putObject: (key: string, body: Uint8Array, context?: Partial<GCSOperationContext>) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("putObject");
        yield* checkInjectedError("putObject");
        yield* maybeInjectError("putObject");

        storage.objects.set(key, {
          key,
          data: body,
          metadata: {
            metadata: context?.metadata,
          },
          contentType: context?.contentType,
          createdAt: new Date(),
          updatedAt: new Date(),
        });

        return key;
      }),

    // Writes a stream either as a fresh object (isAppend falsy) or appended
    // to an existing one (isAppend true). Returns the number of bytes the
    // key now holds in append mode, or the new data's length otherwise.
    // NOTE(review): the `offset` parameter is accepted but never used by the
    // mock — confirm the real implementation's contract before relying on it.
    putObjectFromStreamWithPatching: (
      key: string,
      offset: number,
      readableStream: ReadableStream,
      context?: Partial<GCSOperationContext>,
      onProgress?: (chunkSize: number) => void,
      isAppend?: boolean,
    ) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("putObjectFromStreamWithPatching");
        yield* checkInjectedError("putObjectFromStreamWithPatching");
        yield* maybeInjectError("putObjectFromStreamWithPatching");

        // Read stream data
        const newData = yield* streamToUint8Array(readableStream);

        if (isAppend) {
          // Append mode: combine with existing data
          const patchKey = `${key}_patch`;

          // Store patch temporarily
          storage.objects.set(patchKey, {
            key: patchKey,
            data: newData,
            metadata: {},
            contentType: context?.contentType,
            createdAt: new Date(),
            updatedAt: new Date(),
          });

          // Get existing file
          const existingObj = storage.objects.get(key);
          if (!existingObj) {
            return yield* Effect.fail(
              UploadistaError.fromCode("FILE_NOT_FOUND"),
            );
          }

          // Combine files
          const combinedData = new Uint8Array(
            existingObj.data.byteLength + newData.byteLength,
          );
          combinedData.set(existingObj.data, 0);
          combinedData.set(newData, existingObj.data.byteLength);

          // Update main file
          storage.objects.set(key, {
            key,
            data: combinedData,
            metadata: {
              metadata: context?.metadata,
            },
            contentType: context?.contentType || existingObj.contentType,
            createdAt: existingObj.createdAt,
            updatedAt: new Date(),
          });

          // Delete patch
          storage.objects.delete(patchKey);

          // NOTE(review): in append mode onProgress receives the combined
          // total, while direct mode reports only the new data's length —
          // confirm this asymmetry is intentional.
          if (onProgress) {
            onProgress(combinedData.byteLength);
          }

          return combinedData.byteLength;
        } else {
          // Direct upload mode
          storage.objects.set(key, {
            key,
            data: newData,
            metadata: {
              metadata: context?.metadata,
            },
            contentType: context?.contentType,
            createdAt: new Date(),
            updatedAt: new Date(),
          });

          if (onProgress) {
            onProgress(newData.byteLength);
          }

          return newData.byteLength;
        }
      }),

    // Deletes a key; silently succeeds when the key does not exist.
    deleteObject: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("deleteObject");
        yield* checkInjectedError("deleteObject");
        yield* maybeInjectError("deleteObject");

        storage.objects.delete(key);
      }),

    // Resumable upload operations

    // Registers a new resumable session and returns a GCS-shaped upload URL
    // that serves as the session handle for uploadChunk/getUploadStatus.
    createResumableUpload: (context: GCSOperationContext) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("createResumableUpload");
        yield* checkInjectedError("createResumableUpload");
        yield* maybeInjectError("createResumableUpload");

        const uploadId = `upload-${Date.now()}-${Math.random().toString(36).substring(7)}`;
        const uploadUrl = `https://storage.googleapis.com/upload/storage/v1/b/${bucket}/o?uploadType=resumable&upload_id=${uploadId}`;

        storage.resumableUploads.set(uploadUrl, { context, data: [] });

        return uploadUrl;
      }),

    // Appends a chunk to the session. `start` is only used to compute the
    // reported bytesUploaded; chunks are stored in call order, so
    // retransmitting a range would duplicate data in this mock.
    // Completion (start + chunk length >= total) finalizes the object and
    // removes the session.
    uploadChunk: (uploadUrl: string, chunk: Uint8Array, start: number, total?: number) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("uploadChunk");
        yield* checkInjectedError("uploadChunk");
        yield* maybeInjectError("uploadChunk");

        const upload = storage.resumableUploads.get(uploadUrl);
        if (!upload) {
          return yield* Effect.fail(
            UploadistaError.fromCode("FILE_NOT_FOUND"),
          );
        }

        upload.data.push(chunk);

        const bytesUploaded = start + chunk.byteLength;
        const completed = total !== undefined && bytesUploaded >= total;

        if (completed) {
          // Combine all chunks
          const totalSize = upload.data.reduce(
            (sum, c) => sum + c.byteLength,
            0,
          );
          const combinedData = new Uint8Array(totalSize);
          let offset = 0;
          for (const c of upload.data) {
            combinedData.set(c, offset);
            offset += c.byteLength;
          }

          // Store the object
          storage.objects.set(upload.context.key, {
            key: upload.context.key,
            data: combinedData,
            metadata: {
              metadata: upload.context.metadata,
            },
            contentType: upload.context.contentType,
            createdAt: new Date(),
            updatedAt: new Date(),
          });

          // Clean up resumable upload
          storage.resumableUploads.delete(uploadUrl);
        }

        return { completed, bytesUploaded };
      }),

    // Reports bytes received so far for an in-flight session. Always returns
    // completed: false — a completed session has already been removed by
    // uploadChunk, so querying it fails with FILE_NOT_FOUND instead.
    getUploadStatus: (uploadUrl: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("getUploadStatus");

        const upload = storage.resumableUploads.get(uploadUrl);
        if (!upload) {
          return yield* Effect.fail(
            UploadistaError.fromCode("FILE_NOT_FOUND"),
          );
        }

        const bytesUploaded = upload.data.reduce(
          (sum, chunk) => sum + chunk.byteLength,
          0,
        );

        return { bytesUploaded, completed: false };
      }),

    // Discards an in-flight session; silently succeeds if already gone.
    cancelUpload: (uploadUrl: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("cancelUpload");

        storage.resumableUploads.delete(uploadUrl);
      }),

    // Compose operations

    // Concatenates source objects (in the given order) into destinationKey.
    // Fails with FILE_NOT_FOUND if any source is missing; contentType falls
    // back to the first source's when the context does not supply one.
    composeObjects: (sourceKeys: string[], destinationKey: string, context?: Partial<GCSOperationContext>) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("composeObjects");
        yield* checkInjectedError("composeObjects");
        yield* maybeInjectError("composeObjects");

        // Get all source objects
        const sourceObjects = sourceKeys.map((key) => storage.objects.get(key));

        if (sourceObjects.some((obj) => !obj)) {
          return yield* Effect.fail(
            UploadistaError.fromCode("FILE_NOT_FOUND"),
          );
        }

        // Calculate total size
        const totalSize = sourceObjects.reduce(
          (sum, obj) => sum + (obj?.data.byteLength || 0),
          0,
        );

        // Combine data
        const combinedData = new Uint8Array(totalSize);
        let offset = 0;
        for (const obj of sourceObjects) {
          if (obj) {
            combinedData.set(obj.data, offset);
            offset += obj.data.byteLength;
          }
        }

        // Store combined object
        storage.objects.set(destinationKey, {
          key: destinationKey,
          data: combinedData,
          metadata: {
            metadata: context?.metadata,
          },
          contentType: context?.contentType || sourceObjects[0]?.contentType,
          createdAt: new Date(),
          updatedAt: new Date(),
        });

        return destinationKey;
      }),

    // Temporary file operations
    // NOTE(review): temporary objects share the same map as regular objects,
    // so key collisions between the two namespaces are possible.

    putTemporaryObject: (key: string, body: Uint8Array, context?: Partial<GCSOperationContext>) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("putTemporaryObject");
        yield* checkInjectedError("putTemporaryObject");
        yield* maybeInjectError("putTemporaryObject");

        storage.objects.set(key, {
          key,
          data: body,
          metadata: {
            metadata: context?.metadata,
          },
          contentType: context?.contentType,
          createdAt: new Date(),
          updatedAt: new Date(),
        });

        return key;
      }),

    // Unlike getObject, a missing temporary key yields undefined rather than
    // a FILE_NOT_FOUND failure.
    getTemporaryObject: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("getTemporaryObject");

        const obj = storage.objects.get(key);
        if (!obj) {
          return undefined;
        }

        // Convert Uint8Array to ReadableStream
        const stream = new ReadableStream({
          start(controller) {
            controller.enqueue(obj.data);
            controller.close();
          },
        });

        return stream;
      }),

    deleteTemporaryObject: (key: string) =>
      Effect.gen(function* () {
        yield* simulateLatency();
        yield* trackOperation("deleteTemporaryObject");

        storage.objects.delete(key);
      }),

    // Test-only methods

    // Exposes the live backing state (not a copy); mutations are visible.
    getStorage: () => Effect.succeed(storage),

    getMetrics: () =>
      Effect.gen(function* () {
        const totalObjects = storage.objects.size;
        const totalBytes = Array.from(storage.objects.values()).reduce(
          (sum, obj) => sum + obj.data.byteLength,
          0,
        );

        return {
          operationCounts: storage.operationCounts,
          totalObjects,
          totalBytes,
        };
      }),

    clearStorage: () =>
      Effect.sync(() => {
        storage.objects.clear();
        storage.resumableUploads.clear();
        storage.operationCounts.clear();
      }),

    // Registers a one-shot error consumed by checkInjectedError on the next
    // call of `operation`. Works regardless of config.enableErrorInjection.
    injectError: (operation: string, error: Error) =>
      Effect.sync(() => {
        injectedErrors.set(operation, error);
      }),
  };
}
|
|
553
|
+
|
|
554
|
+
/**
|
|
555
|
+
* Create a layer that provides a mock GCS client
|
|
556
|
+
*/
|
|
557
|
+
export const MockGCSClientLayer = (
|
|
558
|
+
bucket: string,
|
|
559
|
+
config: MockGCSConfig = {
|
|
560
|
+
simulateLatency: 0,
|
|
561
|
+
errorRate: 0,
|
|
562
|
+
uploadFailureRate: 0,
|
|
563
|
+
enableErrorInjection: true,
|
|
564
|
+
},
|
|
565
|
+
): Layer.Layer<GCSClientService, never, never> => {
|
|
566
|
+
return Layer.succeed(GCSClientService, createMockGCSClient(bucket, config));
|
|
567
|
+
};
|