@chunkflowjs/upload-server 0.0.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,536 @@
+ import { createReadStream, existsSync, promises } from "fs";
+ import { dirname, join } from "path";
+ import { randomBytes } from "crypto";
+ import { sign, verify } from "jsonwebtoken";
+
+ //#region src/local-storage-adapter.ts
+ /**
+ * Local filesystem storage adapter
+ *
+ * Stores chunks in the local filesystem using a directory structure
+ * based on the first two characters of the chunk hash for better performance.
+ *
+ * Directory structure: baseDir/ab/abcdef123456...
+ */
+ var LocalStorageAdapter = class {
+ baseDir;
+ initialized = false;
+ constructor(options = {}) {
+ this.baseDir = options.baseDir || "./uploads";
+ }
+ /**
+ * Initialize the storage adapter by creating the base directory
+ */
+ async initialize() {
+ if (this.initialized) return;
+ try {
+ await promises.mkdir(this.baseDir, { recursive: true });
+ this.initialized = true;
+ } catch (error) {
+ throw new Error(`Failed to initialize local storage: ${error.message}`);
+ }
+ }
+ /**
+ * Get the file path for a chunk hash
+ * Uses the first two characters as a subdirectory for better performance
+ */
+ getChunkPath(chunkHash) {
+ if (chunkHash.length < 2) throw new Error("Chunk hash must be at least 2 characters long");
+ const subDir = chunkHash.substring(0, 2);
+ return join(this.baseDir, subDir, chunkHash);
+ }
+ /**
+ * Save a chunk to the local filesystem
+ */
+ async saveChunk(chunkHash, data) {
+ if (!this.initialized) await this.initialize();
+ const chunkPath = this.getChunkPath(chunkHash);
+ const chunkDir = dirname(chunkPath);
+ try {
+ await promises.mkdir(chunkDir, { recursive: true });
+ await promises.writeFile(chunkPath, data);
+ } catch (error) {
+ throw new Error(`Failed to save chunk ${chunkHash}: ${error.message}`);
+ }
+ }
+ /**
+ * Get a chunk from the local filesystem
+ */
+ async getChunk(chunkHash) {
+ if (!this.initialized) await this.initialize();
+ const chunkPath = this.getChunkPath(chunkHash);
+ try {
+ return await promises.readFile(chunkPath);
+ } catch (error) {
+ if (error.code === "ENOENT") return null;
+ throw new Error(`Failed to get chunk ${chunkHash}: ${error.message}`);
+ }
+ }
+ /**
+ * Check if a chunk exists in the local filesystem
+ */
+ async chunkExists(chunkHash) {
+ if (!this.initialized) await this.initialize();
+ const chunkPath = this.getChunkPath(chunkHash);
+ try {
+ await promises.access(chunkPath);
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ /**
+ * Check if multiple chunks exist in the local filesystem
+ */
+ async chunksExist(chunkHashes) {
+ if (!this.initialized) await this.initialize();
+ return await Promise.all(chunkHashes.map((hash) => this.chunkExists(hash)));
+ }
+ /**
+ * Get a readable stream for a chunk
+ */
+ async getChunkStream(chunkHash) {
+ if (!this.initialized) await this.initialize();
+ const chunkPath = this.getChunkPath(chunkHash);
+ if (!existsSync(chunkPath)) return null;
+ try {
+ return createReadStream(chunkPath);
+ } catch (error) {
+ throw new Error(`Failed to create stream for chunk ${chunkHash}: ${error.message}`);
+ }
+ }
+ /**
+ * Delete a chunk from the local filesystem
+ */
+ async deleteChunk(chunkHash) {
+ if (!this.initialized) await this.initialize();
+ const chunkPath = this.getChunkPath(chunkHash);
+ try {
+ await promises.unlink(chunkPath);
+ } catch (error) {
+ if (error.code === "ENOENT") return;
+ throw new Error(`Failed to delete chunk ${chunkHash}: ${error.message}`);
+ }
+ }
+ /**
+ * Clean up resources (no-op for local storage)
+ */
+ async cleanup() {
+ this.initialized = false;
+ }
+ };
+
+ //#endregion
+ //#region src/memory-database-adapter.ts
+ /**
+ * In-memory database adapter for testing and development
+ *
+ * This adapter stores all data in memory and is useful for:
+ * - Unit testing without database dependencies
+ * - Development and prototyping
+ * - Simple deployments that don't require persistence
+ */
+ var MemoryDatabaseAdapter = class {
+ files = /* @__PURE__ */ new Map();
+ chunks = /* @__PURE__ */ new Map();
+ fileChunks = /* @__PURE__ */ new Map();
+ async initialize() {}
+ async createFile(fileId, options) {
+ if (this.files.has(fileId)) throw new Error(`File ${fileId} already exists`);
+ const now = /* @__PURE__ */ new Date();
+ const file = {
+ fileId,
+ filename: options.filename,
+ size: options.size,
+ mimeType: options.mimeType,
+ fileHash: options.fileHash,
+ uploadToken: options.uploadToken,
+ chunkSize: options.chunkSize,
+ totalChunks: options.totalChunks,
+ uploadedChunks: 0,
+ status: "pending",
+ createdAt: now,
+ updatedAt: now
+ };
+ this.files.set(fileId, file);
+ return { ...file };
+ }
+ async getFile(fileId) {
+ const file = this.files.get(fileId);
+ return file ? { ...file } : null;
+ }
+ async getFileByHash(fileHash) {
+ for (const file of this.files.values()) if (file.fileHash === fileHash) return { ...file };
+ return null;
+ }
+ async getFileByToken(uploadToken) {
+ for (const file of this.files.values()) if (file.uploadToken === uploadToken) return { ...file };
+ return null;
+ }
+ async updateFile(fileId, options) {
+ const file = this.files.get(fileId);
+ if (!file) throw new Error(`File ${fileId} not found`);
+ const updated = {
+ ...file,
+ ...options,
+ updatedAt: /* @__PURE__ */ new Date()
+ };
+ this.files.set(fileId, updated);
+ return { ...updated };
+ }
+ async deleteFile(fileId) {
+ this.files.delete(fileId);
+ }
+ async upsertChunk(chunkHash, size) {
+ const existing = this.chunks.get(chunkHash);
+ if (existing) {
+ const updated = {
+ ...existing,
+ refCount: existing.refCount + 1
+ };
+ this.chunks.set(chunkHash, updated);
+ return { ...updated };
+ }
+ const chunk = {
+ chunkHash,
+ size,
+ refCount: 1,
+ createdAt: /* @__PURE__ */ new Date()
+ };
+ this.chunks.set(chunkHash, chunk);
+ return { ...chunk };
+ }
+ async getChunk(chunkHash) {
+ const chunk = this.chunks.get(chunkHash);
+ return chunk ? { ...chunk } : null;
+ }
+ async chunkExists(chunkHash) {
+ return this.chunks.has(chunkHash);
+ }
+ async chunksExist(chunkHashes) {
+ return chunkHashes.map((hash) => this.chunks.has(hash));
+ }
+ async decrementChunkRef(chunkHash) {
+ const chunk = this.chunks.get(chunkHash);
+ if (!chunk) return;
+ if (chunk.refCount <= 1) this.chunks.delete(chunkHash);
+ else {
+ const updated = {
+ ...chunk,
+ refCount: chunk.refCount - 1
+ };
+ this.chunks.set(chunkHash, updated);
+ }
+ }
+ async createFileChunk(fileId, chunkHash, chunkIndex) {
+ const fileChunk = {
+ fileId,
+ chunkHash,
+ chunkIndex,
+ createdAt: /* @__PURE__ */ new Date()
+ };
+ const fileChunks = this.fileChunks.get(fileId) || [];
+ fileChunks.push(fileChunk);
+ this.fileChunks.set(fileId, fileChunks);
+ return { ...fileChunk };
+ }
+ async getFileChunks(fileId) {
+ return (this.fileChunks.get(fileId) || []).slice().sort((a, b) => a.chunkIndex - b.chunkIndex).map((fc) => ({ ...fc }));
+ }
+ async getFileChunkHashes(fileId) {
+ return (await this.getFileChunks(fileId)).map((fc) => fc.chunkHash);
+ }
+ async deleteFileChunks(fileId) {
+ this.fileChunks.delete(fileId);
+ }
+ async transaction(callback) {
+ return callback();
+ }
+ async cleanup() {
+ this.files.clear();
+ this.chunks.clear();
+ this.fileChunks.clear();
+ }
+ };
+
+ //#endregion
+ //#region src/upload-service.ts
+ /**
+ * Upload service for handling file uploads with chunking and deduplication
+ */
+ var UploadService = class {
+ storageAdapter;
+ databaseAdapter;
+ jwtSecret;
+ tokenExpiration;
+ minChunkSize;
+ maxChunkSize;
+ constructor(options) {
+ this.storageAdapter = options.storageAdapter;
+ this.databaseAdapter = options.databaseAdapter;
+ this.jwtSecret = options.jwtSecret;
+ this.tokenExpiration = options.tokenExpiration || 1440 * 60;
+ this.minChunkSize = options.minChunkSize || 256 * 1024;
+ this.maxChunkSize = options.maxChunkSize || 10 * 1024 * 1024;
+ }
+ /**
+ * Initialize the upload service
+ */
+ async initialize() {
+ await this.storageAdapter.initialize();
+ await this.databaseAdapter.initialize();
+ }
+ /**
+ * Clean up resources
+ */
+ async cleanup() {
+ await this.storageAdapter.cleanup();
+ await this.databaseAdapter.cleanup();
+ }
+ /**
+ * Generate a unique file ID
+ */
+ generateFileId() {
+ return randomBytes(16).toString("hex");
+ }
+ /**
+ * Generate an upload token
+ */
+ generateUploadToken(fileId) {
+ return sign({
+ fileId,
+ type: "upload"
+ }, this.jwtSecret, { expiresIn: this.tokenExpiration });
+ }
+ /**
+ * Verify an upload token
+ */
+ verifyUploadToken(token) {
+ try {
+ const payload = verify(token, this.jwtSecret);
+ if (payload.type !== "upload") throw new Error("Invalid token type");
+ return { fileId: payload.fileId };
+ } catch (error) {
+ throw new Error(`Invalid upload token: ${error.message}`);
+ }
+ }
+ /**
+ * Negotiate chunk size based on file size and client preference
+ */
+ negotiateChunkSize(fileSize, clientChunkSize) {
+ if (clientChunkSize) {
+ if (clientChunkSize > this.maxChunkSize) return this.maxChunkSize;
+ return clientChunkSize;
+ }
+ if (fileSize < 10 * 1024 * 1024) return Math.max(256 * 1024, this.minChunkSize);
+ else if (fileSize < 100 * 1024 * 1024) return Math.max(1024 * 1024, this.minChunkSize);
+ else if (fileSize < 1024 * 1024 * 1024) return Math.max(2 * 1024 * 1024, this.minChunkSize);
+ else return Math.max(5 * 1024 * 1024, this.minChunkSize);
+ }
+ /**
+ * Create a new file upload session
+ *
+ * Generates a unique file ID and upload token, negotiates chunk size,
+ * and saves file metadata to the database.
+ */
+ async createFile(request) {
+ const fileId = this.generateFileId();
+ const uploadToken = this.generateUploadToken(fileId);
+ const chunkSize = this.negotiateChunkSize(request.fileSize, request.preferredChunkSize);
+ const totalChunks = Math.ceil(request.fileSize / chunkSize);
+ await this.databaseAdapter.createFile(fileId, {
+ filename: request.fileName,
+ size: request.fileSize,
+ mimeType: request.fileType,
+ fileHash: "",
+ uploadToken,
+ chunkSize,
+ totalChunks
+ });
+ return {
+ uploadToken,
+ negotiatedChunkSize: chunkSize
+ };
+ }
+ /**
+ * Verify file and chunk hashes for instant upload (秒传)
+ *
+ * Checks if the file hash already exists (full instant upload)
+ * or if any chunks already exist (partial instant upload).
+ * For existing chunks, automatically creates file-chunk relationships.
+ */
+ async verifyHash(request) {
+ const { fileId } = this.verifyUploadToken(request.uploadToken);
+ const file = await this.databaseAdapter.getFile(fileId);
+ if (!file) throw new Error("File not found");
+ if (request.fileHash) {
+ const existingFile = await this.databaseAdapter.getFileByHash(request.fileHash);
+ if (existingFile && existingFile.status === "completed") return {
+ fileExists: true,
+ fileUrl: existingFile.url,
+ existingChunks: [],
+ missingChunks: []
+ };
+ }
+ if (request.chunkHashes && request.chunkHashes.length > 0) {
+ const chunkExistence = await this.databaseAdapter.chunksExist(request.chunkHashes);
+ const existingChunks = [];
+ const missingChunks = [];
+ for (let index = 0; index < chunkExistence.length; index++) if (chunkExistence[index]) {
+ existingChunks.push(index);
+ const chunkHash = request.chunkHashes[index];
+ try {
+ if (!(await this.databaseAdapter.getFileChunks(fileId)).some((fc) => fc.chunkIndex === index && fc.chunkHash === chunkHash)) {
+ await this.databaseAdapter.createFileChunk(fileId, chunkHash, index);
+ const chunk = await this.databaseAdapter.getChunk(chunkHash);
+ if (chunk) await this.databaseAdapter.upsertChunk(chunkHash, chunk.size);
+ }
+ } catch (error) {
+ console.error(`Failed to create file-chunk relationship for chunk ${index}:`, error);
+ }
+ } else missingChunks.push(index);
+ const uploadedChunks = (await this.databaseAdapter.getFileChunks(fileId)).length;
+ const status = uploadedChunks === file.totalChunks ? "completed" : uploadedChunks > 0 ? "uploading" : "pending";
+ await this.databaseAdapter.updateFile(fileId, {
+ uploadedChunks,
+ status
+ });
+ return {
+ fileExists: false,
+ existingChunks,
+ missingChunks
+ };
+ }
+ return {
+ fileExists: false,
+ existingChunks: [],
+ missingChunks: []
+ };
+ }
+ /**
+ * Upload a chunk
+ *
+ * Validates the upload token and chunk hash, saves the chunk to storage
+ * with deduplication, and updates file metadata.
+ */
+ async uploadChunk(request) {
+ const { fileId } = this.verifyUploadToken(request.uploadToken);
+ const file = await this.databaseAdapter.getFile(fileId);
+ if (!file) throw new Error("File not found");
+ let chunkBuffer;
+ if (Buffer.isBuffer(request.chunk)) chunkBuffer = request.chunk;
+ else {
+ const arrayBuffer = await request.chunk.arrayBuffer();
+ chunkBuffer = Buffer.from(arrayBuffer);
+ }
+ if ((await import("spark-md5")).default.ArrayBuffer.hash(chunkBuffer) !== request.chunkHash) throw new Error("Chunk hash mismatch");
+ if (!await this.storageAdapter.chunkExists(request.chunkHash)) await this.storageAdapter.saveChunk(request.chunkHash, chunkBuffer);
+ await this.databaseAdapter.upsertChunk(request.chunkHash, chunkBuffer.length);
+ await this.databaseAdapter.createFileChunk(fileId, request.chunkHash, request.chunkIndex);
+ const uploadedChunks = (await this.databaseAdapter.getFileChunks(fileId)).length;
+ const status = uploadedChunks === file.totalChunks ? "completed" : "uploading";
+ await this.databaseAdapter.updateFile(fileId, {
+ uploadedChunks,
+ status
+ });
+ return {
+ success: true,
+ chunkHash: request.chunkHash
+ };
+ }
+ /**
+ * Merge file chunks (logical merge)
+ *
+ * Verifies all chunks are uploaded and updates file status to completed.
+ * Generates a file access URL.
+ */
+ async mergeFile(request) {
+ const { fileId } = this.verifyUploadToken(request.uploadToken);
+ const file = await this.databaseAdapter.getFile(fileId);
+ if (!file) throw new Error("File not found");
+ if (file.uploadedChunks !== file.totalChunks) throw new Error(`Not all chunks uploaded: ${file.uploadedChunks}/${file.totalChunks}`);
+ const url = `/upload/files/${fileId}`;
+ await this.databaseAdapter.updateFile(fileId, {
+ fileHash: request.fileHash,
+ status: "completed",
+ completedAt: /* @__PURE__ */ new Date(),
+ url
+ });
+ return {
+ success: true,
+ fileUrl: url,
+ fileId
+ };
+ }
+ /**
+ * Get file metadata
+ *
+ * Retrieves file metadata from database without creating a stream.
+ */
+ async getFileMetadata(fileId) {
+ return await this.databaseAdapter.getFile(fileId);
+ }
+ /**
+ * Get file stream for download
+ *
+ * Reads chunks in order and creates a stream pipeline for file output.
+ * Supports Range requests for partial content.
+ */
+ async getFileStream(fileId, range) {
+ const file = await this.databaseAdapter.getFile(fileId);
+ if (!file || file.status !== "completed") return null;
+ const chunkHashes = await this.databaseAdapter.getFileChunkHashes(fileId);
+ const { Readable } = await import("stream");
+ const storageAdapter = this.storageAdapter;
+ let currentChunkIndex = 0;
+ let bytesRead = 0;
+ const startByte = range?.start || 0;
+ const endByte = range?.end || file.size - 1;
+ return {
+ stream: new Readable({ async read() {
+ try {
+ while (currentChunkIndex < chunkHashes.length) {
+ const chunkHash = chunkHashes[currentChunkIndex];
+ const chunkData = await storageAdapter.getChunk(chunkHash);
+ if (!chunkData) {
+ this.destroy(/* @__PURE__ */ new Error(`Chunk ${chunkHash} not found`));
+ return;
+ }
+ const chunkStart = currentChunkIndex * file.chunkSize;
+ const chunkEnd = chunkStart + chunkData.length - 1;
+ if (chunkEnd < startByte) {
+ currentChunkIndex++;
+ continue;
+ }
+ if (chunkStart > endByte) {
+ this.push(null);
+ return;
+ }
+ let sliceStart = 0;
+ let sliceEnd = chunkData.length;
+ if (chunkStart < startByte) sliceStart = startByte - chunkStart;
+ if (chunkEnd > endByte) sliceEnd = endByte - chunkStart + 1;
+ const slicedData = chunkData.slice(sliceStart, sliceEnd);
+ this.push(slicedData);
+ bytesRead += slicedData.length;
+ currentChunkIndex++;
+ if (bytesRead >= endByte - startByte + 1) {
+ this.push(null);
+ return;
+ }
+ return;
+ }
+ this.push(null);
+ } catch (error) {
+ this.destroy(error);
+ }
+ } }),
+ size: range ? endByte - startByte + 1 : file.size,
+ mimeType: file.mimeType
+ };
+ }
+ };
+
+ //#endregion
+ export { LocalStorageAdapter, MemoryDatabaseAdapter, UploadService };
+ //# sourceMappingURL=index.mjs.map
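
For orientation, here is a minimal end-to-end sketch (illustrative only, not shipped in the package) that wires the three exports together and drives one upload through createFile, uploadChunk, and mergeFile. It assumes the package's main entry resolves to this dist file, and it computes chunk hashes with spark-md5 the same way uploadChunk verifies them above.

// Illustrative usage sketch; request field names follow the shapes that
// createFile/uploadChunk/mergeFile read in the code above.
import SparkMD5 from "spark-md5";
import { LocalStorageAdapter, MemoryDatabaseAdapter, UploadService } from "@chunkflowjs/upload-server";

const service = new UploadService({
  storageAdapter: new LocalStorageAdapter({ baseDir: "./uploads" }),
  databaseAdapter: new MemoryDatabaseAdapter(),
  jwtSecret: "dev-secret",
});
await service.initialize();

const data = Buffer.from("hello chunked world");

// 1. Open an upload session; the service negotiates the chunk size.
const { uploadToken, negotiatedChunkSize } = await service.createFile({
  fileName: "hello.txt",
  fileSize: data.length,
  fileType: "text/plain",
});

// 2. Upload each chunk; the hash must be the spark-md5 ArrayBuffer hash,
//    matching the "Chunk hash mismatch" check in uploadChunk.
for (let i = 0; i * negotiatedChunkSize < data.length; i++) {
  const chunk = data.subarray(i * negotiatedChunkSize, (i + 1) * negotiatedChunkSize);
  await service.uploadChunk({
    uploadToken,
    chunkIndex: i,
    chunk,
    chunkHash: SparkMD5.ArrayBuffer.hash(chunk),
  });
}

// 3. Logical merge: marks the file completed and returns its access URL.
const { fileUrl } = await service.mergeFile({
  uploadToken,
  fileHash: SparkMD5.ArrayBuffer.hash(data),
});
console.log(fileUrl); // e.g. /upload/files/<fileId>
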
package/dist/local-storage-adapter.d.ts ADDED
@@ -0,0 +1,20 @@
+ import { Readable } from "stream";
+ import type { StorageAdapter } from "./storage-adapter";
+ export interface LocalStorageAdapterOptions {
+ baseDir?: string;
+ }
+ export declare class LocalStorageAdapter implements StorageAdapter {
+ private baseDir;
+ private initialized;
+ constructor(options?: LocalStorageAdapterOptions);
+ initialize(): Promise<void>;
+ private getChunkPath;
+ saveChunk(chunkHash: string, data: Buffer): Promise<void>;
+ getChunk(chunkHash: string): Promise<Buffer | null>;
+ chunkExists(chunkHash: string): Promise<boolean>;
+ chunksExist(chunkHashes: string[]): Promise<boolean[]>;
+ getChunkStream(chunkHash: string): Promise<Readable | null>;
+ deleteChunk(chunkHash: string): Promise<void>;
+ cleanup(): Promise<void>;
+ }
+ //# sourceMappingURL=local-storage-adapter.d.ts.map
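
A short sketch of this surface in use (illustrative only, not part of the package): per the getChunkPath logic in index.mjs, a chunk keyed by hash "abcdef123456" lands at ./uploads/ab/abcdef123456, the first two hash characters forming the fan-out subdirectory.

// Illustrative sketch exercising the declared adapter methods.
import { LocalStorageAdapter } from "@chunkflowjs/upload-server";

const storage = new LocalStorageAdapter({ baseDir: "./uploads" });
await storage.initialize();

await storage.saveChunk("abcdef123456", Buffer.from("chunk bytes"));
console.log(await storage.chunkExists("abcdef123456")); // true
console.log((await storage.getChunk("abcdef123456")).toString()); // "chunk bytes"

// getChunkStream returns a Readable, or null when the chunk file is absent.
const stream = await storage.getChunkStream("abcdef123456");
stream?.pipe(process.stdout);

await storage.deleteChunk("abcdef123456"); // idempotent: ENOENT is swallowed
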
package/dist/memory-database-adapter.d.ts ADDED
@@ -0,0 +1,25 @@
+ import type { DatabaseAdapter, FileMetadata, ChunkEntity, FileChunkEntity, CreateFileOptions, UpdateFileOptions } from "./database-adapter";
+ export declare class MemoryDatabaseAdapter implements DatabaseAdapter {
+ private files;
+ private chunks;
+ private fileChunks;
+ initialize(): Promise<void>;
+ createFile(fileId: string, options: CreateFileOptions): Promise<FileMetadata>;
+ getFile(fileId: string): Promise<FileMetadata | null>;
+ getFileByHash(fileHash: string): Promise<FileMetadata | null>;
+ getFileByToken(uploadToken: string): Promise<FileMetadata | null>;
+ updateFile(fileId: string, options: UpdateFileOptions): Promise<FileMetadata>;
+ deleteFile(fileId: string): Promise<void>;
+ upsertChunk(chunkHash: string, size: number): Promise<ChunkEntity>;
+ getChunk(chunkHash: string): Promise<ChunkEntity | null>;
+ chunkExists(chunkHash: string): Promise<boolean>;
+ chunksExist(chunkHashes: string[]): Promise<boolean[]>;
+ decrementChunkRef(chunkHash: string): Promise<void>;
+ createFileChunk(fileId: string, chunkHash: string, chunkIndex: number): Promise<FileChunkEntity>;
+ getFileChunks(fileId: string): Promise<FileChunkEntity[]>;
+ getFileChunkHashes(fileId: string): Promise<string[]>;
+ deleteFileChunks(fileId: string): Promise<void>;
+ transaction<T>(callback: () => Promise<T>): Promise<T>;
+ cleanup(): Promise<void>;
+ }
+ //# sourceMappingURL=memory-database-adapter.d.ts.map
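
A short sketch of the chunk reference counting declared above (illustrative only, not part of the package): per the implementation in index.mjs, upsertChunk increments refCount for an already-known hash, and decrementChunkRef deletes the entry once the count would reach zero.

// Illustrative sketch of the deduplication ref-count semantics.
import { MemoryDatabaseAdapter } from "@chunkflowjs/upload-server";

const db = new MemoryDatabaseAdapter();
await db.initialize();

await db.upsertChunk("abcdef123456", 1024); // new hash -> refCount: 1
await db.upsertChunk("abcdef123456", 1024); // same hash -> refCount: 2
console.log((await db.getChunk("abcdef123456")).refCount); // 2

await db.decrementChunkRef("abcdef123456"); // refCount: 1
await db.decrementChunkRef("abcdef123456"); // would reach 0 -> entry deleted
console.log(await db.chunkExists("abcdef123456")); // false
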