@componentor/fs 1.1.7 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +123 -27
- package/dist/index.js.map +1 -1
- package/dist/opfs-hybrid.js +123 -27
- package/dist/opfs-hybrid.js.map +1 -1
- package/dist/opfs-worker.js +123 -27
- package/dist/opfs-worker.js.map +1 -1
- package/package.json +4 -4
- package/src/index.ts +3 -3
- package/src/packed-storage.ts +170 -33
- package/src/types.ts +4 -0
package/package.json
CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@componentor/fs",
-  "version": "1.1.7",
+  "version": "1.1.8",
   "description": "A blazing-fast, Node.js-compatible filesystem for the browser using OPFS",
   "type": "module",
   "main": "./dist/index.js",
@@ -29,7 +29,7 @@
   ],
   "repository": {
     "type": "git",
-    "url": "https://github.com/componentor/
+    "url": "https://github.com/componentor/fs.git"
   },
   "author": "Componentor",
   "license": "MIT",
@@ -51,9 +51,9 @@
     "typescript"
   ],
   "bugs": {
-    "url": "https://github.com/componentor/
+    "url": "https://github.com/componentor/fs/issues"
   },
-  "homepage": "https://github.com/componentor/
+  "homepage": "https://github.com/componentor/fs#readme",
   "devDependencies": {
     "@isomorphic-git/lightning-fs": "^4.6.2",
     "@types/node": "^25.0.3",
```
package/src/index.ts
CHANGED

```diff
@@ -68,7 +68,7 @@ export default class OPFS {
   public readonly constants = constants
 
   constructor(options: OPFSExtendedOptions = {}) {
-    const { useSync = true, verbose = false, workerUrl, read, write } = options
+    const { useSync = true, verbose = false, useCompression = false, useChecksum = true, workerUrl, read, write } = options
     this.verbose = verbose
 
     // If workerUrl is provided, use hybrid mode
@@ -83,13 +83,13 @@ export default class OPFS {
       this.useSync = false
       this.handleManager = new HandleManager()
       this.symlinkManager = new SymlinkManager(this.handleManager, false)
-      this.packedStorage = new PackedStorage(this.handleManager, false)
+      this.packedStorage = new PackedStorage(this.handleManager, false, useCompression, useChecksum)
     } else {
       this.useSync = useSync && typeof FileSystemFileHandle !== 'undefined' &&
         'createSyncAccessHandle' in FileSystemFileHandle.prototype
       this.handleManager = new HandleManager()
       this.symlinkManager = new SymlinkManager(this.handleManager, this.useSync)
-      this.packedStorage = new PackedStorage(this.handleManager, this.useSync)
+      this.packedStorage = new PackedStorage(this.handleManager, this.useSync, useCompression, useChecksum)
     }
   }
```
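Both new options default to the 1.1.7 behavior and are simply threaded through to PackedStorage, so opting in is a constructor-level choice. A minimal sketch of a caller enabling compression (the import specifier is illustrative; the option names match the destructuring above):

```ts
import OPFS from '@componentor/fs'

// Defaults: useCompression = false, useChecksum = true,
// so existing call sites keep their current behavior.
const fs = new OPFS({
  useCompression: true, // gzip pack entries when it actually shrinks them
  useChecksum: true     // keep CRC32 verification of the pack file
})
```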
package/src/packed-storage.ts
CHANGED

```diff
@@ -8,14 +8,84 @@
  * [index length: 4 bytes][CRC32: 4 bytes][JSON index][file data...]
  *
  * Index format:
- * { "path": { offset: number, size: number }, ... }
+ * { "path": { offset: number, size: number, originalSize?: number }, ... }
  *
+ * When originalSize is present, data is compressed (size = compressed, originalSize = uncompressed)
  * CRC32 is calculated over [JSON index][file data...] for integrity verification.
  */
 
 import type { HandleManager } from './handle-manager.js'
 import { createECORRUPTED } from './errors.js'
 
+// ============ Compression ============
+// Uses browser's native CompressionStream API
+
+async function compress(data: Uint8Array): Promise<Uint8Array> {
+  // Skip compression for small data (overhead not worth it)
+  if (data.length < 100) return data
+
+  try {
+    const stream = new CompressionStream('gzip')
+    const writer = stream.writable.getWriter()
+    writer.write(data)
+    writer.close()
+
+    const chunks: Uint8Array[] = []
+    const reader = stream.readable.getReader()
+    let totalSize = 0
+
+    while (true) {
+      const { done, value } = await reader.read()
+      if (done) break
+      chunks.push(value)
+      totalSize += value.length
+    }
+
+    // Only use compressed if it's actually smaller
+    if (totalSize >= data.length) return data
+
+    const result = new Uint8Array(totalSize)
+    let offset = 0
+    for (const chunk of chunks) {
+      result.set(chunk, offset)
+      offset += chunk.length
+    }
+    return result
+  } catch {
+    // Compression not available, return original
+    return data
+  }
+}
+
+async function decompress(data: Uint8Array): Promise<Uint8Array> {
+  // Decompression MUST succeed for compressed data - throw on failure
+  const stream = new DecompressionStream('gzip')
+  const writer = stream.writable.getWriter()
+  writer.write(data)
+  writer.close()
+
+  const chunks: Uint8Array[] = []
+  const reader = stream.readable.getReader()
+  let totalSize = 0
+
+  while (true) {
+    const { done, value } = await reader.read()
+    if (done) break
+    chunks.push(value)
+    totalSize += value.length
+  }
+
+  const result = new Uint8Array(totalSize)
+  let offset = 0
+  for (const chunk of chunks) {
+    result.set(chunk, offset)
+    offset += chunk.length
+  }
+  return result
+}
+
+// ============ CRC32 ============
+
 // CRC32 lookup table (pre-computed for performance)
 const CRC32_TABLE = new Uint32Array(256)
 for (let i = 0; i < 256; i++) {
```
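The helpers above implement the gzip side of the pack layout documented in the header comment. As a hedged, standalone sketch of how that 8-byte header is decoded (parsePackHeader is hypothetical, written to mirror the getUint32/TextDecoder calls in loadIndex below):

```ts
function parsePackHeader(packBytes: Uint8Array) {
  const view = new DataView(packBytes.buffer, packBytes.byteOffset, packBytes.byteLength)
  const indexLen = view.getUint32(0, true)   // bytes 0-3: JSON index length, little-endian
  const storedCrc = view.getUint32(4, true)  // bytes 4-7: CRC32 over [JSON index][file data...]
  const content = packBytes.subarray(8)
  const index = JSON.parse(new TextDecoder().decode(content.subarray(0, indexLen)))
  return { indexLen, storedCrc, content, index }
}
```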
```diff
@@ -37,8 +107,16 @@ function crc32(data: Uint8Array): number {
   return (crc ^ 0xffffffff) >>> 0
 }
 
+// ============ Types ============
+
+interface PackIndexEntry {
+  offset: number
+  size: number
+  originalSize?: number // Present if compressed
+}
+
 interface PackIndex {
-  [path: string]: { offset: number; size: number }
+  [path: string]: PackIndexEntry
 }
 
 const PACK_FILE = '/.opfs-pack'
```
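To see what the optional field means in practice, an index mixing a compressed and an incompressible entry would look roughly like this (paths, offsets, and sizes are invented for illustration):

```ts
const exampleIndex: PackIndex = {
  '/notes.json': { offset: 96, size: 310, originalSize: 1024 }, // stored gzipped: 1024 -> 310 bytes
  '/photo.bin': { offset: 406, size: 2048 }                     // gzip didn't shrink it, stored raw
}
```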
```diff
@@ -46,12 +124,17 @@ const PACK_FILE = '/.opfs-pack'
 export class PackedStorage {
   private handleManager: HandleManager
   private useSync: boolean
+  private useCompression: boolean
+  private useChecksum: boolean
   private index: PackIndex | null = null
   private indexLoaded = false
 
-  constructor(handleManager: HandleManager, useSync: boolean) {
+  constructor(handleManager: HandleManager, useSync: boolean, useCompression = false, useChecksum = true) {
     this.handleManager = handleManager
     this.useSync = useSync
+    // Only enable compression if API is available
+    this.useCompression = useCompression && typeof CompressionStream !== 'undefined'
+    this.useChecksum = useChecksum
   }
 
   /**
```
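Note the asymmetry in how the two features degrade: useCompression is silently forced off when CompressionStream is unavailable, while decompress() has no fallback by design, so a pack written with compression on one engine cannot be read on an engine that lacks DecompressionStream. In practice the two APIs ship together, which is presumably why only the write side is guarded.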
```diff
@@ -110,10 +193,12 @@ export class PackedStorage {
       access.read(content, { at: 8 })
       access.close()
 
-      // Verify CRC32
-      const calculatedCrc = crc32(content)
-      if (calculatedCrc !== storedCrc) {
-        throw createECORRUPTED(PACK_FILE)
+      // Verify CRC32 if enabled
+      if (this.useChecksum && storedCrc !== 0) {
+        const calculatedCrc = crc32(content)
+        if (calculatedCrc !== storedCrc) {
+          throw createECORRUPTED(PACK_FILE)
+        }
       }
 
       // Parse index from content
@@ -130,11 +215,13 @@ export class PackedStorage {
     const indexLen = view.getUint32(0, true)
     const storedCrc = view.getUint32(4, true)
 
-    // Verify CRC32 over content (everything after header)
+    // Verify CRC32 over content (everything after header) if enabled
     const content = data.subarray(8)
-    const calculatedCrc = crc32(content)
-    if (calculatedCrc !== storedCrc) {
-      throw createECORRUPTED(PACK_FILE)
+    if (this.useChecksum && storedCrc !== 0) {
+      const calculatedCrc = crc32(content)
+      if (calculatedCrc !== storedCrc) {
+        throw createECORRUPTED(PACK_FILE)
+      }
     }
 
     const indexJson = new TextDecoder().decode(content.subarray(0, indexLen))
```
```diff
@@ -155,15 +242,18 @@ export class PackedStorage {
 
   /**
    * Get file size from pack (for stat)
+   * Returns originalSize if compressed, otherwise size
    */
   async getSize(path: string): Promise<number | null> {
     const index = await this.loadIndex()
     const entry = index[path]
-    return entry ? entry.size : null
+    if (!entry) return null
+    return entry.originalSize ?? entry.size
   }
 
   /**
    * Read a file from the pack
+   * Handles decompression if file was stored compressed
    */
   async read(path: string): Promise<Uint8Array | null> {
     const index = await this.loadIndex()
@@ -173,16 +263,22 @@ export class PackedStorage {
     const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
     if (!fileHandle) return null
 
-    const buffer = new Uint8Array(entry.size)
+    let buffer: Uint8Array
 
     if (this.useSync) {
       const access = await fileHandle.createSyncAccessHandle()
+      buffer = new Uint8Array(entry.size)
       access.read(buffer, { at: entry.offset })
       access.close()
     } else {
       const file = await fileHandle.getFile()
       const data = new Uint8Array(await file.arrayBuffer())
-      buffer.set(data.subarray(entry.offset, entry.offset + entry.size))
+      buffer = data.slice(entry.offset, entry.offset + entry.size)
+    }
+
+    // Decompress if needed
+    if (entry.originalSize !== undefined) {
+      return decompress(buffer)
     }
 
     return buffer
```
```diff
@@ -191,6 +287,7 @@ export class PackedStorage {
   /**
    * Read multiple files from the pack in a single operation
    * Loads index once, reads all data in parallel
+   * Handles decompression if files were stored compressed
    */
   async readBatch(paths: string[]): Promise<Map<string, Uint8Array | null>> {
     const results = new Map<string, Uint8Array | null>()
@@ -199,11 +296,11 @@ export class PackedStorage {
     const index = await this.loadIndex()
 
     // Find which paths are in the pack
-    const toRead: Array<{ path: string; offset: number; size: number }> = []
+    const toRead: Array<{ path: string; offset: number; size: number; originalSize?: number }> = []
     for (const path of paths) {
       const entry = index[path]
       if (entry) {
-        toRead.push({ path, offset: entry.offset, size: entry.size })
+        toRead.push({ path, offset: entry.offset, size: entry.size, originalSize: entry.originalSize })
       } else {
         results.set(path, null)
       }
```
```diff
@@ -219,24 +316,42 @@ export class PackedStorage {
       return results
     }
 
+    // Read all files
+    const decompressPromises: Array<{ path: string; promise: Promise<Uint8Array> }> = []
+
     if (this.useSync) {
       const access = await fileHandle.createSyncAccessHandle()
-      for (const { path, offset, size } of toRead) {
+      for (const { path, offset, size, originalSize } of toRead) {
         const buffer = new Uint8Array(size)
         access.read(buffer, { at: offset })
-        results.set(path, buffer)
+
+        if (originalSize !== undefined) {
+          // Queue for decompression
+          decompressPromises.push({ path, promise: decompress(buffer) })
+        } else {
+          results.set(path, buffer)
+        }
       }
       access.close()
     } else {
       const file = await fileHandle.getFile()
       const data = new Uint8Array(await file.arrayBuffer())
-      for (const { path, offset, size } of toRead) {
-        const buffer = data.slice(offset, offset + size)
-
-        results.set(path, buffer)
+      for (const { path, offset, size, originalSize } of toRead) {
+        const buffer = data.slice(offset, offset + size)
+
+        if (originalSize !== undefined) {
+          decompressPromises.push({ path, promise: decompress(buffer) })
+        } else {
+          results.set(path, buffer)
+        }
       }
     }
 
+    // Wait for all decompressions
+    for (const { path, promise } of decompressPromises) {
+      results.set(path, await promise)
+    }
+
     return results
   }
```
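A design note on this batch path: decompression promises are started inside the read loops but awaited only after all reads have finished (in the sync branch, after access.close()), so the gzip work can proceed concurrently rather than blocking each read in turn.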
```diff
@@ -244,6 +359,7 @@ export class PackedStorage {
   * Write multiple files to the pack in a single operation
   * This is the key optimization - 100 files become 1 write!
   * Includes CRC32 checksum for integrity verification.
+  * Optionally compresses data for smaller storage.
   * Note: This replaces the entire pack with the new entries
   */
  async writeBatch(entries: Array<{ path: string; data: Uint8Array }>): Promise<void> {
@@ -251,9 +367,26 @@ export class PackedStorage {
 
     const encoder = new TextEncoder()
 
-    // Calculate total data size
+    // Compress data if enabled
+    let processedEntries: Array<{ path: string; data: Uint8Array; originalSize?: number }>
+    if (this.useCompression) {
+      processedEntries = await Promise.all(
+        entries.map(async ({ path, data }) => {
+          const compressed = await compress(data)
+          // Only use compressed if it's actually smaller
+          if (compressed.length < data.length) {
+            return { path, data: compressed, originalSize: data.length }
+          }
+          return { path, data }
+        })
+      )
+    } else {
+      processedEntries = entries
+    }
+
+    // Calculate total data size (using compressed sizes where applicable)
     let totalDataSize = 0
-    for (const { data } of entries) {
+    for (const { data } of processedEntries) {
       totalDataSize += data.length
     }
 
```
```diff
@@ -269,8 +402,12 @@ export class PackedStorage {
     prevHeaderSize = headerSize
 
     let currentOffset = headerSize
-    for (const { path, data } of entries) {
-      newIndex[path] = { offset: currentOffset, size: data.length }
+    for (const { path, data, originalSize } of processedEntries) {
+      const entry: PackIndexEntry = { offset: currentOffset, size: data.length }
+      if (originalSize !== undefined) {
+        entry.originalSize = originalSize
+      }
+      newIndex[path] = entry
       currentOffset += data.length
     }
 
```
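With invented numbers to make the bookkeeping concrete: given a computed headerSize of 500 bytes and two processed entries of 300 and 120 bytes, the loop assigns offsets 500 and 800 respectively; each size records the stored (possibly compressed) length, while originalSize, when set, preserves the pre-compression length.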
```diff
@@ -288,14 +425,14 @@ export class PackedStorage {
     packBuffer.set(finalIndexBuf, 8)
 
     // Write data at correct offsets
-    for (const { path, data } of entries) {
+    for (const { path, data } of processedEntries) {
       const entry = newIndex[path]
       packBuffer.set(data, entry.offset)
     }
 
-    // Calculate CRC32 over content (index + data, everything after header)
+    // Calculate CRC32 over content (index + data, everything after header) if enabled
     const content = packBuffer.subarray(8)
-    const checksum = crc32(content)
+    const checksum = this.useChecksum ? crc32(content) : 0
 
     // Write header (index length + CRC32)
     view.setUint32(0, finalIndexBuf.length, true)
```
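The zero doubles as a sentinel: both verification paths in loadIndex only check when storedCrc !== 0, so a pack written with useChecksum: false still loads on a reader that has checksums enabled (at the cost of skipping verification in the astronomically unlikely case that a genuine CRC32 equals 0).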
```diff
@@ -365,8 +502,8 @@ export class PackedStorage {
       newContent.set(dataPortion, newIndexBuf.length)
     }
 
-    // Calculate new CRC32
-    const checksum = crc32(newContent)
+    // Calculate new CRC32 if enabled
+    const checksum = this.useChecksum ? crc32(newContent) : 0
 
     // Build new header
     const newHeader = new Uint8Array(8)
@@ -398,8 +535,8 @@ export class PackedStorage {
     newContent.set(newIndexBuf, 0)
     newContent.set(dataPortion, newIndexBuf.length)
 
-    // Calculate CRC32
-    const checksum = crc32(newContent)
+    // Calculate CRC32 if enabled
+    const checksum = this.useChecksum ? crc32(newContent) : 0
 
     // Build new file
     const newFile = new Uint8Array(8 + newContent.length)
```
package/src/types.ts
CHANGED

```diff
@@ -30,6 +30,10 @@ export interface OPFSOptions {
   useSync?: boolean
   /** Enable verbose logging (default: false) */
   verbose?: boolean
+  /** Enable compression for batch writes (default: false) */
+  useCompression?: boolean
+  /** Enable CRC32 checksum for batch writes (default: true) */
+  useChecksum?: boolean
 }
 
 /**
```