@componentor/fs 1.2.7 → 2.0.0

package/src/packed-storage.ts DELETED
@@ -1,604 +0,0 @@
- /**
-  * Packed Storage - Stores multiple files in a single OPFS file
-  *
-  * Instead of creating 100 separate files (100 OPFS API calls),
-  * we write all data to one pack file with an index (1-2 API calls).
-  *
-  * Format:
-  * [index length: 4 bytes][CRC32: 4 bytes][JSON index][file data...]
-  *
-  * Index format:
-  * { "path": { offset: number, size: number, originalSize?: number }, ... }
-  *
-  * When originalSize is present, data is compressed (size = compressed, originalSize = uncompressed)
-  * CRC32 is calculated over [JSON index][file data...] for integrity verification.
-  */
-
- import type { HandleManager } from './handle-manager.js'
- import { fileLock } from './handle-manager.js'
- import { createECORRUPTED } from './errors.js'
-
- // ============ Compression ============
- // Uses browser's native CompressionStream API
-
- async function compress(data: Uint8Array): Promise<Uint8Array> {
-   // Skip compression for small data (overhead not worth it)
-   if (data.length < 100) return data
-
-   try {
-     const stream = new CompressionStream('gzip')
-     const writer = stream.writable.getWriter()
-     writer.write(data)
-     writer.close()
-
-     const chunks: Uint8Array[] = []
-     const reader = stream.readable.getReader()
-     let totalSize = 0
-
-     while (true) {
-       const { done, value } = await reader.read()
-       if (done) break
-       chunks.push(value)
-       totalSize += value.length
-     }
-
-     // Only use compressed if it's actually smaller
-     if (totalSize >= data.length) return data
-
-     const result = new Uint8Array(totalSize)
-     let offset = 0
-     for (const chunk of chunks) {
-       result.set(chunk, offset)
-       offset += chunk.length
-     }
-     return result
-   } catch {
-     // Compression not available, return original
-     return data
-   }
- }
-
- async function decompress(data: Uint8Array): Promise<Uint8Array> {
-   // Decompression MUST succeed for compressed data - throw on failure
-   const stream = new DecompressionStream('gzip')
-   const writer = stream.writable.getWriter()
-   writer.write(data)
-   writer.close()
-
-   const chunks: Uint8Array[] = []
-   const reader = stream.readable.getReader()
-   let totalSize = 0
-
-   while (true) {
-     const { done, value } = await reader.read()
-     if (done) break
-     chunks.push(value)
-     totalSize += value.length
-   }
-
-   const result = new Uint8Array(totalSize)
-   let offset = 0
-   for (const chunk of chunks) {
-     result.set(chunk, offset)
-     offset += chunk.length
-   }
-   return result
- }
-
- // ============ CRC32 ============
-
- // CRC32 lookup table (pre-computed for performance)
- const CRC32_TABLE = new Uint32Array(256)
- for (let i = 0; i < 256; i++) {
-   let c = i
-   for (let j = 0; j < 8; j++) {
-     c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1
-   }
-   CRC32_TABLE[i] = c
- }
-
- /**
-  * Calculate CRC32 checksum of data
-  */
- function crc32(data: Uint8Array): number {
-   let crc = 0xffffffff
-   for (let i = 0; i < data.length; i++) {
-     crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8)
-   }
-   return (crc ^ 0xffffffff) >>> 0
- }
-
- // ============ Types ============
-
- interface PackIndexEntry {
-   offset: number
-   size: number
-   originalSize?: number // Present if compressed
- }
-
- interface PackIndex {
-   [path: string]: PackIndexEntry
- }
-
- const PACK_FILE = '/.opfs-pack'
-
- export class PackedStorage {
-   private handleManager: HandleManager
-   private useSync: boolean
-   private useCompression: boolean
-   private useChecksum: boolean
-   private index: PackIndex | null = null
-   private indexLoaded = false
-
-   constructor(handleManager: HandleManager, useSync: boolean, useCompression = false, useChecksum = true) {
-     this.handleManager = handleManager
-     this.useSync = useSync
-     // Only enable compression if API is available
-     this.useCompression = useCompression && typeof CompressionStream !== 'undefined'
-     this.useChecksum = useChecksum
-   }
-
-   /**
-    * Reset pack storage state (memory only)
-    */
-   reset(): void {
-     this.index = null
-     this.indexLoaded = false
-   }
-
-   /**
-    * Clear pack storage completely (deletes pack file from disk)
-    */
-   async clear(): Promise<void> {
-     this.index = null
-     this.indexLoaded = false
-
-     try {
-       const root = await this.handleManager.getRoot()
-       await root.removeEntry(PACK_FILE.replace(/^\//, ''))
-     } catch {
-       // Pack file doesn't exist, that's fine
-     }
-   }
-
-   /**
-    * Load pack index from disk (always reloads to support hybrid mode)
-    * Verifies CRC32 checksum for integrity
-    * Note: Caller must hold the lock
-    */
-   private async loadIndex(): Promise<PackIndex> {
-     try {
-       const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
-       if (!fileHandle) {
-         return {}
-       }
-
-       if (this.useSync) {
-         const release = await fileLock.acquire(PACK_FILE)
-         try {
-           const access = await fileHandle.createSyncAccessHandle()
-           try {
-             const size = access.getSize()
-             if (size < 8) {
-               return {}
-             }
-
-             // Read header: index length + CRC32
-             const header = new Uint8Array(8)
-             access.read(header, { at: 0 })
-             const view = new DataView(header.buffer)
-             const indexLen = view.getUint32(0, true)
-             const storedCrc = view.getUint32(4, true)
-
-             // Read everything after header (index + data) for CRC verification
-             const contentSize = size - 8
-             const content = new Uint8Array(contentSize)
-             access.read(content, { at: 8 })
-
-             // Verify CRC32 if enabled
-             if (this.useChecksum && storedCrc !== 0) {
-               const calculatedCrc = crc32(content)
-               if (calculatedCrc !== storedCrc) {
-                 throw createECORRUPTED(PACK_FILE)
-               }
-             }
-
-             // Parse index from content
-             const indexJson = new TextDecoder().decode(content.subarray(0, indexLen))
-             return JSON.parse(indexJson)
-           } finally {
-             access.close()
-           }
-         } finally {
-           release()
-         }
-       } else {
-         const file = await fileHandle.getFile()
-         const data = new Uint8Array(await file.arrayBuffer())
-         if (data.length < 8) {
-           return {}
-         }
-
-         const view = new DataView(data.buffer)
-         const indexLen = view.getUint32(0, true)
-         const storedCrc = view.getUint32(4, true)
-
-         // Verify CRC32 over content (everything after header) if enabled
-         const content = data.subarray(8)
-         if (this.useChecksum && storedCrc !== 0) {
-           const calculatedCrc = crc32(content)
-           if (calculatedCrc !== storedCrc) {
-             throw createECORRUPTED(PACK_FILE)
-           }
-         }
-
-         const indexJson = new TextDecoder().decode(content.subarray(0, indexLen))
-         return JSON.parse(indexJson)
-       }
-     } catch {
-       return {}
-     }
-   }
-
-   /**
-    * Check if a path exists in the pack
-    */
-   async has(path: string): Promise<boolean> {
-     const index = await this.loadIndex()
-     return path in index
-   }
-
-   /**
-    * Get file size from pack (for stat)
-    * Returns originalSize if compressed, otherwise size
-    */
-   async getSize(path: string): Promise<number | null> {
-     const index = await this.loadIndex()
-     const entry = index[path]
-     if (!entry) return null
-     return entry.originalSize ?? entry.size
-   }
-
-   /**
-    * Read a file from the pack
-    * Handles decompression if file was stored compressed
-    */
-   async read(path: string): Promise<Uint8Array | null> {
-     const index = await this.loadIndex()
-     const entry = index[path]
-     if (!entry) return null
-
-     const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
-     if (!fileHandle) return null
-
-     let buffer: Uint8Array
-
-     if (this.useSync) {
-       const release = await fileLock.acquire(PACK_FILE)
-       try {
-         const access = await fileHandle.createSyncAccessHandle()
-         try {
-           buffer = new Uint8Array(entry.size)
-           access.read(buffer, { at: entry.offset })
-         } finally {
-           access.close()
-         }
-       } finally {
-         release()
-       }
-     } else {
-       const file = await fileHandle.getFile()
-       const data = new Uint8Array(await file.arrayBuffer())
-       buffer = data.slice(entry.offset, entry.offset + entry.size)
-     }
-
-     // Decompress if needed
-     if (entry.originalSize !== undefined) {
-       return decompress(buffer)
-     }
-
-     return buffer
-   }
-
-   /**
-    * Read multiple files from the pack in a single operation
-    * Loads index once, reads all data in parallel
-    * Handles decompression if files were stored compressed
-    */
-   async readBatch(paths: string[]): Promise<Map<string, Uint8Array | null>> {
-     const results = new Map<string, Uint8Array | null>()
-     if (paths.length === 0) return results
-
-     const index = await this.loadIndex()
-
-     // Find which paths are in the pack
-     const toRead: Array<{ path: string; offset: number; size: number; originalSize?: number }> = []
-     for (const path of paths) {
-       const entry = index[path]
-       if (entry) {
-         toRead.push({ path, offset: entry.offset, size: entry.size, originalSize: entry.originalSize })
-       } else {
-         results.set(path, null)
-       }
-     }
-
-     if (toRead.length === 0) return results
-
-     const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
-     if (!fileHandle) {
-       for (const { path } of toRead) {
-         results.set(path, null)
-       }
-       return results
-     }
-
-     // Read all files
-     const decompressPromises: Array<{ path: string; promise: Promise<Uint8Array> }> = []
-
-     if (this.useSync) {
-       const release = await fileLock.acquire(PACK_FILE)
-       try {
-         const access = await fileHandle.createSyncAccessHandle()
-         try {
-           for (const { path, offset, size, originalSize } of toRead) {
-             const buffer = new Uint8Array(size)
-             access.read(buffer, { at: offset })
-
-             if (originalSize !== undefined) {
-               // Queue for decompression
-               decompressPromises.push({ path, promise: decompress(buffer) })
-             } else {
-               results.set(path, buffer)
-             }
-           }
-         } finally {
-           access.close()
-         }
-       } finally {
-         release()
-       }
-     } else {
-       const file = await fileHandle.getFile()
-       const data = new Uint8Array(await file.arrayBuffer())
-       for (const { path, offset, size, originalSize } of toRead) {
-         const buffer = data.slice(offset, offset + size)
-
-         if (originalSize !== undefined) {
-           decompressPromises.push({ path, promise: decompress(buffer) })
-         } else {
-           results.set(path, buffer)
-         }
-       }
-     }
-
-     // Wait for all decompressions
-     for (const { path, promise } of decompressPromises) {
-       results.set(path, await promise)
-     }
-
-     return results
-   }
-
-   /**
-    * Write multiple files to the pack in a single operation
-    * This is the key optimization - 100 files become 1 write!
-    * Includes CRC32 checksum for integrity verification.
-    * Optionally compresses data for smaller storage.
-    * Note: This replaces the entire pack with the new entries
-    */
-   async writeBatch(entries: Array<{ path: string; data: Uint8Array }>): Promise<void> {
-     if (entries.length === 0) return
-
-     const encoder = new TextEncoder()
-
-     // Compress data if enabled
-     let processedEntries: Array<{ path: string; data: Uint8Array; originalSize?: number }>
-     if (this.useCompression) {
-       processedEntries = await Promise.all(
-         entries.map(async ({ path, data }) => {
-           const compressed = await compress(data)
-           // Only use compressed if it's actually smaller
-           if (compressed.length < data.length) {
-             return { path, data: compressed, originalSize: data.length }
-           }
-           return { path, data }
-         })
-       )
-     } else {
-       processedEntries = entries
-     }
-
-     // Calculate total data size (using compressed sizes where applicable)
-     let totalDataSize = 0
-     for (const { data } of processedEntries) {
-       totalDataSize += data.length
-     }
-
-     // Build index - iterate until offsets stabilize
-     // (offset changes -> JSON length changes -> header size changes -> offset changes)
-     // Header format: [index length: 4][CRC32: 4][JSON index][file data...]
-     const newIndex: PackIndex = {}
-     let headerSize = 8 // 4 bytes index length + 4 bytes CRC32
-     let prevHeaderSize = 0
-
-     // Iterate until stable (usually 2-3 iterations)
-     while (headerSize !== prevHeaderSize) {
-       prevHeaderSize = headerSize
-
-       let currentOffset = headerSize
-       for (const { path, data, originalSize } of processedEntries) {
-         const entry: PackIndexEntry = { offset: currentOffset, size: data.length }
-         if (originalSize !== undefined) {
-           entry.originalSize = originalSize
-         }
-         newIndex[path] = entry
-         currentOffset += data.length
-       }
-
-       const indexBuf = encoder.encode(JSON.stringify(newIndex))
-       headerSize = 8 + indexBuf.length
-     }
-
-     // Build the complete pack file
-     const finalIndexBuf = encoder.encode(JSON.stringify(newIndex))
-     const totalSize = headerSize + totalDataSize
-     const packBuffer = new Uint8Array(totalSize)
-     const view = new DataView(packBuffer.buffer)
-
-     // Write index JSON at offset 8
-     packBuffer.set(finalIndexBuf, 8)
-
-     // Write data at correct offsets
-     for (const { path, data } of processedEntries) {
-       const entry = newIndex[path]
-       packBuffer.set(data, entry.offset)
-     }
-
-     // Calculate CRC32 over content (index + data, everything after header) if enabled
-     const content = packBuffer.subarray(8)
-     const checksum = this.useChecksum ? crc32(content) : 0
-
-     // Write header (index length + CRC32)
-     view.setUint32(0, finalIndexBuf.length, true)
-     view.setUint32(4, checksum, true)
-
-     await this.writePackFile(packBuffer)
-     this.index = newIndex
-   }
-
-   /**
-    * Write the pack file to OPFS
-    * Note: Caller must hold the lock
-    */
-   private async writePackFile(data: Uint8Array): Promise<void> {
-     const { fileHandle } = await this.handleManager.getHandle(PACK_FILE, { create: true })
-     if (!fileHandle) return
-
-     if (this.useSync) {
-       const release = await fileLock.acquire(PACK_FILE)
-       try {
-         const access = await fileHandle.createSyncAccessHandle()
-         try {
-           access.truncate(data.length)
-           access.write(data, { at: 0 })
-         } finally {
-           access.close()
-         }
-       } finally {
-         release()
-       }
-     } else {
-       const writable = await fileHandle.createWritable()
-       await writable.write(data)
-       await writable.close()
-     }
-   }
-
-   /**
-    * Remove a path from the pack index
-    * Note: Doesn't reclaim space, just removes from index and recalculates CRC32
-    */
-   async remove(path: string): Promise<boolean> {
-     const index = await this.loadIndex()
-     if (!(path in index)) return false
-
-     delete index[path]
-
-     const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
-     if (!fileHandle) return true
-
-     // Need to read existing file to recalculate CRC32
-     const encoder = new TextEncoder()
-     const newIndexBuf = encoder.encode(JSON.stringify(index))
-
-     if (this.useSync) {
-       const release = await fileLock.acquire(PACK_FILE)
-       try {
-         const access = await fileHandle.createSyncAccessHandle()
-         try {
-           const size = access.getSize()
-
-           // Read old header to get old index length
-           const oldHeader = new Uint8Array(8)
-           access.read(oldHeader, { at: 0 })
-           const oldIndexLen = new DataView(oldHeader.buffer).getUint32(0, true)
-
-           // Read data portion (after old index)
-           const dataStart = 8 + oldIndexLen
-           const dataSize = size - dataStart
-           const dataPortion = new Uint8Array(dataSize)
-           if (dataSize > 0) {
-             access.read(dataPortion, { at: dataStart })
-           }
-
-           // Build new content (new index + data)
-           const newContent = new Uint8Array(newIndexBuf.length + dataSize)
-           newContent.set(newIndexBuf, 0)
-           if (dataSize > 0) {
-             newContent.set(dataPortion, newIndexBuf.length)
-           }
-
-           // Calculate new CRC32 if enabled
-           const checksum = this.useChecksum ? crc32(newContent) : 0
-
-           // Build new header
-           const newHeader = new Uint8Array(8)
-           const view = new DataView(newHeader.buffer)
-           view.setUint32(0, newIndexBuf.length, true)
-           view.setUint32(4, checksum, true)
-
-           // Write new file
-           const newFile = new Uint8Array(8 + newContent.length)
-           newFile.set(newHeader, 0)
-           newFile.set(newContent, 8)
-
-           access.truncate(newFile.length)
-           access.write(newFile, { at: 0 })
-         } finally {
-           access.close()
-         }
-       } finally {
-         release()
-       }
-     } else {
-       // For non-sync, rewrite the whole file
-       const file = await fileHandle.getFile()
-       const oldData = new Uint8Array(await file.arrayBuffer())
-
-       if (oldData.length < 8) return true
-
-       const oldIndexLen = new DataView(oldData.buffer).getUint32(0, true)
-       const dataStart = 8 + oldIndexLen
-       const dataPortion = oldData.subarray(dataStart)
-
-       // Build new content
-       const newContent = new Uint8Array(newIndexBuf.length + dataPortion.length)
-       newContent.set(newIndexBuf, 0)
-       newContent.set(dataPortion, newIndexBuf.length)
-
-       // Calculate CRC32 if enabled
-       const checksum = this.useChecksum ? crc32(newContent) : 0
-
-       // Build new file
-       const newFile = new Uint8Array(8 + newContent.length)
-       const view = new DataView(newFile.buffer)
-       view.setUint32(0, newIndexBuf.length, true)
-       view.setUint32(4, checksum, true)
-       newFile.set(newContent, 8)
-
-       const writable = await fileHandle.createWritable()
-       await writable.write(newFile)
-       await writable.close()
-     }
-
-     return true
-   }
-
-   /**
-    * Check if pack file is being used (has entries)
-    */
-   async isEmpty(): Promise<boolean> {
-     const index = await this.loadIndex()
-     return Object.keys(index).length === 0
-   }
- }
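
For readers auditing this removal: the pack layout documented at the top of the deleted module can be parsed with nothing more than a DataView and a TextDecoder. Below is a minimal standalone sketch of that read path; the helper names parsePackHeader and readPackedEntry are illustrative and were never part of the package's public API.

interface PackedEntry {
  offset: number
  size: number
  originalSize?: number // present when the stored bytes are gzip-compressed
}

// Parse the documented layout: [index length: 4][CRC32: 4][JSON index][file data...]
function parsePackHeader(pack: Uint8Array): { index: Record<string, PackedEntry>; crc: number } {
  if (pack.length < 8) throw new Error('pack file too small')
  const view = new DataView(pack.buffer, pack.byteOffset, pack.byteLength)
  const indexLen = view.getUint32(0, true) // little-endian, matching writeBatch above
  const storedCrc = view.getUint32(4, true)
  const json = new TextDecoder().decode(pack.subarray(8, 8 + indexLen))
  return { index: JSON.parse(json), crc: storedCrc }
}

// Entry offsets are absolute positions within the pack file
function readPackedEntry(pack: Uint8Array, entry: PackedEntry): Uint8Array {
  // when entry.originalSize is set, the caller still has to gunzip these bytes
  return pack.slice(entry.offset, entry.offset + entry.size)
}

Note that the stored CRC32 covers everything after the 8-byte header, so a verifier recomputes the checksum over pack.subarray(8) and compares it against the stored value, exactly as loadIndex does.
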
package/src/path-utils.ts DELETED
@@ -1,97 +0,0 @@
- // Path normalization cache - LRU-style with max size
- const normalizeCache = new Map<string, string>()
- const CACHE_MAX_SIZE = 1000
-
- /**
-  * Normalize a path, handling . and .. components
-  * Results are cached for performance on repeated calls
-  */
- export function normalize(path: string | undefined | null): string {
-   if (path === undefined || path === null) {
-     throw new TypeError('Path cannot be undefined or null')
-   }
-
-   if (typeof path !== 'string') {
-     throw new TypeError(`Expected string path, got ${typeof path}`)
-   }
-
-   if (path === '') {
-     return '/'
-   }
-
-   // Check cache first
-   const cached = normalizeCache.get(path)
-   if (cached !== undefined) {
-     return cached
-   }
-
-   const parts = path.split('/')
-   const stack: string[] = []
-
-   for (const part of parts) {
-     if (part === '' || part === '.') {
-       continue
-     } else if (part === '..') {
-       if (stack.length > 0) stack.pop()
-     } else {
-       stack.push(part)
-     }
-   }
-
-   const result = '/' + stack.join('/')
-
-   // Cache the result (simple LRU: clear when full)
-   if (normalizeCache.size >= CACHE_MAX_SIZE) {
-     // Delete oldest entries (first 25%)
-     const deleteCount = CACHE_MAX_SIZE / 4
-     let count = 0
-     for (const key of normalizeCache.keys()) {
-       if (count++ >= deleteCount) break
-       normalizeCache.delete(key)
-     }
-   }
-   normalizeCache.set(path, result)
-
-   return result
- }
-
- /**
-  * Get parent directory path
-  */
- export function dirname(path: string): string {
-   const normalized = normalize(path)
-   const parts = normalized.split('/').filter(Boolean)
-   if (parts.length < 2) return '/'
-   return '/' + parts.slice(0, -1).join('/')
- }
-
- /**
-  * Get base filename
-  */
- export function basename(path: string): string {
-   const normalized = normalize(path)
-   const parts = normalized.split('/').filter(Boolean)
-   return parts[parts.length - 1] || ''
- }
-
- /**
-  * Join path segments
-  */
- export function join(...paths: string[]): string {
-   return normalize(paths.join('/'))
- }
-
- /**
-  * Check if path is root
-  */
- export function isRoot(path: string): boolean {
-   const normalized = normalize(path)
-   return normalized === '/' || normalized === ''
- }
-
- /**
-  * Get path segments (excluding empty)
-  */
- export function segments(path: string): string[] {
-   return normalize(path).split('/').filter(Boolean)
- }
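
For comparison while reviewing this removal, the deleted helpers implemented POSIX-style path handling rooted at '/'. The results in the comments below follow directly from the implementations above:

import { normalize, dirname, basename, join, segments } from './path-utils.js'

normalize('/a/./b/../c') // '/a/c'     - '.' segments are dropped, '..' pops one level
normalize('')            // '/'        - the empty path normalizes to root
dirname('/a/b/c')        // '/a/b'
basename('/a/b/c')       // 'c'
join('a', 'b/../c')      // '/a/c'     - join() normalizes its result
segments('/a//b/')       // ['a', 'b'] - empty segments are excluded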