@componentor/fs 1.1.7 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -5
- package/dist/index.d.ts +4 -0
- package/dist/index.js +123 -27
- package/dist/index.js.map +1 -1
- package/dist/opfs-hybrid.js +123 -27
- package/dist/opfs-hybrid.js.map +1 -1
- package/dist/opfs-worker.js +123 -27
- package/dist/opfs-worker.js.map +1 -1
- package/package.json +4 -4
- package/src/index.ts +3 -3
- package/src/packed-storage.ts +170 -33
- package/src/types.ts +4 -0
package/dist/opfs-hybrid.js
CHANGED
@@ -580,6 +580,56 @@ var SymlinkManager = class {
 };
 
 // src/packed-storage.ts
+async function compress(data) {
+  if (data.length < 100) return data;
+  try {
+    const stream = new CompressionStream("gzip");
+    const writer = stream.writable.getWriter();
+    writer.write(data);
+    writer.close();
+    const chunks = [];
+    const reader = stream.readable.getReader();
+    let totalSize = 0;
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      chunks.push(value);
+      totalSize += value.length;
+    }
+    if (totalSize >= data.length) return data;
+    const result = new Uint8Array(totalSize);
+    let offset = 0;
+    for (const chunk of chunks) {
+      result.set(chunk, offset);
+      offset += chunk.length;
+    }
+    return result;
+  } catch {
+    return data;
+  }
+}
+async function decompress(data) {
+  const stream = new DecompressionStream("gzip");
+  const writer = stream.writable.getWriter();
+  writer.write(data);
+  writer.close();
+  const chunks = [];
+  const reader = stream.readable.getReader();
+  let totalSize = 0;
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) break;
+    chunks.push(value);
+    totalSize += value.length;
+  }
+  const result = new Uint8Array(totalSize);
+  let offset = 0;
+  for (const chunk of chunks) {
+    result.set(chunk, offset);
+    offset += chunk.length;
+  }
+  return result;
+}
 var CRC32_TABLE = new Uint32Array(256);
 for (let i = 0; i < 256; i++) {
   let c = i;
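The new helpers guard themselves: compress() skips payloads under 100 bytes, hands back the input when gzip does not actually shrink it, and falls back to the raw bytes if CompressionStream is unavailable or throws. A minimal round-trip sketch, assuming the compress()/decompress() functions from the hunk above are in scope:

```ts
// Hedged sketch; compress()/decompress() are the functions added above.
const original = new TextEncoder().encode("hello world ".repeat(50));
const packed = await compress(original);     // gzip output, or `original` if not smaller
if (packed !== original) {
  const restored = await decompress(packed); // only gunzip data that was compressed
  console.log(original.length, packed.length, restored.length);
}
```

Because compress() may return its input untouched, callers must track whether compression actually happened; the pack index records originalSize for exactly that purpose (see the writeBatch hunk below).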
@@ -599,11 +649,15 @@ var PACK_FILE = "/.opfs-pack";
 var PackedStorage = class {
   handleManager;
   useSync;
+  useCompression;
+  useChecksum;
   index = null;
   indexLoaded = false;
-  constructor(handleManager, useSync) {
+  constructor(handleManager, useSync, useCompression = false, useChecksum = true) {
     this.handleManager = handleManager;
     this.useSync = useSync;
+    this.useCompression = useCompression && typeof CompressionStream !== "undefined";
+    this.useChecksum = useChecksum;
   }
   /**
    * Reset pack storage state (memory only)
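Note the feature detection in the constructor: useCompression only sticks when the Compression Streams API exists, so packs silently stay uncompressed on older engines instead of throwing. A hedged construction sketch (PackedStorage is internal to this bundle; HandleManager is constructed with no arguments elsewhere in the same file):

```ts
// Positional flags mirror the new signature:
// (handleManager, useSync, useCompression = false, useChecksum = true)
const storage = new PackedStorage(
  new HandleManager(),
  /* useSync */ true,
  /* useCompression */ true, // latched off if CompressionStream is undefined
  /* useChecksum */ true
);
```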
@@ -650,9 +704,11 @@ var PackedStorage = class {
       const content = new Uint8Array(contentSize);
       access.read(content, { at: 8 });
       access.close();
-      const calculatedCrc = crc32(content);
-      if (calculatedCrc !== storedCrc) {
-        throw createECORRUPTED(PACK_FILE);
+      if (this.useChecksum && storedCrc !== 0) {
+        const calculatedCrc = crc32(content);
+        if (calculatedCrc !== storedCrc) {
+          throw createECORRUPTED(PACK_FILE);
+        }
       }
       const indexJson = new TextDecoder().decode(content.subarray(0, indexLen));
       return JSON.parse(indexJson);
@@ -666,9 +722,11 @@ var PackedStorage = class {
     const indexLen = view.getUint32(0, true);
     const storedCrc = view.getUint32(4, true);
     const content = data.subarray(8);
-    const calculatedCrc = crc32(content);
-    if (calculatedCrc !== storedCrc) {
-      throw createECORRUPTED(PACK_FILE);
+    if (this.useChecksum && storedCrc !== 0) {
+      const calculatedCrc = crc32(content);
+      if (calculatedCrc !== storedCrc) {
+        throw createECORRUPTED(PACK_FILE);
+      }
     }
     const indexJson = new TextDecoder().decode(content.subarray(0, indexLen));
     return JSON.parse(indexJson);
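Both loadIndex paths now read the same on-disk layout, and a stored CRC of 0 doubles as the "no checksum" sentinel; that is what lets packs written with useChecksum disabled pass verification on a reader that has it enabled. The format implied by the two hunks above, as a hedged standalone parser (parsePackHeader is an illustrative name, not an identifier from the package):

```ts
// Pack layout (little-endian), per the readers above:
//   bytes 0-3: uint32 index length    bytes 4-7: uint32 CRC32 (0 = unchecked)
//   bytes 8.. : JSON index, immediately followed by the file data blobs
function parsePackHeader(pack: Uint8Array) {
  const view = new DataView(pack.buffer, pack.byteOffset);
  const indexLen = view.getUint32(0, true);
  const storedCrc = view.getUint32(4, true);
  const content = pack.subarray(8);
  const index = JSON.parse(new TextDecoder().decode(content.subarray(0, indexLen)));
  return { storedCrc, index, data: content.subarray(indexLen) };
}
```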
@@ -686,14 +744,17 @@ var PackedStorage = class {
   }
   /**
    * Get file size from pack (for stat)
+   * Returns originalSize if compressed, otherwise size
    */
   async getSize(path) {
     const index = await this.loadIndex();
     const entry = index[path];
-    return entry ? entry.size : null;
+    if (!entry) return null;
+    return entry.originalSize ?? entry.size;
   }
   /**
    * Read a file from the pack
+   * Handles decompression if file was stored compressed
    */
   async read(path) {
     const index = await this.loadIndex();
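With compression on, an entry's size is the stored (compressed) byte count, so getSize() prefers originalSize to keep stat-style results equal to the logical file length:

```ts
// Hypothetical entry for a 10 000-byte file that gzipped down to 3 100 bytes:
const entry = { offset: 8192, size: 3100, originalSize: 10000 };
const statSize = entry.originalSize ?? entry.size; // 10000 — callers never see 3100
```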
@@ -701,21 +762,26 @@ var PackedStorage = class {
     if (!entry) return null;
     const { fileHandle } = await this.handleManager.getHandle(PACK_FILE);
     if (!fileHandle) return null;
-    const buffer = new Uint8Array(entry.size);
+    let buffer;
     if (this.useSync) {
       const access = await fileHandle.createSyncAccessHandle();
+      buffer = new Uint8Array(entry.size);
       access.read(buffer, { at: entry.offset });
       access.close();
     } else {
       const file = await fileHandle.getFile();
       const data = new Uint8Array(await file.arrayBuffer());
-      buffer.set(data.subarray(entry.offset, entry.offset + entry.size));
+      buffer = data.slice(entry.offset, entry.offset + entry.size);
+    }
+    if (entry.originalSize !== void 0) {
+      return decompress(buffer);
     }
     return buffer;
   }
   /**
    * Read multiple files from the pack in a single operation
    * Loads index once, reads all data in parallel
+   * Handles decompression if files were stored compressed
    */
   async readBatch(paths) {
     const results = /* @__PURE__ */ new Map();
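read() decides whether to gunzip purely by the presence of originalSize on the entry, plausibly the +4 lines noted for package/src/types.ts in the file list above. The implied entry shape (field names from the diff; the interface name is ours):

```ts
// PackIndexEntry is an illustrative name for the per-path record in the index.
interface PackIndexEntry {
  offset: number;        // byte offset of the blob inside the pack file
  size: number;          // stored byte length (compressed length if gzipped)
  originalSize?: number; // set only when the blob is gzip-compressed
}
```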
@@ -725,7 +791,7 @@ var PackedStorage = class {
     for (const path of paths) {
       const entry = index[path];
       if (entry) {
-        toRead.push({ path, offset: entry.offset, size: entry.size });
+        toRead.push({ path, offset: entry.offset, size: entry.size, originalSize: entry.originalSize });
       } else {
         results.set(path, null);
       }
@@ -738,36 +804,62 @@ var PackedStorage = class {
       }
       return results;
     }
+    const decompressPromises = [];
     if (this.useSync) {
       const access = await fileHandle.createSyncAccessHandle();
-      for (const { path, offset, size } of toRead) {
+      for (const { path, offset, size, originalSize } of toRead) {
         const buffer = new Uint8Array(size);
         access.read(buffer, { at: offset });
-        results.set(path, buffer);
+        if (originalSize !== void 0) {
+          decompressPromises.push({ path, promise: decompress(buffer) });
+        } else {
+          results.set(path, buffer);
+        }
       }
       access.close();
     } else {
       const file = await fileHandle.getFile();
       const data = new Uint8Array(await file.arrayBuffer());
-      for (const { path, offset, size } of toRead) {
-        const buffer = new Uint8Array(size);
-        buffer.set(data.subarray(offset, offset + size));
-        results.set(path, buffer);
+      for (const { path, offset, size, originalSize } of toRead) {
+        const buffer = data.slice(offset, offset + size);
+        if (originalSize !== void 0) {
+          decompressPromises.push({ path, promise: decompress(buffer) });
+        } else {
+          results.set(path, buffer);
+        }
       }
     }
+    for (const { path, promise } of decompressPromises) {
+      results.set(path, await promise);
+    }
     return results;
   }
   /**
    * Write multiple files to the pack in a single operation
    * This is the key optimization - 100 files become 1 write!
    * Includes CRC32 checksum for integrity verification.
+   * Optionally compresses data for smaller storage.
    * Note: This replaces the entire pack with the new entries
    */
   async writeBatch(entries) {
     if (entries.length === 0) return;
     const encoder = new TextEncoder();
+    let processedEntries;
+    if (this.useCompression) {
+      processedEntries = await Promise.all(
+        entries.map(async ({ path, data }) => {
+          const compressed = await compress(data);
+          if (compressed.length < data.length) {
+            return { path, data: compressed, originalSize: data.length };
+          }
+          return { path, data };
+        })
+      );
+    } else {
+      processedEntries = entries;
+    }
     let totalDataSize = 0;
-    for (const { data } of entries) {
+    for (const { data } of processedEntries) {
       totalDataSize += data.length;
     }
     const newIndex = {};
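writeBatch compresses all entries in parallel and keeps the gzip output only when it is strictly smaller, so incompressible inputs (images, archives) are stored raw with no originalSize marker. A hedged usage sketch, assuming a `storage` instance built with useCompression enabled and two placeholder buffers:

```ts
// `sourceText` and `pngBytes` are placeholder Uint8Arrays, not package fixtures.
await storage.writeBatch([
  { path: "/src/app.js", data: sourceText },  // text: likely stored gzipped
  { path: "/img/logo.png", data: pngBytes },  // PNG: gzip rarely wins, stored raw
]);
const files = await storage.readBatch(["/src/app.js", "/img/logo.png"]);
// readBatch transparently decompresses any entry that carries originalSize.
```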
@@ -776,8 +868,12 @@ var PackedStorage = class {
     while (headerSize !== prevHeaderSize) {
       prevHeaderSize = headerSize;
       let currentOffset = headerSize;
-      for (const { path, data } of entries) {
-        newIndex[path] = { offset: currentOffset, size: data.length };
+      for (const { path, data, originalSize } of processedEntries) {
+        const entry = { offset: currentOffset, size: data.length };
+        if (originalSize !== void 0) {
+          entry.originalSize = originalSize;
+        }
+        newIndex[path] = entry;
         currentOffset += data.length;
       }
       const indexBuf = encoder.encode(JSON.stringify(newIndex));
@@ -788,12 +884,12 @@ var PackedStorage = class {
     const packBuffer = new Uint8Array(totalSize);
     const view = new DataView(packBuffer.buffer);
     packBuffer.set(finalIndexBuf, 8);
-    for (const { path, data } of entries) {
+    for (const { path, data } of processedEntries) {
       const entry = newIndex[path];
       packBuffer.set(data, entry.offset);
     }
     const content = packBuffer.subarray(8);
-    const checksum = crc32(content);
+    const checksum = this.useChecksum ? crc32(content) : 0;
     view.setUint32(0, finalIndexBuf.length, true);
     view.setUint32(4, checksum, true);
     await this.writePackFile(packBuffer);
@@ -845,7 +941,7 @@ var PackedStorage = class {
     if (dataSize > 0) {
       newContent.set(dataPortion, newIndexBuf.length);
     }
-    const checksum = crc32(newContent);
+    const checksum = this.useChecksum ? crc32(newContent) : 0;
     const newHeader = new Uint8Array(8);
     const view = new DataView(newHeader.buffer);
     view.setUint32(0, newIndexBuf.length, true);
@@ -866,7 +962,7 @@ var PackedStorage = class {
     const newContent = new Uint8Array(newIndexBuf.length + dataPortion.length);
     newContent.set(newIndexBuf, 0);
     newContent.set(dataPortion, newIndexBuf.length);
-    const checksum = crc32(newContent);
+    const checksum = this.useChecksum ? crc32(newContent) : 0;
     const newFile = new Uint8Array(8 + newContent.length);
     const view = new DataView(newFile.buffer);
     view.setUint32(0, newIndexBuf.length, true);
@@ -1027,7 +1123,7 @@ var OPFS = class {
   /** File system constants */
   constants = constants;
   constructor(options = {}) {
-    const { useSync = true, verbose = false, workerUrl, read, write } = options;
+    const { useSync = true, verbose = false, useCompression = false, useChecksum = true, workerUrl, read, write } = options;
     this.verbose = verbose;
     if (workerUrl) {
       this.hybrid = new OPFSHybrid({
@@ -1039,12 +1135,12 @@ var OPFS = class {
       this.useSync = false;
       this.handleManager = new HandleManager();
       this.symlinkManager = new SymlinkManager(this.handleManager, false);
-      this.packedStorage = new PackedStorage(this.handleManager, false);
+      this.packedStorage = new PackedStorage(this.handleManager, false, useCompression, useChecksum);
     } else {
       this.useSync = useSync && typeof FileSystemFileHandle !== "undefined" && "createSyncAccessHandle" in FileSystemFileHandle.prototype;
       this.handleManager = new HandleManager();
       this.symlinkManager = new SymlinkManager(this.handleManager, this.useSync);
-      this.packedStorage = new PackedStorage(this.handleManager, this.useSync);
+      this.packedStorage = new PackedStorage(this.handleManager, this.useSync, useCompression, useChecksum);
     }
   }
   /**
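At the public API level, both features surface as OPFS constructor options with conservative defaults: useCompression: false (opt-in, costs CPU on every write) and useChecksum: true (opt-out). A minimal sketch, assuming the class is exported under the name shown in the bundle:

```ts
import { OPFS } from "@componentor/fs"; // export name assumed from the bundle
const fs = new OPFS({
  useCompression: true, // gzip pack entries when it actually shrinks them
  useChecksum: true,    // CRC32-verify the pack on index load (the default)
});
```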