@net-protocol/storage 0.1.7 → 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +97 -3
- package/dist/index.d.ts +97 -3
- package/dist/index.js +132 -1
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +128 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -655,7 +655,7 @@ function getChunkCount(data) {
|
|
|
655
655
|
const hexWithoutPrefix = dataBytes.slice(2);
|
|
656
656
|
return Math.max(1, Math.ceil(hexWithoutPrefix.length / (CHUNK_SIZE * 2)));
|
|
657
657
|
}
|
|
658
|
-
function assembleChunks(chunks) {
|
|
658
|
+
function assembleChunks(chunks, returnHex) {
|
|
659
659
|
try {
|
|
660
660
|
let assembled = chunks[0] || "0x";
|
|
661
661
|
for (let i = 1; i < chunks.length; i++) {
|
|
@@ -668,6 +668,9 @@ function assembleChunks(chunks) {
|
|
|
668
668
|
try {
|
|
669
669
|
const decompressed = pako.ungzip(bytes);
|
|
670
670
|
const hexString = Buffer.from(decompressed).toString("utf8");
|
|
671
|
+
if (returnHex) {
|
|
672
|
+
return hexString.startsWith("0x") ? hexString : `0x${hexString}`;
|
|
673
|
+
}
|
|
671
674
|
const result = hexToString(hexString);
|
|
672
675
|
return result;
|
|
673
676
|
} catch (error) {
|
|
@@ -2199,7 +2202,130 @@ function base64ToDataUri(base64Data) {
|
|
|
2199
2202
|
const mimeType = detectFileTypeFromBase64(base64Data) || "application/octet-stream";
|
|
2200
2203
|
return `data:${mimeType};base64,${base64Data}`;
|
|
2201
2204
|
}
|
|
2205
|
+
// Byte size of each file slice when streaming text content to storage.
var STREAMING_CHUNK_SIZE = 80000;
// Slice size for binary files. 79998 is divisible by 3, so every slice's
// base64 encoding ends on a whole 3-byte group (no '=' padding mid-stream)
// and consecutive slices' base64 strings concatenate into valid base64.
var BINARY_CHUNK_SIZE = 79998;
|
|
2207
|
+
/**
 * Heuristically decide whether a File should be treated as binary
 * (base64 data-URI chunks) instead of plain text.
 *
 * Decision order:
 *   1. MIME type starts with a known text prefix  -> not binary.
 *   2. Missing or generic ("application/octet-stream") MIME type falls
 *      back to the file extension: known text extension -> not binary.
 *   3. Anything else -> binary.
 *
 * @param {File} file - File-like object exposing `type` and `name`.
 * @returns {boolean} true when the file should be streamed as binary.
 */
function isBinaryFile(file) {
  const mimeType = file.type.toLowerCase();
  const TEXT_MIME_PREFIXES = [
    "text/",
    "application/json",
    "application/xml",
    "application/javascript",
    "application/typescript",
    "application/x-javascript",
    "application/ecmascript"
  ];
  if (TEXT_MIME_PREFIXES.some((prefix) => mimeType.startsWith(prefix))) {
    return false;
  }
  // Unknown or generic MIME type: fall back to the file extension.
  if (!mimeType || mimeType === "application/octet-stream") {
    const extension = file.name.split(".").pop()?.toLowerCase() || "";
    const TEXT_EXTENSIONS = new Set([
      "txt", "md", "json", "xml", "html", "htm", "css", "js", "ts",
      "jsx", "tsx", "yaml", "yml", "toml", "ini", "cfg", "conf",
      "log", "csv", "svg"
    ]);
    return !TEXT_EXTENSIONS.has(extension);
  }
  return true;
}
|
|
2251
|
+
/**
 * Read one slice of a File and return it as a storage-ready string.
 *
 * Text slices are returned verbatim. Binary slices are base64-encoded;
 * the very first binary slice is wrapped as a data URI so the assembled
 * stream starts with `data:<mime>;base64,`.
 *
 * @param {File} file - Source file (any Blob-like with .slice()).
 * @param {number} offset - Byte offset of the slice.
 * @param {number} size - Byte length of the slice.
 * @param {boolean} isBinary - Encode as base64 instead of reading text.
 * @param {boolean} isFirstChunk - Prefix the data-URI header (binary only).
 * @returns {Promise<string>} The encoded slice.
 */
async function readFileSlice(file, offset, size, isBinary, isFirstChunk) {
  const blob = file.slice(offset, offset + size);
  if (!isBinary) {
    return await blob.text();
  }
  const bytes = new Uint8Array(await blob.arrayBuffer());
  // Batch btoa calls: String.fromCharCode(...) would overflow the argument
  // limit on large slices. 32766 is divisible by 3, so each batch's base64
  // output is padding-free and the batches concatenate into valid base64.
  const B64_BATCH = 32766;
  let base64 = "";
  for (let i = 0; i < bytes.length; i += B64_BATCH) {
    const batch = bytes.subarray(i, i + B64_BATCH);
    base64 += btoa(String.fromCharCode(...batch));
  }
  if (isFirstChunk) {
    const mimeType = detectFileTypeFromBase64(base64) || file.type || "application/octet-stream";
    return `data:${mimeType};base64,${base64}`;
  }
  return base64;
}
|
|
2271
|
+
/**
 * Stream a file slice-by-slice, yielding one storage-ready record per slice.
 *
 * Binary files are sliced at BINARY_CHUNK_SIZE regardless of `chunkSize`
 * (so base64 batches stay padding-free); text files use `chunkSize`.
 * An empty file still yields exactly one record: an empty data-URI header
 * for binary files, or the empty string for text.
 *
 * @param {File} file - File to stream.
 * @param {number} [chunkSize=STREAMING_CHUNK_SIZE] - Slice size for text files.
 * @yields {{chunkIndex: number, hash: string, compressedChunks: string[]}}
 */
async function* processFileStreaming(file, chunkSize = STREAMING_CHUNK_SIZE) {
  const binary = isBinaryFile(file);
  const sliceSize = binary ? BINARY_CHUNK_SIZE : chunkSize;
  let emitted = 0;
  for (let offset = 0; offset < file.size; offset += sliceSize) {
    // The first slice of a binary file carries the data-URI header.
    const chunkString = await readFileSlice(file, offset, sliceSize, binary, emitted === 0);
    yield {
      chunkIndex: emitted,
      hash: keccak256HashString(chunkString),
      compressedChunks: chunkDataForStorage(chunkString)
    };
    emitted++;
  }
  if (emitted === 0) {
    // Empty file: still emit a single well-formed chunk so downstream
    // consumers always receive at least one record.
    const emptyString = binary ? `data:${file.type || "application/octet-stream"};base64,` : "";
    yield {
      chunkIndex: 0,
      hash: keccak256HashString(emptyString),
      compressedChunks: chunkDataForStorage(emptyString)
    };
  }
}
|
|
2305
|
+
/**
 * Drive processFileStreaming to completion, collecting every chunk hash and
 * compressed-chunk set while reporting progress.
 *
 * Fix: the progress denominator previously used STREAMING_CHUNK_SIZE even for
 * binary files, whose slices are BINARY_CHUNK_SIZE (79998) bytes, so
 * `processed` could exceed the total passed to `onProgress`. The estimate now
 * uses the same slice size the streamer actually picks for this file.
 *
 * @param {File} file - File to process.
 * @param {(processed: number, total: number) => void} [onProgress] - Optional
 *   callback invoked after each chunk with the count so far and the estimate.
 * @returns {Promise<{hashes: string[], allCompressedChunks: string[][],
 *   totalChunks: number, isBinary: boolean}>} Collected results; `totalChunks`
 *   is the true emitted count (>= 1 even for empty files).
 */
async function processFileStreamingComplete(file, onProgress) {
  const binary = isBinaryFile(file);
  // Estimate with the slice size processFileStreaming actually uses for this
  // file so the progress total matches the number of chunks emitted.
  const totalChunks = estimateChunkCount(file.size, binary);
  const hashes = [];
  const allCompressedChunks = [];
  let processed = 0;
  for await (const result of processFileStreaming(file)) {
    hashes.push(result.hash);
    allCompressedChunks.push(result.compressedChunks);
    processed++;
    onProgress?.(processed, totalChunks);
  }
  return {
    hashes,
    allCompressedChunks,
    totalChunks: hashes.length,
    isBinary: binary
  };
}
|
|
2324
|
+
/**
 * Predict how many streaming chunks a file of the given size will produce,
 * without reading the file.
 *
 * @param {number} fileSize - File size in bytes.
 * @param {boolean} [isBinary=true] - Whether the file will stream as binary
 *   (BINARY_CHUNK_SIZE slices) or text (STREAMING_CHUNK_SIZE slices).
 * @returns {number} Estimated chunk count; never less than 1, because even
 *   an empty file yields a single chunk.
 */
function estimateChunkCount(fileSize, isBinary = true) {
  const sliceSize = isBinary ? BINARY_CHUNK_SIZE : STREAMING_CHUNK_SIZE;
  return Math.max(1, Math.ceil(fileSize / sliceSize));
}
|
|
2202
2328
|
|
|
2203
|
-
export { CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, parseNetReferences, processDataForStorage, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, useBulkStorage, useStorage, useStorageForOperator, useStorageForOperatorAndKey, useStorageFromRouter, useStorageTotalWrites, useXmlStorage, validateDataSize };
|
|
2329
|
+
export { CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, estimateChunkCount, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, isBinaryFile, parseNetReferences, processDataForStorage, processFileStreaming, processFileStreamingComplete, readFileSlice, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, useBulkStorage, useStorage, useStorageForOperator, useStorageForOperatorAndKey, useStorageFromRouter, useStorageTotalWrites, useXmlStorage, validateDataSize };
|
|
2204
2330
|
//# sourceMappingURL=index.mjs.map
|
|
2205
2331
|
//# sourceMappingURL=index.mjs.map
|