@net-protocol/storage 0.1.7 → 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +97 -3
- package/dist/index.d.ts +97 -3
- package/dist/index.js +132 -1
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +128 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.d.mts
CHANGED
@@ -411,9 +411,10 @@ declare function getChunkCount(data: string): number;
  * Assemble chunks into a single string and decompress
  * Pure function - can be used in both client and server code
  * @param chunks - Array of hex-encoded chunk strings
- * @returns
+ * @param returnHex - If true, returns hex string instead of converting to UTF-8
+ * @returns Decompressed string (or hex string if returnHex=true) or undefined if decompression fails
  */
-declare function assembleChunks(chunks: string[]): string | undefined;
+declare function assembleChunks(chunks: string[], returnHex?: boolean): string | undefined;
 
 /**
  * XML reference type for chunk metadata
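The widened `assembleChunks` signature above is backwards compatible. A minimal usage sketch; where the chunks come from is outside this diff, and the variable names are illustrative:

```typescript
import { assembleChunks } from "@net-protocol/storage";

// Hex-encoded, compressed chunk strings previously read back from ChunkedStorage.
declare const chunks: string[];

// Default behaviour: decompress and decode the payload to UTF-8 text.
const text = assembleChunks(chunks);

// New in this release: keep the payload as a 0x-prefixed hex string (useful for binary data).
const hex = assembleChunks(chunks, true);

if (text === undefined) {
  // assembleChunks returns undefined when decompression fails.
}
```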
@@ -534,6 +535,99 @@ declare function detectFileTypeFromBase64(base64Data: string): string | undefine
  */
 declare function base64ToDataUri(base64Data: string): string;
 
+/**
+ * Streaming utilities for processing large files without loading them entirely into memory.
+ * Uses file.slice() to read in chunks, keeping memory usage low.
+ */
+/**
+ * Result from processing a single file chunk
+ */
+interface StreamingChunkResult {
+    /** Index of this chunk (0-based) */
+    chunkIndex: number;
+    /** Keccak256 hash of the chunk content (66 chars) */
+    hash: string;
+    /** Compressed chunks ready for ChunkedStorage (array of hex strings) */
+    compressedChunks: string[];
+}
+/**
+ * Result from processing an entire file via streaming
+ */
+interface StreamingProcessResult {
+    /** All chunk hashes in order */
+    hashes: string[];
+    /** All compressed chunk arrays in order */
+    allCompressedChunks: string[][];
+    /** Total number of chunks */
+    totalChunks: number;
+    /** Whether the file was treated as binary */
+    isBinary: boolean;
+}
+/**
+ * Detects if a file should be treated as binary based on its MIME type.
+ * Binary files will be base64 encoded; text files will be read as text.
+ *
+ * @param file - The file to check
+ * @returns true if the file should be treated as binary
+ */
+declare function isBinaryFile(file: File): boolean;
+/**
+ * Reads a slice of a file as a string.
+ * For binary files, returns base64-encoded data.
+ * For text files, returns the raw text.
+ *
+ * @param file - The file to read from
+ * @param offset - Start byte offset
+ * @param size - Number of bytes to read
+ * @param isBinary - Whether to read as binary (base64) or text
+ * @param isFirstChunk - Whether this is the first chunk (for data URI prefix)
+ * @returns The chunk as a string
+ */
+declare function readFileSlice(file: File, offset: number, size: number, isBinary: boolean, isFirstChunk: boolean): Promise<string>;
+/**
+ * Async generator that processes a file in streaming chunks.
+ * Reads the file in 80KB slices, hashes and compresses each,
+ * yielding results one at a time to keep memory usage low.
+ *
+ * @param file - The file to process
+ * @param chunkSize - Size of each chunk in bytes (default: 80KB)
+ * @yields StreamingChunkResult for each chunk processed
+ *
+ * @example
+ * ```typescript
+ * const hashes: string[] = [];
+ * const transactions: TransactionConfig[] = [];
+ *
+ * for await (const result of processFileStreaming(file)) {
+ *   hashes.push(result.hash);
+ *   transactions.push(createTransaction(result.compressedChunks));
+ * }
+ *
+ * const metadata = generateXmlMetadata(hashes, 0, operatorAddress);
+ * ```
+ */
+declare function processFileStreaming(file: File, chunkSize?: number): AsyncGenerator<StreamingChunkResult>;
+/**
+ * Processes an entire file via streaming and returns all results.
+ * This is a convenience function that collects all generator results.
+ *
+ * For very large files, prefer using processFileStreaming directly
+ * to process chunks as they're generated.
+ *
+ * @param file - The file to process
+ * @param onProgress - Optional callback for progress updates
+ * @returns All hashes and compressed chunks
+ */
+declare function processFileStreamingComplete(file: File, onProgress?: (current: number, total: number) => void): Promise<StreamingProcessResult>;
+/**
+ * Estimates the number of chunks for a file without reading it.
+ *
+ * @param fileSize - Size of the file in bytes
+ * @param isBinary - Whether the file is binary (uses smaller chunk size for base64 alignment)
+ * @returns Estimated number of chunks
+ */
+declare function estimateChunkCount(fileSize: number, isBinary?: boolean): number;
+
 declare const STORAGE_CONTRACT: {
     abi: Abi;
     address: `0x${string}`;
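The new streaming API above pairs a low-level generator (processFileStreaming) with a convenience wrapper. A minimal sketch of the wrapper plus the upfront chunk estimate, assuming `file` comes from a browser file input; the variable names and logging are illustrative:

```typescript
import {
  estimateChunkCount,
  isBinaryFile,
  processFileStreamingComplete,
} from "@net-protocol/storage";

declare const file: File; // e.g. from an <input type="file"> change event

// Cheap estimate (no file reads) for sizing a progress bar.
const expected = estimateChunkCount(file.size, isBinaryFile(file));
console.log(`expecting ~${expected} chunk(s)`);

// Drains processFileStreaming and reports progress as chunks are hashed and compressed.
const result = await processFileStreamingComplete(file, (current, total) => {
  console.log(`processed ${current}/${total}`);
});

console.log(result.hashes.length, result.totalChunks, result.isBinary);
```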
@@ -569,4 +663,4 @@ declare const CONCURRENT_XML_FETCHES = 3;
  */
 declare function resolveXmlRecursive(content: string, defaultOperator: string, client: PublicClient, maxDepth: number, visited?: Set<string>, inheritedOperator?: string): Promise<string>;
 
-export { type BulkStorageKey, CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, type ChunkedMetadata, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, type StorageClientOptions, type StorageData, type UseStorageOptions, type UseXmlStorageOptions, type XmlReference$1 as XmlReference, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, parseNetReferences, processDataForStorage, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, useBulkStorage, useStorage, useStorageForOperator, useStorageForOperatorAndKey, useStorageFromRouter, useStorageTotalWrites, useXmlStorage, validateDataSize };
+export { type BulkStorageKey, CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, type ChunkedMetadata, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, type StorageClientOptions, type StorageData, type StreamingChunkResult, type StreamingProcessResult, type UseStorageOptions, type UseXmlStorageOptions, type XmlReference$1 as XmlReference, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, estimateChunkCount, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, isBinaryFile, parseNetReferences, processDataForStorage, processFileStreaming, processFileStreamingComplete, readFileSlice, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, useBulkStorage, useStorage, useStorageForOperator, useStorageForOperatorAndKey, useStorageFromRouter, useStorageTotalWrites, useXmlStorage, validateDataSize };
package/dist/index.d.ts
CHANGED
@@ -411,9 +411,10 @@ declare function getChunkCount(data: string): number;
  * Assemble chunks into a single string and decompress
  * Pure function - can be used in both client and server code
  * @param chunks - Array of hex-encoded chunk strings
- * @returns
+ * @param returnHex - If true, returns hex string instead of converting to UTF-8
+ * @returns Decompressed string (or hex string if returnHex=true) or undefined if decompression fails
  */
-declare function assembleChunks(chunks: string[]): string | undefined;
+declare function assembleChunks(chunks: string[], returnHex?: boolean): string | undefined;
 
 /**
  * XML reference type for chunk metadata
@@ -534,6 +535,99 @@ declare function detectFileTypeFromBase64(base64Data: string): string | undefine
  */
 declare function base64ToDataUri(base64Data: string): string;
 
+/**
+ * Streaming utilities for processing large files without loading them entirely into memory.
+ * Uses file.slice() to read in chunks, keeping memory usage low.
+ */
+/**
+ * Result from processing a single file chunk
+ */
+interface StreamingChunkResult {
+    /** Index of this chunk (0-based) */
+    chunkIndex: number;
+    /** Keccak256 hash of the chunk content (66 chars) */
+    hash: string;
+    /** Compressed chunks ready for ChunkedStorage (array of hex strings) */
+    compressedChunks: string[];
+}
+/**
+ * Result from processing an entire file via streaming
+ */
+interface StreamingProcessResult {
+    /** All chunk hashes in order */
+    hashes: string[];
+    /** All compressed chunk arrays in order */
+    allCompressedChunks: string[][];
+    /** Total number of chunks */
+    totalChunks: number;
+    /** Whether the file was treated as binary */
+    isBinary: boolean;
+}
+/**
+ * Detects if a file should be treated as binary based on its MIME type.
+ * Binary files will be base64 encoded; text files will be read as text.
+ *
+ * @param file - The file to check
+ * @returns true if the file should be treated as binary
+ */
+declare function isBinaryFile(file: File): boolean;
+/**
+ * Reads a slice of a file as a string.
+ * For binary files, returns base64-encoded data.
+ * For text files, returns the raw text.
+ *
+ * @param file - The file to read from
+ * @param offset - Start byte offset
+ * @param size - Number of bytes to read
+ * @param isBinary - Whether to read as binary (base64) or text
+ * @param isFirstChunk - Whether this is the first chunk (for data URI prefix)
+ * @returns The chunk as a string
+ */
+declare function readFileSlice(file: File, offset: number, size: number, isBinary: boolean, isFirstChunk: boolean): Promise<string>;
+/**
+ * Async generator that processes a file in streaming chunks.
+ * Reads the file in 80KB slices, hashes and compresses each,
+ * yielding results one at a time to keep memory usage low.
+ *
+ * @param file - The file to process
+ * @param chunkSize - Size of each chunk in bytes (default: 80KB)
+ * @yields StreamingChunkResult for each chunk processed
+ *
+ * @example
+ * ```typescript
+ * const hashes: string[] = [];
+ * const transactions: TransactionConfig[] = [];
+ *
+ * for await (const result of processFileStreaming(file)) {
+ *   hashes.push(result.hash);
+ *   transactions.push(createTransaction(result.compressedChunks));
+ * }
+ *
+ * const metadata = generateXmlMetadata(hashes, 0, operatorAddress);
+ * ```
+ */
+declare function processFileStreaming(file: File, chunkSize?: number): AsyncGenerator<StreamingChunkResult>;
+/**
+ * Processes an entire file via streaming and returns all results.
+ * This is a convenience function that collects all generator results.
+ *
+ * For very large files, prefer using processFileStreaming directly
+ * to process chunks as they're generated.
+ *
+ * @param file - The file to process
+ * @param onProgress - Optional callback for progress updates
+ * @returns All hashes and compressed chunks
+ */
+declare function processFileStreamingComplete(file: File, onProgress?: (current: number, total: number) => void): Promise<StreamingProcessResult>;
+/**
+ * Estimates the number of chunks for a file without reading it.
+ *
+ * @param fileSize - Size of the file in bytes
+ * @param isBinary - Whether the file is binary (uses smaller chunk size for base64 alignment)
+ * @returns Estimated number of chunks
+ */
+declare function estimateChunkCount(fileSize: number, isBinary?: boolean): number;
+
 declare const STORAGE_CONTRACT: {
     abi: Abi;
     address: `0x${string}`;
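readFileSlice is also exported on its own. A sketch of reading the first two slices of a file; `file` is assumed to come from a file input, and the 79,998-byte slice size mirrors the BINARY_CHUNK_SIZE constant added to dist/index.js below:

```typescript
import { isBinaryFile, readFileSlice } from "@net-protocol/storage";

declare const file: File;

const binary = isBinaryFile(file);
const sliceSize = 79_998; // binary slice size used by the implementation below

// Per the docs above, the first slice of a binary file carries the
// `data:<mime>;base64,` prefix; later slices are plain base64
// (text files are returned as raw text either way).
const first = await readFileSlice(file, 0, sliceSize, binary, true);
const second = await readFileSlice(file, sliceSize, sliceSize, binary, false);
```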
@@ -569,4 +663,4 @@ declare const CONCURRENT_XML_FETCHES = 3;
  */
 declare function resolveXmlRecursive(content: string, defaultOperator: string, client: PublicClient, maxDepth: number, visited?: Set<string>, inheritedOperator?: string): Promise<string>;
 
-export { type BulkStorageKey, CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, type ChunkedMetadata, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, type StorageClientOptions, type StorageData, type UseStorageOptions, type UseXmlStorageOptions, type XmlReference$1 as XmlReference, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, parseNetReferences, processDataForStorage, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, useBulkStorage, useStorage, useStorageForOperator, useStorageForOperatorAndKey, useStorageFromRouter, useStorageTotalWrites, useXmlStorage, validateDataSize };
+export { type BulkStorageKey, CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, type ChunkedMetadata, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, type StorageClientOptions, type StorageData, type StreamingChunkResult, type StreamingProcessResult, type UseStorageOptions, type UseXmlStorageOptions, type XmlReference$1 as XmlReference, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, estimateChunkCount, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, isBinaryFile, parseNetReferences, processDataForStorage, processFileStreaming, processFileStreamingComplete, readFileSlice, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, useBulkStorage, useStorage, useStorageForOperator, useStorageForOperatorAndKey, useStorageFromRouter, useStorageTotalWrites, useXmlStorage, validateDataSize };
package/dist/index.js
CHANGED
@@ -662,7 +662,7 @@ function getChunkCount(data) {
   const hexWithoutPrefix = dataBytes.slice(2);
   return Math.max(1, Math.ceil(hexWithoutPrefix.length / (CHUNK_SIZE * 2)));
 }
-function assembleChunks(chunks) {
+function assembleChunks(chunks, returnHex) {
   try {
     let assembled = chunks[0] || "0x";
     for (let i = 1; i < chunks.length; i++) {
@@ -675,6 +675,9 @@ function assembleChunks(chunks) {
     try {
       const decompressed = pako__default.default.ungzip(bytes);
       const hexString = Buffer.from(decompressed).toString("utf8");
+      if (returnHex) {
+        return hexString.startsWith("0x") ? hexString : `0x${hexString}`;
+      }
       const result = viem.hexToString(hexString);
       return result;
     } catch (error) {
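The new branch returns before the hex-to-UTF-8 conversion. A small illustration of the two return paths, with an illustrative payload (per the code above, the gunzipped value is itself a hex string held as UTF-8 text):

```typescript
import { hexToString } from "viem";

// Illustrative gunzipped payload: a UTF-8 string that encodes hex bytes.
const hexString = "0x48656c6c6f";

// returnHex === true: the hex payload is returned as-is (0x prefix ensured).
const asHex = hexString;              // "0x48656c6c6f"

// Default path: the hex payload is decoded to UTF-8 text.
const asText = hexToString(asHex);    // "Hello"
```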
@@ -2206,6 +2209,129 @@ function base64ToDataUri(base64Data) {
   const mimeType = detectFileTypeFromBase64(base64Data) || "application/octet-stream";
   return `data:${mimeType};base64,${base64Data}`;
 }
+var STREAMING_CHUNK_SIZE = 80 * 1e3;
+var BINARY_CHUNK_SIZE = 79998;
+function isBinaryFile(file) {
+  const mimeType = file.type.toLowerCase();
+  const textTypes = [
+    "text/",
+    "application/json",
+    "application/xml",
+    "application/javascript",
+    "application/typescript",
+    "application/x-javascript",
+    "application/ecmascript"
+  ];
+  for (const textType of textTypes) {
+    if (mimeType.startsWith(textType)) {
+      return false;
+    }
+  }
+  if (!mimeType || mimeType === "application/octet-stream") {
+    const extension = file.name.split(".").pop()?.toLowerCase() || "";
+    const textExtensions = [
+      "txt",
+      "md",
+      "json",
+      "xml",
+      "html",
+      "htm",
+      "css",
+      "js",
+      "ts",
+      "jsx",
+      "tsx",
+      "yaml",
+      "yml",
+      "toml",
+      "ini",
+      "cfg",
+      "conf",
+      "log",
+      "csv",
+      "svg"
+    ];
+    return !textExtensions.includes(extension);
+  }
+  return true;
+}
+async function readFileSlice(file, offset, size, isBinary, isFirstChunk) {
+  const blob = file.slice(offset, offset + size);
+  if (isBinary) {
+    const arrayBuffer = await blob.arrayBuffer();
+    const bytes = new Uint8Array(arrayBuffer);
+    let base64 = "";
+    const chunkSize = 32766;
+    for (let i = 0; i < bytes.length; i += chunkSize) {
+      const chunk = bytes.slice(i, i + chunkSize);
+      base64 += btoa(String.fromCharCode(...chunk));
+    }
+    if (isFirstChunk) {
+      const mimeType = detectFileTypeFromBase64(base64) || file.type || "application/octet-stream";
+      return `data:${mimeType};base64,${base64}`;
+    }
+    return base64;
+  } else {
+    return await blob.text();
+  }
+}
+async function* processFileStreaming(file, chunkSize = STREAMING_CHUNK_SIZE) {
+  const binary = isBinaryFile(file);
+  const effectiveChunkSize = binary ? BINARY_CHUNK_SIZE : chunkSize;
+  let offset = 0;
+  let chunkIndex = 0;
+  while (offset < file.size) {
+    const chunkString = await readFileSlice(
+      file,
+      offset,
+      effectiveChunkSize,
+      binary,
+      chunkIndex === 0
+    );
+    const hash = core.keccak256HashString(chunkString);
+    const compressedChunks = chunkDataForStorage(chunkString);
+    yield {
+      chunkIndex,
+      hash,
+      compressedChunks
+    };
+    offset += effectiveChunkSize;
+    chunkIndex++;
+  }
+  if (chunkIndex === 0) {
+    const emptyString = binary ? `data:${file.type || "application/octet-stream"};base64,` : "";
+    const hash = core.keccak256HashString(emptyString);
+    const compressedChunks = chunkDataForStorage(emptyString);
+    yield {
+      chunkIndex: 0,
+      hash,
+      compressedChunks
+    };
+  }
+}
+async function processFileStreamingComplete(file, onProgress) {
+  const totalChunks = Math.max(1, Math.ceil(file.size / STREAMING_CHUNK_SIZE));
+  const hashes = [];
+  const allCompressedChunks = [];
+  const binary = isBinaryFile(file);
+  let processed = 0;
+  for await (const result of processFileStreaming(file)) {
+    hashes.push(result.hash);
+    allCompressedChunks.push(result.compressedChunks);
+    processed++;
+    onProgress?.(processed, totalChunks);
+  }
+  return {
+    hashes,
+    allCompressedChunks,
+    totalChunks: hashes.length,
+    isBinary: binary
+  };
+}
+function estimateChunkCount(fileSize, isBinary = true) {
+  const chunkSize = isBinary ? BINARY_CHUNK_SIZE : STREAMING_CHUNK_SIZE;
+  return Math.max(1, Math.ceil(fileSize / chunkSize));
+}
 
 exports.CHUNKED_STORAGE_CONTRACT = CHUNKED_STORAGE_CONTRACT;
 exports.CHUNKED_STORAGE_READER_CONTRACT = CHUNKED_STORAGE_READER_CONTRACT;
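A quick worked example of the two chunk-size constants above: text files stream in 80,000-byte slices, while binary files use 79,998 bytes, a multiple of 3 so each slice base64-encodes without padding between slices (matching the "base64 alignment" note in the estimateChunkCount docs). The file size below is illustrative:

```typescript
const STREAMING_CHUNK_SIZE = 80 * 1000; // 80_000 bytes per text slice
const BINARY_CHUNK_SIZE = 79_998;       // 3 * 26_666, keeps base64 unpadded per slice

// A hypothetical 1 MB binary upload:
const fileSize = 1_000_000;
const chunks = Math.max(1, Math.ceil(fileSize / BINARY_CHUNK_SIZE)); // 13 chunks
```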
@@ -2224,6 +2350,7 @@ exports.containsXmlReferences = containsXmlReferences;
 exports.detectFileTypeFromBase64 = detectFileTypeFromBase64;
 exports.detectStorageType = detectStorageType;
 exports.encodeStorageKeyForUrl = encodeStorageKeyForUrl;
+exports.estimateChunkCount = estimateChunkCount;
 exports.fileToDataUri = fileToDataUri;
 exports.formatStorageKeyForDisplay = formatStorageKeyForDisplay;
 exports.generateStorageEmbedTag = generateStorageEmbedTag;
@@ -2232,8 +2359,12 @@ exports.generateXmlMetadataWithSource = generateXmlMetadataWithSource;
 exports.getChunkCount = getChunkCount;
 exports.getReferenceKey = getReferenceKey;
 exports.getStorageKeyBytes = getStorageKeyBytes;
+exports.isBinaryFile = isBinaryFile;
 exports.parseNetReferences = parseNetReferences;
 exports.processDataForStorage = processDataForStorage;
+exports.processFileStreaming = processFileStreaming;
+exports.processFileStreamingComplete = processFileStreamingComplete;
+exports.readFileSlice = readFileSlice;
 exports.resolveOperator = resolveOperator;
 exports.resolveXmlRecursive = resolveXmlRecursive;
 exports.shouldSuggestXmlStorage = shouldSuggestXmlStorage;