@net-protocol/storage 0.1.8 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +97 -155
- package/dist/index.d.ts +97 -155
- package/dist/index.js +275 -716
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +272 -710
- package/dist/index.mjs.map +1 -1
- package/dist/react.d.mts +70 -0
- package/dist/react.d.ts +70 -0
- package/dist/react.js +1380 -0
- package/dist/react.js.map +1 -0
- package/dist/react.mjs +1367 -0
- package/dist/react.mjs.map +1 -0
- package/dist/types-BnOI6cJS.d.mts +87 -0
- package/dist/types-BnOI6cJS.d.ts +87 -0
- package/package.json +15 -2
package/dist/index.d.mts
CHANGED
@@ -1,158 +1,7 @@
-import
-
+import { S as StorageClientOptions, a as StorageData, B as BulkStorageKey, b as BulkStorageResult } from './types-BnOI6cJS.mjs';
+export { C as ChunkedMetadata, U as UseStorageOptions, c as UseXmlStorageOptions, X as XmlReference } from './types-BnOI6cJS.mjs';
 import { WriteTransactionConfig } from '@net-protocol/core';
-
-/**
- * Storage data object: { text, value }
- */
-type StorageData = {
-    text: string;
-    value: string;
-};
-/**
- * Bulk storage key for batch operations
- */
-type BulkStorageKey = {
-    key: string;
-    operator: string;
-    keyFormat?: "raw" | "bytes32";
-};
-/**
- * Bulk storage result
- */
-type BulkStorageResult = {
-    text: string;
-    value: string;
-};
-/**
- * Options for useStorage hook
- */
-type UseStorageOptions = {
-    chainId: number;
-    key?: string;
-    operatorAddress?: string;
-    enabled?: boolean;
-    index?: number;
-    keyFormat?: "raw" | "bytes32";
-    useRouter?: boolean;
-    outputFormat?: "hex" | "string";
-};
-/**
- * Options for StorageClient
- */
-type StorageClientOptions = {
-    chainId: number;
-    overrides?: {
-        rpcUrls: string[];
-    };
-};
-/**
- * XML reference structure
- */
-interface XmlReference$1 {
-    hash: string;
-    version: string;
-    index?: number;
-    operator?: string;
-    source?: string;
-}
-/**
- * Chunked storage metadata
- */
-type ChunkedMetadata = {
-    chunkCount: number;
-    originalText: string;
-};
-/**
- * Options for useXmlStorage hook
- */
-type UseXmlStorageOptions = {
-    chainId: number;
-    key?: string;
-    operatorAddress: string;
-    skipXmlParsing?: boolean;
-    enabled?: boolean;
-    content?: string;
-    index?: number;
-    keyFormat?: "raw" | "bytes32";
-    useRouter?: boolean;
-    outputFormat?: "hex" | "string";
-};
-/**
- * Options for useStorageFromRouter hook
- */
-type UseStorageFromRouterOptions = {
-    chainId: number;
-    storageKey: `0x${string}`;
-    operatorAddress: string;
-    enabled?: boolean;
-};
-
-declare function useStorage({ chainId, key, operatorAddress, enabled, index, keyFormat, useRouter, outputFormat, }: UseStorageOptions): {
-    data: StorageData | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
-declare function useStorageForOperator({ chainId, operatorAddress, }: UseStorageOptions): {
-    data: any[][];
-    isLoading: boolean;
-    error: undefined;
-};
-declare function useStorageForOperatorAndKey({ chainId, key, operatorAddress, keyFormat, outputFormat, }: UseStorageOptions): {
-    data: {
-        text: string;
-        value: string;
-    } | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
-declare function useBulkStorage({ chainId, keys, safe, keyFormat, }: {
-    chainId: number;
-    keys: BulkStorageKey[];
-    safe?: boolean;
-    keyFormat?: "raw" | "bytes32";
-}): {
-    data: {
-        text: string;
-        value: string;
-    }[] | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
-/**
- * Get total number of versions (writes) for a storage key
- * Tries both chunked storage and regular storage
- */
-declare function useStorageTotalWrites({ chainId, key, operatorAddress, enabled, keyFormat, }: {
-    chainId: number;
-    key?: string;
-    operatorAddress?: string;
-    enabled?: boolean;
-    keyFormat?: "raw" | "bytes32";
-}): {
-    data: number | undefined;
-    isLoading: boolean;
-    error: viem.ReadContractErrorType | null | undefined;
-};
-
-declare function useXmlStorage({ chainId, key, operatorAddress, skipXmlParsing, enabled, content, index, keyFormat, useRouter, outputFormat, }: UseXmlStorageOptions): {
-    text: string;
-    value: string;
-    isLoading: boolean;
-    error: Error | undefined;
-    isXml: boolean;
-};
-
-/**
- * Generic hook to fetch storage content from StorageRouter
- * Handles both regular storage and chunked storage seamlessly
- * Works for any storage key (not just canvases)
- */
-declare function useStorageFromRouter({ chainId, storageKey, operatorAddress, enabled, }: UseStorageFromRouterOptions): {
-    data: StorageData | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
+import { Abi, PublicClient } from 'viem';

 /**
  * StorageClient - Client for interacting with Net protocol storage
@@ -535,6 +384,99 @@ declare function detectFileTypeFromBase64(base64Data: string): string | undefine
  */
 declare function base64ToDataUri(base64Data: string): string;

+/**
+ * Streaming utilities for processing large files without loading them entirely into memory.
+ * Uses file.slice() to read in chunks, keeping memory usage low.
+ */
+/**
+ * Result from processing a single file chunk
+ */
+interface StreamingChunkResult {
+    /** Index of this chunk (0-based) */
+    chunkIndex: number;
+    /** Keccak256 hash of the chunk content (66 chars) */
+    hash: string;
+    /** Compressed chunks ready for ChunkedStorage (array of hex strings) */
+    compressedChunks: string[];
+}
+/**
+ * Result from processing an entire file via streaming
+ */
+interface StreamingProcessResult {
+    /** All chunk hashes in order */
+    hashes: string[];
+    /** All compressed chunk arrays in order */
+    allCompressedChunks: string[][];
+    /** Total number of chunks */
+    totalChunks: number;
+    /** Whether the file was treated as binary */
+    isBinary: boolean;
+}
+/**
+ * Detects if a file should be treated as binary based on its MIME type.
+ * Binary files will be base64 encoded; text files will be read as text.
+ *
+ * @param file - The file to check
+ * @returns true if the file should be treated as binary
+ */
+declare function isBinaryFile(file: File): boolean;
+/**
+ * Reads a slice of a file as a string.
+ * For binary files, returns base64-encoded data.
+ * For text files, returns the raw text.
+ *
+ * @param file - The file to read from
+ * @param offset - Start byte offset
+ * @param size - Number of bytes to read
+ * @param isBinary - Whether to read as binary (base64) or text
+ * @param isFirstChunk - Whether this is the first chunk (for data URI prefix)
+ * @returns The chunk as a string
+ */
+declare function readFileSlice(file: File, offset: number, size: number, isBinary: boolean, isFirstChunk: boolean): Promise<string>;
+/**
+ * Async generator that processes a file in streaming chunks.
+ * Reads the file in 80KB slices, hashes and compresses each,
+ * yielding results one at a time to keep memory usage low.
+ *
+ * @param file - The file to process
+ * @param chunkSize - Size of each chunk in bytes (default: 80KB)
+ * @yields StreamingChunkResult for each chunk processed
+ *
+ * @example
+ * ```typescript
+ * const hashes: string[] = [];
+ * const transactions: TransactionConfig[] = [];
+ *
+ * for await (const result of processFileStreaming(file)) {
+ *   hashes.push(result.hash);
+ *   transactions.push(createTransaction(result.compressedChunks));
+ * }
+ *
+ * const metadata = generateXmlMetadata(hashes, 0, operatorAddress);
+ * ```
+ */
+declare function processFileStreaming(file: File, chunkSize?: number): AsyncGenerator<StreamingChunkResult>;
+/**
+ * Processes an entire file via streaming and returns all results.
+ * This is a convenience function that collects all generator results.
+ *
+ * For very large files, prefer using processFileStreaming directly
+ * to process chunks as they're generated.
+ *
+ * @param file - The file to process
+ * @param onProgress - Optional callback for progress updates
+ * @returns All hashes and compressed chunks
+ */
+declare function processFileStreamingComplete(file: File, onProgress?: (current: number, total: number) => void): Promise<StreamingProcessResult>;
+/**
+ * Estimates the number of chunks for a file without reading it.
+ *
+ * @param fileSize - Size of the file in bytes
+ * @param isBinary - Whether the file is binary (uses smaller chunk size for base64 alignment)
+ * @returns Estimated number of chunks
+ */
+declare function estimateChunkCount(fileSize: number, isBinary?: boolean): number;
+
 declare const STORAGE_CONTRACT: {
     abi: Abi;
     address: `0x${string}`;
@@ -570,4 +512,4 @@ declare const CONCURRENT_XML_FETCHES = 3;
  */
 declare function resolveXmlRecursive(content: string, defaultOperator: string, client: PublicClient, maxDepth: number, visited?: Set<string>, inheritedOperator?: string): Promise<string>;

-export {
+export { BulkStorageKey, CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, StorageClientOptions, StorageData, type StreamingChunkResult, type StreamingProcessResult, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, estimateChunkCount, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, isBinaryFile, parseNetReferences, processDataForStorage, processFileStreaming, processFileStreamingComplete, readFileSlice, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, validateDataSize };
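For orientation, a minimal consumption sketch of the streaming helpers added above. Everything imported here appears in the new top-level export list (estimateChunkCount, isBinaryFile, processFileStreamingComplete); the prepareUpload wrapper and the logging around it are illustrative assumptions, not part of the package.

```typescript
// Sketch only: exercises the new 0.1.10 streaming declarations from the main entry.
// `prepareUpload` and the console logging are assumptions for illustration.
import {
  estimateChunkCount,
  isBinaryFile,
  processFileStreamingComplete,
} from '@net-protocol/storage';

async function prepareUpload(file: File) {
  // Rough chunk count up front, without reading the file.
  const expected = estimateChunkCount(file.size, isBinaryFile(file));
  console.log(`expecting ~${expected} chunk(s)`);

  // Read, hash, and compress the file in slices, reporting progress per chunk.
  const result = await processFileStreamingComplete(file, (current, total) => {
    console.log(`processed chunk ${current}/${total}`);
  });

  // result.hashes and result.allCompressedChunks are aligned by chunk index.
  return result;
}
```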
package/dist/index.d.ts
CHANGED
@@ -1,158 +1,7 @@
-import
-
+import { S as StorageClientOptions, a as StorageData, B as BulkStorageKey, b as BulkStorageResult } from './types-BnOI6cJS.js';
+export { C as ChunkedMetadata, U as UseStorageOptions, c as UseXmlStorageOptions, X as XmlReference } from './types-BnOI6cJS.js';
 import { WriteTransactionConfig } from '@net-protocol/core';
-
-/**
- * Storage data object: { text, value }
- */
-type StorageData = {
-    text: string;
-    value: string;
-};
-/**
- * Bulk storage key for batch operations
- */
-type BulkStorageKey = {
-    key: string;
-    operator: string;
-    keyFormat?: "raw" | "bytes32";
-};
-/**
- * Bulk storage result
- */
-type BulkStorageResult = {
-    text: string;
-    value: string;
-};
-/**
- * Options for useStorage hook
- */
-type UseStorageOptions = {
-    chainId: number;
-    key?: string;
-    operatorAddress?: string;
-    enabled?: boolean;
-    index?: number;
-    keyFormat?: "raw" | "bytes32";
-    useRouter?: boolean;
-    outputFormat?: "hex" | "string";
-};
-/**
- * Options for StorageClient
- */
-type StorageClientOptions = {
-    chainId: number;
-    overrides?: {
-        rpcUrls: string[];
-    };
-};
-/**
- * XML reference structure
- */
-interface XmlReference$1 {
-    hash: string;
-    version: string;
-    index?: number;
-    operator?: string;
-    source?: string;
-}
-/**
- * Chunked storage metadata
- */
-type ChunkedMetadata = {
-    chunkCount: number;
-    originalText: string;
-};
-/**
- * Options for useXmlStorage hook
- */
-type UseXmlStorageOptions = {
-    chainId: number;
-    key?: string;
-    operatorAddress: string;
-    skipXmlParsing?: boolean;
-    enabled?: boolean;
-    content?: string;
-    index?: number;
-    keyFormat?: "raw" | "bytes32";
-    useRouter?: boolean;
-    outputFormat?: "hex" | "string";
-};
-/**
- * Options for useStorageFromRouter hook
- */
-type UseStorageFromRouterOptions = {
-    chainId: number;
-    storageKey: `0x${string}`;
-    operatorAddress: string;
-    enabled?: boolean;
-};
-
-declare function useStorage({ chainId, key, operatorAddress, enabled, index, keyFormat, useRouter, outputFormat, }: UseStorageOptions): {
-    data: StorageData | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
-declare function useStorageForOperator({ chainId, operatorAddress, }: UseStorageOptions): {
-    data: any[][];
-    isLoading: boolean;
-    error: undefined;
-};
-declare function useStorageForOperatorAndKey({ chainId, key, operatorAddress, keyFormat, outputFormat, }: UseStorageOptions): {
-    data: {
-        text: string;
-        value: string;
-    } | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
-declare function useBulkStorage({ chainId, keys, safe, keyFormat, }: {
-    chainId: number;
-    keys: BulkStorageKey[];
-    safe?: boolean;
-    keyFormat?: "raw" | "bytes32";
-}): {
-    data: {
-        text: string;
-        value: string;
-    }[] | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
-/**
- * Get total number of versions (writes) for a storage key
- * Tries both chunked storage and regular storage
- */
-declare function useStorageTotalWrites({ chainId, key, operatorAddress, enabled, keyFormat, }: {
-    chainId: number;
-    key?: string;
-    operatorAddress?: string;
-    enabled?: boolean;
-    keyFormat?: "raw" | "bytes32";
-}): {
-    data: number | undefined;
-    isLoading: boolean;
-    error: viem.ReadContractErrorType | null | undefined;
-};
-
-declare function useXmlStorage({ chainId, key, operatorAddress, skipXmlParsing, enabled, content, index, keyFormat, useRouter, outputFormat, }: UseXmlStorageOptions): {
-    text: string;
-    value: string;
-    isLoading: boolean;
-    error: Error | undefined;
-    isXml: boolean;
-};
-
-/**
- * Generic hook to fetch storage content from StorageRouter
- * Handles both regular storage and chunked storage seamlessly
- * Works for any storage key (not just canvases)
- */
-declare function useStorageFromRouter({ chainId, storageKey, operatorAddress, enabled, }: UseStorageFromRouterOptions): {
-    data: StorageData | undefined;
-    isLoading: boolean;
-    error: Error | undefined;
-};
+import { Abi, PublicClient } from 'viem';

 /**
  * StorageClient - Client for interacting with Net protocol storage
@@ -535,6 +384,99 @@ declare function detectFileTypeFromBase64(base64Data: string): string | undefine
  */
 declare function base64ToDataUri(base64Data: string): string;

+/**
+ * Streaming utilities for processing large files without loading them entirely into memory.
+ * Uses file.slice() to read in chunks, keeping memory usage low.
+ */
+/**
+ * Result from processing a single file chunk
+ */
+interface StreamingChunkResult {
+    /** Index of this chunk (0-based) */
+    chunkIndex: number;
+    /** Keccak256 hash of the chunk content (66 chars) */
+    hash: string;
+    /** Compressed chunks ready for ChunkedStorage (array of hex strings) */
+    compressedChunks: string[];
+}
+/**
+ * Result from processing an entire file via streaming
+ */
+interface StreamingProcessResult {
+    /** All chunk hashes in order */
+    hashes: string[];
+    /** All compressed chunk arrays in order */
+    allCompressedChunks: string[][];
+    /** Total number of chunks */
+    totalChunks: number;
+    /** Whether the file was treated as binary */
+    isBinary: boolean;
+}
+/**
+ * Detects if a file should be treated as binary based on its MIME type.
+ * Binary files will be base64 encoded; text files will be read as text.
+ *
+ * @param file - The file to check
+ * @returns true if the file should be treated as binary
+ */
+declare function isBinaryFile(file: File): boolean;
+/**
+ * Reads a slice of a file as a string.
+ * For binary files, returns base64-encoded data.
+ * For text files, returns the raw text.
+ *
+ * @param file - The file to read from
+ * @param offset - Start byte offset
+ * @param size - Number of bytes to read
+ * @param isBinary - Whether to read as binary (base64) or text
+ * @param isFirstChunk - Whether this is the first chunk (for data URI prefix)
+ * @returns The chunk as a string
+ */
+declare function readFileSlice(file: File, offset: number, size: number, isBinary: boolean, isFirstChunk: boolean): Promise<string>;
+/**
+ * Async generator that processes a file in streaming chunks.
+ * Reads the file in 80KB slices, hashes and compresses each,
+ * yielding results one at a time to keep memory usage low.
+ *
+ * @param file - The file to process
+ * @param chunkSize - Size of each chunk in bytes (default: 80KB)
+ * @yields StreamingChunkResult for each chunk processed
+ *
+ * @example
+ * ```typescript
+ * const hashes: string[] = [];
+ * const transactions: TransactionConfig[] = [];
+ *
+ * for await (const result of processFileStreaming(file)) {
+ *   hashes.push(result.hash);
+ *   transactions.push(createTransaction(result.compressedChunks));
+ * }
+ *
+ * const metadata = generateXmlMetadata(hashes, 0, operatorAddress);
+ * ```
+ */
+declare function processFileStreaming(file: File, chunkSize?: number): AsyncGenerator<StreamingChunkResult>;
+/**
+ * Processes an entire file via streaming and returns all results.
+ * This is a convenience function that collects all generator results.
+ *
+ * For very large files, prefer using processFileStreaming directly
+ * to process chunks as they're generated.
+ *
+ * @param file - The file to process
+ * @param onProgress - Optional callback for progress updates
+ * @returns All hashes and compressed chunks
+ */
+declare function processFileStreamingComplete(file: File, onProgress?: (current: number, total: number) => void): Promise<StreamingProcessResult>;
+/**
+ * Estimates the number of chunks for a file without reading it.
+ *
+ * @param fileSize - Size of the file in bytes
+ * @param isBinary - Whether the file is binary (uses smaller chunk size for base64 alignment)
+ * @returns Estimated number of chunks
+ */
+declare function estimateChunkCount(fileSize: number, isBinary?: boolean): number;
+
 declare const STORAGE_CONTRACT: {
     abi: Abi;
     address: `0x${string}`;
@@ -570,4 +512,4 @@ declare const CONCURRENT_XML_FETCHES = 3;
  */
 declare function resolveXmlRecursive(content: string, defaultOperator: string, client: PublicClient, maxDepth: number, visited?: Set<string>, inheritedOperator?: string): Promise<string>;

-export {
+export { BulkStorageKey, CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, StorageClientOptions, StorageData, type StreamingChunkResult, type StreamingProcessResult, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, estimateChunkCount, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, isBinaryFile, parseNetReferences, processDataForStorage, processFileStreaming, processFileStreamingComplete, readFileSlice, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, validateDataSize };
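The hook declarations (useStorage, useXmlStorage, useBulkStorage, useStorageTotalWrites, useStorageFromRouter) are removed from both main-entry type files, while the file summary shows new dist/react.* artifacts and a package.json change, so the React hooks presumably now ship from a dedicated entry point. A hypothetical migration sketch — the '/react' subpath specifier and the wrapper hook are assumptions, not confirmed by this diff:

```typescript
// Hypothetical: the '/react' subpath is inferred from the new dist/react.* files and is
// not shown in this diff. Option and result shapes match the UseStorageOptions and
// StorageData declarations removed from index.d.mts above.
import { useStorage } from '@net-protocol/storage/react'; // 0.1.8: from '@net-protocol/storage'

// Illustrative wrapper: returns just the stored text once it has loaded.
export function useStoredText(chainId: number, key: string): string | undefined {
  const { data, isLoading, error } = useStorage({ chainId, key, keyFormat: 'raw' });
  if (isLoading || error) return undefined;
  return data?.text; // StorageData is { text, value }
}
```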