@net-protocol/storage 0.1.8 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +97 -155
- package/dist/index.d.ts +97 -155
- package/dist/index.js +275 -716
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +272 -710
- package/dist/index.mjs.map +1 -1
- package/dist/react.d.mts +70 -0
- package/dist/react.d.ts +70 -0
- package/dist/react.js +1380 -0
- package/dist/react.js.map +1 -0
- package/dist/react.mjs +1367 -0
- package/dist/react.mjs.map +1 -0
- package/dist/types-BnOI6cJS.d.mts +87 -0
- package/dist/types-BnOI6cJS.d.ts +87 -0
- package/package.json +15 -2
package/dist/index.mjs
CHANGED
|
@@ -1,12 +1,9 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { useState, useEffect, useMemo } from 'react';
|
|
3
|
-
import { keccak256HashString, toBytes32, getNetContract, getPublicClient, normalizeDataOrEmpty } from '@net-protocol/core';
|
|
4
|
-
import { stringToHex, hexToBytes, hexToString, fromHex, decodeAbiParameters } from 'viem';
|
|
5
|
-
import pako from 'pako';
|
|
1
|
+
import { fromHex, stringToHex, hexToBytes, hexToString, decodeAbiParameters } from 'viem';
|
|
6
2
|
import { readContract } from 'viem/actions';
|
|
7
|
-
import
|
|
3
|
+
import { keccak256HashString, toBytes32, getPublicClient, getNetContract, normalizeDataOrEmpty } from '@net-protocol/core';
|
|
4
|
+
import pako from 'pako';
|
|
8
5
|
|
|
9
|
-
// src/
|
|
6
|
+
// src/client/StorageClient.ts
|
|
10
7
|
|
|
11
8
|
// src/abis/storage.json
|
|
12
9
|
var storage_default = [
|
|
@@ -591,6 +588,150 @@ var SAFE_STORAGE_READER_CONTRACT = {
|
|
|
591
588
|
abi: safe_storage_reader_default,
|
|
592
589
|
address: "0x0000000d03bad401fae4935dc9cbbf8084347214"
|
|
593
590
|
};
|
|
591
|
+
function isBinaryString(str) {
|
|
592
|
+
return str.split("").some((char) => char.charCodeAt(0) > 127);
|
|
593
|
+
}
|
|
594
|
+
function formatStorageKeyForDisplay(storageKey) {
|
|
595
|
+
if (storageKey.startsWith("0x") && storageKey.length === 66 && /^0x[0-9a-fA-F]{64}$/.test(storageKey)) {
|
|
596
|
+
try {
|
|
597
|
+
const decoded = fromHex(storageKey, "string");
|
|
598
|
+
const trimmed = decoded.replace(/\0/g, "");
|
|
599
|
+
if (!isBinaryString(trimmed) && trimmed.trim().length > 0) {
|
|
600
|
+
return {
|
|
601
|
+
displayText: trimmed,
|
|
602
|
+
isDecoded: true
|
|
603
|
+
};
|
|
604
|
+
}
|
|
605
|
+
} catch {
|
|
606
|
+
}
|
|
607
|
+
}
|
|
608
|
+
return {
|
|
609
|
+
displayText: storageKey,
|
|
610
|
+
isDecoded: false
|
|
611
|
+
};
|
|
612
|
+
}
|
|
613
|
+
function getStorageKeyBytes(input, keyFormat) {
|
|
614
|
+
if (keyFormat === "bytes32") {
|
|
615
|
+
return input.toLowerCase();
|
|
616
|
+
}
|
|
617
|
+
if (keyFormat === "raw") {
|
|
618
|
+
return input.length > 32 ? keccak256HashString(input.toLowerCase()) : toBytes32(input.toLowerCase());
|
|
619
|
+
}
|
|
620
|
+
if (input.startsWith("0x") && input.length === 66 && // 0x + 64 hex chars = bytes32
|
|
621
|
+
/^0x[0-9a-fA-F]{64}$/.test(input)) {
|
|
622
|
+
return input.toLowerCase();
|
|
623
|
+
}
|
|
624
|
+
return input.length > 32 ? keccak256HashString(input.toLowerCase()) : toBytes32(input.toLowerCase());
|
|
625
|
+
}
|
|
626
|
+
function encodeStorageKeyForUrl(key) {
|
|
627
|
+
return encodeURIComponent(key);
|
|
628
|
+
}
|
|
629
|
+
function generateStorageEmbedTag(params) {
|
|
630
|
+
const operator = params.operatorAddress.toLowerCase();
|
|
631
|
+
const indexAttr = params.versionIndex !== void 0 ? ` i="${params.versionIndex}"` : "";
|
|
632
|
+
const sourceAttr = params.isRegularStorage ? ` s="d"` : "";
|
|
633
|
+
return `<net k="${params.storageKeyBytes}" v="0.0.1"${indexAttr} o="${operator}"${sourceAttr} />`;
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
// src/client/storage.ts
|
|
637
|
+
function getStorageReadConfig(params) {
|
|
638
|
+
const { chainId, key, operator, keyFormat } = params;
|
|
639
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
640
|
+
return {
|
|
641
|
+
abi: STORAGE_CONTRACT.abi,
|
|
642
|
+
address: STORAGE_CONTRACT.address,
|
|
643
|
+
functionName: "get",
|
|
644
|
+
args: [storageKeyBytes, operator],
|
|
645
|
+
chainId
|
|
646
|
+
};
|
|
647
|
+
}
|
|
648
|
+
function getStorageValueAtIndexReadConfig(params) {
|
|
649
|
+
const { chainId, key, operator, index, keyFormat } = params;
|
|
650
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
651
|
+
return {
|
|
652
|
+
abi: STORAGE_CONTRACT.abi,
|
|
653
|
+
address: STORAGE_CONTRACT.address,
|
|
654
|
+
functionName: "getValueAtIndex",
|
|
655
|
+
args: [storageKeyBytes, operator, index],
|
|
656
|
+
chainId
|
|
657
|
+
};
|
|
658
|
+
}
|
|
659
|
+
function getStorageTotalWritesReadConfig(params) {
|
|
660
|
+
const { chainId, key, operator, keyFormat } = params;
|
|
661
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
662
|
+
return {
|
|
663
|
+
abi: STORAGE_CONTRACT.abi,
|
|
664
|
+
address: STORAGE_CONTRACT.address,
|
|
665
|
+
functionName: "getTotalWrites",
|
|
666
|
+
args: [storageKeyBytes, operator],
|
|
667
|
+
chainId
|
|
668
|
+
};
|
|
669
|
+
}
|
|
670
|
+
function getStorageBulkGetReadConfig(params) {
|
|
671
|
+
const { chainId, keys, safe = false, keyFormat } = params;
|
|
672
|
+
const contract = safe ? SAFE_STORAGE_READER_CONTRACT : STORAGE_CONTRACT;
|
|
673
|
+
const bulkKeys = keys.map((k) => ({
|
|
674
|
+
key: getStorageKeyBytes(k.key, k.keyFormat ?? keyFormat),
|
|
675
|
+
operator: k.operator
|
|
676
|
+
}));
|
|
677
|
+
return {
|
|
678
|
+
abi: contract.abi,
|
|
679
|
+
address: contract.address,
|
|
680
|
+
functionName: "bulkGet",
|
|
681
|
+
args: [bulkKeys],
|
|
682
|
+
chainId
|
|
683
|
+
};
|
|
684
|
+
}
|
|
685
|
+
function getStorageRouterReadConfig(params) {
|
|
686
|
+
const { chainId, key, operator, keyFormat } = params;
|
|
687
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
688
|
+
return {
|
|
689
|
+
abi: STORAGE_ROUTER_CONTRACT.abi,
|
|
690
|
+
address: STORAGE_ROUTER_CONTRACT.address,
|
|
691
|
+
functionName: "get",
|
|
692
|
+
args: [storageKeyBytes, operator],
|
|
693
|
+
chainId
|
|
694
|
+
};
|
|
695
|
+
}
|
|
696
|
+
|
|
697
|
+
// src/client/chunkedStorage.ts
|
|
698
|
+
function getChunkedStorageMetadataReadConfig(params) {
|
|
699
|
+
const { chainId, key, operator, index, keyFormat } = params;
|
|
700
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
701
|
+
const functionName = index !== void 0 ? "getMetadataAtIndex" : "getMetadata";
|
|
702
|
+
const args = index !== void 0 ? [storageKeyBytes, operator, index] : [storageKeyBytes, operator];
|
|
703
|
+
return {
|
|
704
|
+
abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
|
|
705
|
+
address: CHUNKED_STORAGE_READER_CONTRACT.address,
|
|
706
|
+
functionName,
|
|
707
|
+
args,
|
|
708
|
+
chainId
|
|
709
|
+
};
|
|
710
|
+
}
|
|
711
|
+
function getChunkedStorageChunksReadConfig(params) {
|
|
712
|
+
const { chainId, key, operator, start, end, index, keyFormat } = params;
|
|
713
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
714
|
+
const functionName = index !== void 0 ? "getChunksAtIndex" : "getChunks";
|
|
715
|
+
const args = index !== void 0 ? [storageKeyBytes, operator, start, end, index] : [storageKeyBytes, operator, start, end];
|
|
716
|
+
return {
|
|
717
|
+
abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
|
|
718
|
+
address: CHUNKED_STORAGE_READER_CONTRACT.address,
|
|
719
|
+
functionName,
|
|
720
|
+
args,
|
|
721
|
+
chainId
|
|
722
|
+
};
|
|
723
|
+
}
|
|
724
|
+
function getChunkedStorageTotalWritesReadConfig(params) {
|
|
725
|
+
const { chainId, key, operator, keyFormat } = params;
|
|
726
|
+
const storageKeyBytes = getStorageKeyBytes(key, keyFormat);
|
|
727
|
+
return {
|
|
728
|
+
abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
|
|
729
|
+
address: CHUNKED_STORAGE_READER_CONTRACT.address,
|
|
730
|
+
functionName: "getTotalWrites",
|
|
731
|
+
args: [storageKeyBytes, operator],
|
|
732
|
+
chainId
|
|
733
|
+
};
|
|
734
|
+
}
|
|
594
735
|
|
|
595
736
|
// src/utils/xmlUtils.ts
|
|
596
737
|
function parseNetReferences(metadata) {
|
|
@@ -681,406 +822,6 @@ function assembleChunks(chunks, returnHex) {
|
|
|
681
822
|
throw new Error("Failed to decompress chunked data");
|
|
682
823
|
}
|
|
683
824
|
}
|
|
684
|
-
function isBinaryString(str) {
|
|
685
|
-
return str.split("").some((char) => char.charCodeAt(0) > 127);
|
|
686
|
-
}
|
|
687
|
-
function formatStorageKeyForDisplay(storageKey) {
|
|
688
|
-
if (storageKey.startsWith("0x") && storageKey.length === 66 && /^0x[0-9a-fA-F]{64}$/.test(storageKey)) {
|
|
689
|
-
try {
|
|
690
|
-
const decoded = fromHex(storageKey, "string");
|
|
691
|
-
const trimmed = decoded.replace(/\0/g, "");
|
|
692
|
-
if (!isBinaryString(trimmed) && trimmed.trim().length > 0) {
|
|
693
|
-
return {
|
|
694
|
-
displayText: trimmed,
|
|
695
|
-
isDecoded: true
|
|
696
|
-
};
|
|
697
|
-
}
|
|
698
|
-
} catch {
|
|
699
|
-
}
|
|
700
|
-
}
|
|
701
|
-
return {
|
|
702
|
-
displayText: storageKey,
|
|
703
|
-
isDecoded: false
|
|
704
|
-
};
|
|
705
|
-
}
|
|
706
|
-
function getStorageKeyBytes(input, keyFormat) {
|
|
707
|
-
if (keyFormat === "bytes32") {
|
|
708
|
-
return input.toLowerCase();
|
|
709
|
-
}
|
|
710
|
-
if (keyFormat === "raw") {
|
|
711
|
-
return input.length > 32 ? keccak256HashString(input.toLowerCase()) : toBytes32(input.toLowerCase());
|
|
712
|
-
}
|
|
713
|
-
if (input.startsWith("0x") && input.length === 66 && // 0x + 64 hex chars = bytes32
|
|
714
|
-
/^0x[0-9a-fA-F]{64}$/.test(input)) {
|
|
715
|
-
return input.toLowerCase();
|
|
716
|
-
}
|
|
717
|
-
return input.length > 32 ? keccak256HashString(input.toLowerCase()) : toBytes32(input.toLowerCase());
|
|
718
|
-
}
|
|
719
|
-
function encodeStorageKeyForUrl(key) {
|
|
720
|
-
return encodeURIComponent(key);
|
|
721
|
-
}
|
|
722
|
-
function generateStorageEmbedTag(params) {
|
|
723
|
-
const operator = params.operatorAddress.toLowerCase();
|
|
724
|
-
const indexAttr = params.versionIndex !== void 0 ? ` i="${params.versionIndex}"` : "";
|
|
725
|
-
const sourceAttr = params.isRegularStorage ? ` s="d"` : "";
|
|
726
|
-
return `<net k="${params.storageKeyBytes}" v="0.0.1"${indexAttr} o="${operator}"${sourceAttr} />`;
|
|
727
|
-
}
|
|
728
|
-
|
|
729
|
-
// src/hooks/useStorage.ts
|
|
730
|
-
var BATCH_SIZE = 2;
|
|
731
|
-
function useStorage({
|
|
732
|
-
chainId,
|
|
733
|
-
key,
|
|
734
|
-
operatorAddress,
|
|
735
|
-
enabled = true,
|
|
736
|
-
index,
|
|
737
|
-
keyFormat,
|
|
738
|
-
useRouter = false,
|
|
739
|
-
outputFormat = "hex"
|
|
740
|
-
}) {
|
|
741
|
-
const isLatestVersion = index === void 0;
|
|
742
|
-
const shouldUseRouter = useRouter === true && isLatestVersion;
|
|
743
|
-
const outputAsString = outputFormat === "string";
|
|
744
|
-
const storageKeyBytes = key ? getStorageKeyBytes(key, keyFormat) : void 0;
|
|
745
|
-
const formatData = (text, dataHex) => {
|
|
746
|
-
return {
|
|
747
|
-
text,
|
|
748
|
-
value: outputAsString ? hexToString(dataHex) : dataHex
|
|
749
|
-
};
|
|
750
|
-
};
|
|
751
|
-
const [routerData, setRouterData] = useState();
|
|
752
|
-
const [routerChunkLoading, setRouterChunkLoading] = useState(false);
|
|
753
|
-
const [routerChunkError, setRouterChunkError] = useState();
|
|
754
|
-
const routerHook = useReadContract({
|
|
755
|
-
abi: STORAGE_ROUTER_CONTRACT.abi,
|
|
756
|
-
address: STORAGE_ROUTER_CONTRACT.address,
|
|
757
|
-
functionName: "get",
|
|
758
|
-
args: storageKeyBytes && operatorAddress ? [storageKeyBytes, operatorAddress] : void 0,
|
|
759
|
-
chainId,
|
|
760
|
-
query: {
|
|
761
|
-
enabled: shouldUseRouter && enabled && !!key && !!operatorAddress
|
|
762
|
-
}
|
|
763
|
-
});
|
|
764
|
-
useEffect(() => {
|
|
765
|
-
async function processRouterResult() {
|
|
766
|
-
if (!routerHook.data || routerHook.isLoading || routerHook.error) {
|
|
767
|
-
return;
|
|
768
|
-
}
|
|
769
|
-
const [isChunkedStorage, text, data] = routerHook.data;
|
|
770
|
-
if (!isChunkedStorage) {
|
|
771
|
-
if (!data || typeof data !== "string") {
|
|
772
|
-
setRouterData(void 0);
|
|
773
|
-
return;
|
|
774
|
-
}
|
|
775
|
-
const formatted = formatData(text, data);
|
|
776
|
-
setRouterData(formatted);
|
|
777
|
-
return;
|
|
778
|
-
}
|
|
779
|
-
setRouterChunkLoading(true);
|
|
780
|
-
setRouterChunkError(void 0);
|
|
781
|
-
try {
|
|
782
|
-
const [chunkCount] = decodeAbiParameters([{ type: "uint8" }], data);
|
|
783
|
-
if (chunkCount === 0) {
|
|
784
|
-
setRouterData(void 0);
|
|
785
|
-
return;
|
|
786
|
-
}
|
|
787
|
-
const client = getPublicClient({ chainId });
|
|
788
|
-
if (!client) {
|
|
789
|
-
throw new Error(`Chain not found for chainId: ${chainId}`);
|
|
790
|
-
}
|
|
791
|
-
const allChunks = [];
|
|
792
|
-
for (let start = 0; start < Number(chunkCount); start += BATCH_SIZE) {
|
|
793
|
-
const end = Math.min(start + BATCH_SIZE, Number(chunkCount));
|
|
794
|
-
const batch = await readContract(client, {
|
|
795
|
-
abi: CHUNKED_STORAGE_CONTRACT.abi,
|
|
796
|
-
address: CHUNKED_STORAGE_CONTRACT.address,
|
|
797
|
-
functionName: "getChunks",
|
|
798
|
-
args: [storageKeyBytes, operatorAddress, start, end]
|
|
799
|
-
});
|
|
800
|
-
allChunks.push(...batch);
|
|
801
|
-
}
|
|
802
|
-
const assembledString = assembleChunks(allChunks);
|
|
803
|
-
if (assembledString === void 0) {
|
|
804
|
-
setRouterData(void 0);
|
|
805
|
-
} else {
|
|
806
|
-
if (outputAsString) {
|
|
807
|
-
setRouterData({ text, value: assembledString });
|
|
808
|
-
} else {
|
|
809
|
-
const hexData = stringToHex(assembledString);
|
|
810
|
-
setRouterData({ text, value: hexData });
|
|
811
|
-
}
|
|
812
|
-
}
|
|
813
|
-
} catch (error) {
|
|
814
|
-
setRouterChunkError(error);
|
|
815
|
-
} finally {
|
|
816
|
-
setRouterChunkLoading(false);
|
|
817
|
-
}
|
|
818
|
-
}
|
|
819
|
-
processRouterResult();
|
|
820
|
-
}, [
|
|
821
|
-
routerHook.data,
|
|
822
|
-
routerHook.isLoading,
|
|
823
|
-
routerHook.error,
|
|
824
|
-
operatorAddress,
|
|
825
|
-
chainId,
|
|
826
|
-
storageKeyBytes,
|
|
827
|
-
outputAsString
|
|
828
|
-
]);
|
|
829
|
-
const {
|
|
830
|
-
data: latestData,
|
|
831
|
-
isLoading: latestLoading,
|
|
832
|
-
error: latestError
|
|
833
|
-
} = useReadContract({
|
|
834
|
-
abi: STORAGE_CONTRACT.abi,
|
|
835
|
-
address: STORAGE_CONTRACT.address,
|
|
836
|
-
functionName: "get",
|
|
837
|
-
args: key && operatorAddress ? [getStorageKeyBytes(key, keyFormat), operatorAddress] : void 0,
|
|
838
|
-
chainId,
|
|
839
|
-
query: {
|
|
840
|
-
enabled: !shouldUseRouter && enabled && !!operatorAddress && !!key && isLatestVersion
|
|
841
|
-
}
|
|
842
|
-
});
|
|
843
|
-
const [historicalData, setHistoricalData] = useState(
|
|
844
|
-
void 0
|
|
845
|
-
);
|
|
846
|
-
const [historicalLoading, setHistoricalLoading] = useState(false);
|
|
847
|
-
const [historicalError, setHistoricalError] = useState();
|
|
848
|
-
useEffect(() => {
|
|
849
|
-
async function fetchHistoricalVersion() {
|
|
850
|
-
if (isLatestVersion || !key || !operatorAddress || !enabled) {
|
|
851
|
-
return;
|
|
852
|
-
}
|
|
853
|
-
setHistoricalLoading(true);
|
|
854
|
-
setHistoricalError(void 0);
|
|
855
|
-
setHistoricalData(void 0);
|
|
856
|
-
try {
|
|
857
|
-
const client = getPublicClient({ chainId });
|
|
858
|
-
if (!client) {
|
|
859
|
-
throw new Error(`Chain not found for chainId: ${chainId}`);
|
|
860
|
-
}
|
|
861
|
-
const storageKeyBytes2 = getStorageKeyBytes(
|
|
862
|
-
key,
|
|
863
|
-
keyFormat
|
|
864
|
-
);
|
|
865
|
-
try {
|
|
866
|
-
const metadata = await readContract(client, {
|
|
867
|
-
abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
|
|
868
|
-
address: CHUNKED_STORAGE_READER_CONTRACT.address,
|
|
869
|
-
functionName: "getMetadataAtIndex",
|
|
870
|
-
args: [storageKeyBytes2, operatorAddress, index]
|
|
871
|
-
});
|
|
872
|
-
const [chunkCount, text2] = metadata;
|
|
873
|
-
if (chunkCount > 0) {
|
|
874
|
-
const chunks = await readContract(client, {
|
|
875
|
-
abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
|
|
876
|
-
address: CHUNKED_STORAGE_READER_CONTRACT.address,
|
|
877
|
-
functionName: "getChunksAtIndex",
|
|
878
|
-
args: [storageKeyBytes2, operatorAddress, 0, chunkCount, index]
|
|
879
|
-
});
|
|
880
|
-
const assembledData = assembleChunks(chunks);
|
|
881
|
-
if (assembledData !== void 0) {
|
|
882
|
-
const hexData = stringToHex(assembledData);
|
|
883
|
-
setHistoricalData(formatData(text2, hexData));
|
|
884
|
-
setHistoricalLoading(false);
|
|
885
|
-
return;
|
|
886
|
-
}
|
|
887
|
-
}
|
|
888
|
-
} catch (chunkedError) {
|
|
889
|
-
}
|
|
890
|
-
const result = await readContract(client, {
|
|
891
|
-
abi: STORAGE_CONTRACT.abi,
|
|
892
|
-
address: STORAGE_CONTRACT.address,
|
|
893
|
-
functionName: "getValueAtIndex",
|
|
894
|
-
args: [storageKeyBytes2, operatorAddress, index]
|
|
895
|
-
});
|
|
896
|
-
const [text, data] = result;
|
|
897
|
-
if (!data || typeof data !== "string") {
|
|
898
|
-
setHistoricalData(void 0);
|
|
899
|
-
setHistoricalLoading(false);
|
|
900
|
-
return;
|
|
901
|
-
}
|
|
902
|
-
setHistoricalData(formatData(text, data));
|
|
903
|
-
} catch (error) {
|
|
904
|
-
console.error(
|
|
905
|
-
"[useStorage] Failed to fetch historical version:",
|
|
906
|
-
error
|
|
907
|
-
);
|
|
908
|
-
setHistoricalError(error);
|
|
909
|
-
} finally {
|
|
910
|
-
setHistoricalLoading(false);
|
|
911
|
-
}
|
|
912
|
-
}
|
|
913
|
-
fetchHistoricalVersion();
|
|
914
|
-
}, [
|
|
915
|
-
chainId,
|
|
916
|
-
key,
|
|
917
|
-
operatorAddress,
|
|
918
|
-
index,
|
|
919
|
-
enabled,
|
|
920
|
-
isLatestVersion,
|
|
921
|
-
outputAsString
|
|
922
|
-
]);
|
|
923
|
-
if (!isLatestVersion) {
|
|
924
|
-
return {
|
|
925
|
-
data: historicalData,
|
|
926
|
-
isLoading: historicalLoading,
|
|
927
|
-
error: historicalError
|
|
928
|
-
};
|
|
929
|
-
}
|
|
930
|
-
if (shouldUseRouter) {
|
|
931
|
-
return {
|
|
932
|
-
data: routerData,
|
|
933
|
-
isLoading: routerHook.isLoading || routerChunkLoading,
|
|
934
|
-
error: routerHook.error || routerChunkError
|
|
935
|
-
};
|
|
936
|
-
}
|
|
937
|
-
const formattedDirectData = latestData ? (() => {
|
|
938
|
-
const result = latestData;
|
|
939
|
-
const [text, valueHex] = result;
|
|
940
|
-
if (!valueHex || typeof valueHex !== "string") {
|
|
941
|
-
return void 0;
|
|
942
|
-
}
|
|
943
|
-
return formatData(text, valueHex);
|
|
944
|
-
})() : void 0;
|
|
945
|
-
return {
|
|
946
|
-
data: formattedDirectData,
|
|
947
|
-
isLoading: latestLoading,
|
|
948
|
-
error: latestError
|
|
949
|
-
};
|
|
950
|
-
}
|
|
951
|
-
function useStorageForOperator({
|
|
952
|
-
chainId,
|
|
953
|
-
operatorAddress
|
|
954
|
-
}) {
|
|
955
|
-
const netContract = getNetContract(chainId);
|
|
956
|
-
const { data: totalCount, isLoading: isLoadingCount } = useReadContract({
|
|
957
|
-
abi: netContract.abi,
|
|
958
|
-
address: netContract.address,
|
|
959
|
-
functionName: "getTotalMessagesForAppUserCount",
|
|
960
|
-
args: [STORAGE_CONTRACT.address, operatorAddress],
|
|
961
|
-
chainId
|
|
962
|
-
});
|
|
963
|
-
const totalCountNumber = totalCount ? Number(totalCount) : 0;
|
|
964
|
-
const { data: messages, isLoading: isLoadingMessages } = useReadContract({
|
|
965
|
-
abi: netContract.abi,
|
|
966
|
-
address: netContract.address,
|
|
967
|
-
functionName: "getMessagesInRangeForAppUser",
|
|
968
|
-
args: [0, totalCountNumber, STORAGE_CONTRACT.address, operatorAddress],
|
|
969
|
-
chainId
|
|
970
|
-
});
|
|
971
|
-
return {
|
|
972
|
-
data: messages?.map((msg) => [
|
|
973
|
-
msg.topic,
|
|
974
|
-
msg.text,
|
|
975
|
-
Number(msg.timestamp),
|
|
976
|
-
msg.data
|
|
977
|
-
]) || [],
|
|
978
|
-
isLoading: isLoadingCount || isLoadingMessages,
|
|
979
|
-
error: void 0
|
|
980
|
-
};
|
|
981
|
-
}
|
|
982
|
-
function useStorageForOperatorAndKey({
|
|
983
|
-
chainId,
|
|
984
|
-
key,
|
|
985
|
-
operatorAddress,
|
|
986
|
-
keyFormat,
|
|
987
|
-
outputFormat = "hex"
|
|
988
|
-
}) {
|
|
989
|
-
const storageKeyBytes = key ? getStorageKeyBytes(key, keyFormat) : void 0;
|
|
990
|
-
const outputAsString = outputFormat === "string";
|
|
991
|
-
const readContractArgs = {
|
|
992
|
-
abi: STORAGE_CONTRACT.abi,
|
|
993
|
-
address: STORAGE_CONTRACT.address,
|
|
994
|
-
functionName: "getForOperatorAndKey",
|
|
995
|
-
args: storageKeyBytes ? [operatorAddress, storageKeyBytes] : void 0,
|
|
996
|
-
chainId,
|
|
997
|
-
query: {
|
|
998
|
-
enabled: !!key && !!operatorAddress
|
|
999
|
-
}
|
|
1000
|
-
};
|
|
1001
|
-
const { data, isLoading, error } = useReadContract(readContractArgs);
|
|
1002
|
-
return {
|
|
1003
|
-
data: data ? (() => {
|
|
1004
|
-
const [text, valueHex] = data;
|
|
1005
|
-
return {
|
|
1006
|
-
text,
|
|
1007
|
-
value: outputAsString ? hexToString(valueHex) : valueHex
|
|
1008
|
-
};
|
|
1009
|
-
})() : void 0,
|
|
1010
|
-
isLoading,
|
|
1011
|
-
error
|
|
1012
|
-
};
|
|
1013
|
-
}
|
|
1014
|
-
function useBulkStorage({
|
|
1015
|
-
chainId,
|
|
1016
|
-
keys,
|
|
1017
|
-
safe = false,
|
|
1018
|
-
keyFormat
|
|
1019
|
-
}) {
|
|
1020
|
-
const contract = safe ? SAFE_STORAGE_READER_CONTRACT : STORAGE_CONTRACT;
|
|
1021
|
-
const bulkKeys = keys.map((k) => ({
|
|
1022
|
-
key: getStorageKeyBytes(k.key, keyFormat),
|
|
1023
|
-
operator: k.operator
|
|
1024
|
-
}));
|
|
1025
|
-
const readContractArgs = {
|
|
1026
|
-
abi: contract.abi,
|
|
1027
|
-
address: contract.address,
|
|
1028
|
-
functionName: "bulkGet",
|
|
1029
|
-
args: [bulkKeys],
|
|
1030
|
-
chainId
|
|
1031
|
-
};
|
|
1032
|
-
const { data, isLoading, error } = useReadContract(readContractArgs);
|
|
1033
|
-
return {
|
|
1034
|
-
data,
|
|
1035
|
-
isLoading,
|
|
1036
|
-
error
|
|
1037
|
-
};
|
|
1038
|
-
}
|
|
1039
|
-
function useStorageTotalWrites({
|
|
1040
|
-
chainId,
|
|
1041
|
-
key,
|
|
1042
|
-
operatorAddress,
|
|
1043
|
-
enabled = true,
|
|
1044
|
-
keyFormat
|
|
1045
|
-
}) {
|
|
1046
|
-
const storageKeyBytes = key ? getStorageKeyBytes(key, keyFormat) : void 0;
|
|
1047
|
-
const {
|
|
1048
|
-
data: chunkedTotal,
|
|
1049
|
-
isLoading: chunkedLoading,
|
|
1050
|
-
error: chunkedError
|
|
1051
|
-
} = useReadContract({
|
|
1052
|
-
abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
|
|
1053
|
-
address: CHUNKED_STORAGE_READER_CONTRACT.address,
|
|
1054
|
-
functionName: "getTotalWrites",
|
|
1055
|
-
args: storageKeyBytes && operatorAddress ? [storageKeyBytes, operatorAddress] : void 0,
|
|
1056
|
-
chainId,
|
|
1057
|
-
query: {
|
|
1058
|
-
enabled: enabled && !!key && !!operatorAddress
|
|
1059
|
-
}
|
|
1060
|
-
});
|
|
1061
|
-
const {
|
|
1062
|
-
data: regularTotal,
|
|
1063
|
-
isLoading: regularLoading,
|
|
1064
|
-
error: regularError
|
|
1065
|
-
} = useReadContract({
|
|
1066
|
-
abi: STORAGE_CONTRACT.abi,
|
|
1067
|
-
address: STORAGE_CONTRACT.address,
|
|
1068
|
-
functionName: "getTotalWrites",
|
|
1069
|
-
args: storageKeyBytes && operatorAddress ? [storageKeyBytes, operatorAddress] : void 0,
|
|
1070
|
-
chainId,
|
|
1071
|
-
query: {
|
|
1072
|
-
enabled: enabled && !!key && !!operatorAddress && (chunkedTotal === void 0 || Number(chunkedTotal) === 0)
|
|
1073
|
-
}
|
|
1074
|
-
});
|
|
1075
|
-
const chunkedTotalNumber = chunkedTotal ? Number(chunkedTotal) : 0;
|
|
1076
|
-
const regularTotalNumber = regularTotal ? Number(regularTotal) : 0;
|
|
1077
|
-
const totalWrites = chunkedTotalNumber > 0 ? chunkedTotalNumber : regularTotalNumber;
|
|
1078
|
-
return {
|
|
1079
|
-
data: totalWrites > 0 ? totalWrites : void 0,
|
|
1080
|
-
isLoading: chunkedLoading || regularLoading,
|
|
1081
|
-
error: chunkedTotalNumber === 0 && regularTotalNumber === 0 ? chunkedError || regularError : void 0
|
|
1082
|
-
};
|
|
1083
|
-
}
|
|
1084
825
|
var MAX_XML_DEPTH = 3;
|
|
1085
826
|
var CONCURRENT_XML_FETCHES = 3;
|
|
1086
827
|
function assembleXmlData(metadata, chunks, references) {
|
|
@@ -1226,308 +967,6 @@ async function resolveXmlRecursive(content, defaultOperator, client, maxDepth, v
|
|
|
1226
967
|
const assembled = assembleXmlData(content, resolvedChunks, references);
|
|
1227
968
|
return assembled;
|
|
1228
969
|
}
|
|
1229
|
-
function useXmlStorage({
|
|
1230
|
-
chainId,
|
|
1231
|
-
key,
|
|
1232
|
-
operatorAddress,
|
|
1233
|
-
skipXmlParsing = false,
|
|
1234
|
-
enabled = true,
|
|
1235
|
-
content,
|
|
1236
|
-
index,
|
|
1237
|
-
keyFormat,
|
|
1238
|
-
useRouter,
|
|
1239
|
-
outputFormat = "hex"
|
|
1240
|
-
}) {
|
|
1241
|
-
const isPreviewMode = !!content;
|
|
1242
|
-
const outputAsString = outputFormat === "string";
|
|
1243
|
-
const {
|
|
1244
|
-
data: metadata,
|
|
1245
|
-
isLoading: metadataLoading,
|
|
1246
|
-
error: metadataError
|
|
1247
|
-
} = useStorage({
|
|
1248
|
-
chainId,
|
|
1249
|
-
key: key || "",
|
|
1250
|
-
operatorAddress,
|
|
1251
|
-
enabled: enabled && !isPreviewMode,
|
|
1252
|
-
index,
|
|
1253
|
-
// Pass index to useStorage for historical versions
|
|
1254
|
-
keyFormat,
|
|
1255
|
-
// Pass keyFormat through
|
|
1256
|
-
useRouter,
|
|
1257
|
-
// Pass useRouter through to enable router path
|
|
1258
|
-
outputFormat: "string"
|
|
1259
|
-
// Always get plain string from useStorage, then convert based on our outputFormat
|
|
1260
|
-
});
|
|
1261
|
-
const metadataString = useMemo(() => {
|
|
1262
|
-
if (skipXmlParsing) return "";
|
|
1263
|
-
if (isPreviewMode) return content || "";
|
|
1264
|
-
if (!metadata?.value) return "";
|
|
1265
|
-
return metadata.value;
|
|
1266
|
-
}, [skipXmlParsing, isPreviewMode, content, metadata]);
|
|
1267
|
-
useMemo(() => {
|
|
1268
|
-
if (!metadataString) return [];
|
|
1269
|
-
return parseNetReferences(metadataString);
|
|
1270
|
-
}, [metadataString]);
|
|
1271
|
-
const [chunks, setChunks] = useState([]);
|
|
1272
|
-
const [chunksLoading, setChunksLoading] = useState(false);
|
|
1273
|
-
const [chunksError, setChunksError] = useState();
|
|
1274
|
-
useAsyncEffect(async () => {
|
|
1275
|
-
if (skipXmlParsing || !metadataString) {
|
|
1276
|
-
setChunks([]);
|
|
1277
|
-
setChunksLoading(false);
|
|
1278
|
-
return;
|
|
1279
|
-
}
|
|
1280
|
-
if (!containsXmlReferences(metadataString)) {
|
|
1281
|
-
setChunks([]);
|
|
1282
|
-
setChunksLoading(false);
|
|
1283
|
-
return;
|
|
1284
|
-
}
|
|
1285
|
-
setChunksLoading(true);
|
|
1286
|
-
setChunksError(void 0);
|
|
1287
|
-
try {
|
|
1288
|
-
const client = getPublicClient({ chainId });
|
|
1289
|
-
if (!client) {
|
|
1290
|
-
throw new Error(`Chain not found for chainId: ${chainId}`);
|
|
1291
|
-
}
|
|
1292
|
-
const resolved = await resolveXmlRecursive(
|
|
1293
|
-
metadataString,
|
|
1294
|
-
operatorAddress,
|
|
1295
|
-
client,
|
|
1296
|
-
MAX_XML_DEPTH,
|
|
1297
|
-
/* @__PURE__ */ new Set()
|
|
1298
|
-
);
|
|
1299
|
-
setChunks([resolved]);
|
|
1300
|
-
} catch (error) {
|
|
1301
|
-
console.error("[useXmlStorage] Error in recursive resolution:", error);
|
|
1302
|
-
setChunksError(error);
|
|
1303
|
-
setChunks([]);
|
|
1304
|
-
} finally {
|
|
1305
|
-
setChunksLoading(false);
|
|
1306
|
-
}
|
|
1307
|
-
}, [metadataString, operatorAddress, chainId, skipXmlParsing]);
|
|
1308
|
-
const assembledData = useMemo(() => {
|
|
1309
|
-
if (skipXmlParsing || !metadataString || !chunks.length) return void 0;
|
|
1310
|
-
return chunks[0];
|
|
1311
|
-
}, [metadataString, chunks, skipXmlParsing]);
|
|
1312
|
-
const isXml = useMemo(() => {
|
|
1313
|
-
if (skipXmlParsing || !metadataString) return false;
|
|
1314
|
-
return containsXmlReferences(metadataString);
|
|
1315
|
-
}, [metadataString, skipXmlParsing]);
|
|
1316
|
-
const formatValue = (value) => {
|
|
1317
|
-
if (!value) return "";
|
|
1318
|
-
return outputAsString ? value : stringToHex(value);
|
|
1319
|
-
};
|
|
1320
|
-
if (skipXmlParsing) {
|
|
1321
|
-
return {
|
|
1322
|
-
text: metadata?.text || "",
|
|
1323
|
-
value: isPreviewMode ? content || "" : formatValue(metadata?.value),
|
|
1324
|
-
isLoading: metadataLoading,
|
|
1325
|
-
error: metadataError,
|
|
1326
|
-
isXml: false
|
|
1327
|
-
};
|
|
1328
|
-
}
|
|
1329
|
-
return {
|
|
1330
|
-
text: metadata?.text || "",
|
|
1331
|
-
value: isXml ? formatValue(assembledData) : isPreviewMode ? content || "" : formatValue(metadata?.value),
|
|
1332
|
-
isLoading: metadataLoading || isXml && chunksLoading,
|
|
1333
|
-
error: metadataError || chunksError,
|
|
1334
|
-
isXml
|
|
1335
|
-
};
|
|
1336
|
-
}
|
|
1337
|
-
var BATCH_SIZE2 = 2;
|
|
1338
|
-
function useStorageFromRouter({
|
|
1339
|
-
chainId,
|
|
1340
|
-
storageKey,
|
|
1341
|
-
operatorAddress,
|
|
1342
|
-
enabled = true
|
|
1343
|
-
}) {
|
|
1344
|
-
const [assembledData, setAssembledData] = useState();
|
|
1345
|
-
const [isChunkLoading, setIsChunkLoading] = useState(false);
|
|
1346
|
-
const [chunkError, setChunkError] = useState();
|
|
1347
|
-
const {
|
|
1348
|
-
data: routerResult,
|
|
1349
|
-
isLoading: routerLoading,
|
|
1350
|
-
error: routerError
|
|
1351
|
-
} = useReadContract({
|
|
1352
|
-
abi: STORAGE_ROUTER_CONTRACT.abi,
|
|
1353
|
-
address: STORAGE_ROUTER_CONTRACT.address,
|
|
1354
|
-
functionName: "get",
|
|
1355
|
-
args: [storageKey, operatorAddress],
|
|
1356
|
-
chainId,
|
|
1357
|
-
query: {
|
|
1358
|
-
enabled: enabled && !!operatorAddress
|
|
1359
|
-
}
|
|
1360
|
-
});
|
|
1361
|
-
useEffect(() => {
|
|
1362
|
-
async function processResult() {
|
|
1363
|
-
if (!routerResult || routerLoading || routerError) {
|
|
1364
|
-
return;
|
|
1365
|
-
}
|
|
1366
|
-
const [isChunkedStorage, text, data] = routerResult;
|
|
1367
|
-
if (!isChunkedStorage) {
|
|
1368
|
-
setAssembledData({ text, value: data });
|
|
1369
|
-
return;
|
|
1370
|
-
}
|
|
1371
|
-
setIsChunkLoading(true);
|
|
1372
|
-
setChunkError(void 0);
|
|
1373
|
-
try {
|
|
1374
|
-
const [chunkCount] = decodeAbiParameters([{ type: "uint8" }], data);
|
|
1375
|
-
if (chunkCount === 0) {
|
|
1376
|
-
setAssembledData(void 0);
|
|
1377
|
-
return;
|
|
1378
|
-
}
|
|
1379
|
-
const allChunks = await fetchChunksInBatches(
|
|
1380
|
-
Number(chunkCount),
|
|
1381
|
-
operatorAddress,
|
|
1382
|
-
chainId,
|
|
1383
|
-
storageKey
|
|
1384
|
-
);
|
|
1385
|
-
const assembledString = assembleChunks(allChunks);
|
|
1386
|
-
if (assembledString === void 0) {
|
|
1387
|
-
setAssembledData(void 0);
|
|
1388
|
-
} else {
|
|
1389
|
-
const hexData = stringToHex(assembledString);
|
|
1390
|
-
setAssembledData({ text, value: hexData });
|
|
1391
|
-
}
|
|
1392
|
-
} catch (error) {
|
|
1393
|
-
setChunkError(error);
|
|
1394
|
-
} finally {
|
|
1395
|
-
setIsChunkLoading(false);
|
|
1396
|
-
}
|
|
1397
|
-
}
|
|
1398
|
-
processResult();
|
|
1399
|
-
}, [
|
|
1400
|
-
routerResult,
|
|
1401
|
-
routerLoading,
|
|
1402
|
-
routerError,
|
|
1403
|
-
operatorAddress,
|
|
1404
|
-
chainId,
|
|
1405
|
-
storageKey
|
|
1406
|
-
]);
|
|
1407
|
-
return {
|
|
1408
|
-
data: assembledData,
|
|
1409
|
-
isLoading: routerLoading || isChunkLoading,
|
|
1410
|
-
error: routerError || chunkError
|
|
1411
|
-
};
|
|
1412
|
-
}
|
|
1413
|
-
async function fetchChunksInBatches(chunkCount, operatorAddress, chainId, storageKey) {
|
|
1414
|
-
const client = getPublicClient({ chainId });
|
|
1415
|
-
if (!client) {
|
|
1416
|
-
throw new Error(`Chain not found for chainId: ${chainId}`);
|
|
1417
|
-
}
|
|
1418
|
-
const allChunks = [];
|
|
1419
|
-
for (let start = 0; start < chunkCount; start += BATCH_SIZE2) {
|
|
1420
|
-
const end = Math.min(start + BATCH_SIZE2, chunkCount);
|
|
1421
|
-
const batch = await readContract(client, {
|
|
1422
|
-
abi: CHUNKED_STORAGE_CONTRACT.abi,
|
|
1423
|
-
address: CHUNKED_STORAGE_CONTRACT.address,
|
|
1424
|
-
functionName: "getChunks",
|
|
1425
|
-
args: [storageKey, operatorAddress, start, end]
|
|
1426
|
-
});
|
|
1427
|
-
allChunks.push(...batch);
|
|
1428
|
-
}
|
|
1429
|
-
return allChunks;
|
|
1430
|
-
}
|
|
1431
|
-
|
|
1432
|
-
// src/client/storage.ts
|
|
1433
|
-
/**
 * Builds a viem/wagmi-style read config for `Storage.get(key, operator)`.
 * The key is normalized to bytes via getStorageKeyBytes.
 */
function getStorageReadConfig(params) {
  const keyBytes = getStorageKeyBytes(params.key, params.keyFormat);
  return {
    abi: STORAGE_CONTRACT.abi,
    address: STORAGE_CONTRACT.address,
    functionName: "get",
    args: [keyBytes, params.operator],
    chainId: params.chainId
  };
}
|
|
1444
|
-
/**
 * Builds a read config for `Storage.getValueAtIndex(key, operator, index)`,
 * i.e. one historical write rather than the latest value.
 */
function getStorageValueAtIndexReadConfig(params) {
  const keyBytes = getStorageKeyBytes(params.key, params.keyFormat);
  return {
    abi: STORAGE_CONTRACT.abi,
    address: STORAGE_CONTRACT.address,
    functionName: "getValueAtIndex",
    args: [keyBytes, params.operator, params.index],
    chainId: params.chainId
  };
}
|
|
1455
|
-
/**
 * Builds a read config for `Storage.getTotalWrites(key, operator)` —
 * the number of historical writes recorded for the key.
 */
function getStorageTotalWritesReadConfig(params) {
  const keyBytes = getStorageKeyBytes(params.key, params.keyFormat);
  return {
    abi: STORAGE_CONTRACT.abi,
    address: STORAGE_CONTRACT.address,
    functionName: "getTotalWrites",
    args: [keyBytes, params.operator],
    chainId: params.chainId
  };
}
|
|
1466
|
-
/**
 * Builds a read config for `bulkGet` over several (key, operator) pairs.
 * With `safe` set, the call targets the safe reader contract instead of
 * the plain storage contract. Each entry may carry its own keyFormat,
 * falling back to the shared one.
 */
function getStorageBulkGetReadConfig(params) {
  const { chainId, keys, safe = false, keyFormat } = params;
  const target = safe ? SAFE_STORAGE_READER_CONTRACT : STORAGE_CONTRACT;
  const bulkKeys = [];
  for (const entry of keys) {
    bulkKeys.push({
      key: getStorageKeyBytes(entry.key, entry.keyFormat ?? keyFormat),
      operator: entry.operator
    });
  }
  return {
    abi: target.abi,
    address: target.address,
    functionName: "bulkGet",
    args: [bulkKeys],
    chainId
  };
}
|
|
1481
|
-
/**
 * Builds a read config for `StorageRouter.get(key, operator)`, which
 * dispatches between plain and chunked storage on-chain.
 */
function getStorageRouterReadConfig(params) {
  const keyBytes = getStorageKeyBytes(params.key, params.keyFormat);
  return {
    abi: STORAGE_ROUTER_CONTRACT.abi,
    address: STORAGE_ROUTER_CONTRACT.address,
    functionName: "get",
    args: [keyBytes, params.operator],
    chainId: params.chainId
  };
}
|
|
1492
|
-
|
|
1493
|
-
// src/client/chunkedStorage.ts
|
|
1494
|
-
/**
 * Builds a read config for chunked-storage metadata. When `index` is
 * provided it targets `getMetadataAtIndex` (a historical write);
 * otherwise `getMetadata` returns the latest.
 */
function getChunkedStorageMetadataReadConfig(params) {
  const { chainId, key, operator, index, keyFormat } = params;
  const keyBytes = getStorageKeyBytes(key, keyFormat);
  const hasIndex = index !== void 0;
  return {
    abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
    address: CHUNKED_STORAGE_READER_CONTRACT.address,
    functionName: hasIndex ? "getMetadataAtIndex" : "getMetadata",
    args: hasIndex ? [keyBytes, operator, index] : [keyBytes, operator],
    chainId
  };
}
|
|
1507
|
-
/**
 * Builds a read config for a [start, end) range of chunks. When `index`
 * is provided it targets `getChunksAtIndex` (a historical write);
 * otherwise `getChunks` reads the latest.
 */
function getChunkedStorageChunksReadConfig(params) {
  const { chainId, key, operator, start, end, index, keyFormat } = params;
  const keyBytes = getStorageKeyBytes(key, keyFormat);
  const hasIndex = index !== void 0;
  return {
    abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
    address: CHUNKED_STORAGE_READER_CONTRACT.address,
    functionName: hasIndex ? "getChunksAtIndex" : "getChunks",
    args: hasIndex ? [keyBytes, operator, start, end, index] : [keyBytes, operator, start, end],
    chainId
  };
}
|
|
1520
|
-
/**
 * Builds a read config for chunked storage's `getTotalWrites(key, operator)`.
 */
function getChunkedStorageTotalWritesReadConfig(params) {
  const keyBytes = getStorageKeyBytes(params.key, params.keyFormat);
  return {
    abi: CHUNKED_STORAGE_READER_CONTRACT.abi,
    address: CHUNKED_STORAGE_READER_CONTRACT.address,
    functionName: "getTotalWrites",
    args: [keyBytes, params.operator],
    chainId: params.chainId
  };
}
|
|
1531
970
|
// Upper bound on the number of chunks per chunked-storage write.
// NOTE(review): 255 looks like a uint8 limit on-chain — confirm against contract.
var MAX_CHUNKS = 255;
// Target chunk size (80,000 characters) used by chunkData's default.
var OPTIMAL_CHUNK_SIZE = 80 * 1e3;
|
|
1533
972
|
function chunkData(data, chunkSize = OPTIMAL_CHUNK_SIZE) {
|
|
@@ -2202,7 +1641,130 @@ function base64ToDataUri(base64Data) {
|
|
|
2202
1641
|
const mimeType = detectFileTypeFromBase64(base64Data) || "application/octet-stream";
|
|
2203
1642
|
return `data:${mimeType};base64,${base64Data}`;
|
|
2204
1643
|
}
|
|
1644
|
+
// Slice size (bytes) used when streaming text files through processFileStreaming.
var STREAMING_CHUNK_SIZE = 80 * 1e3;
// Slice size for binary files. 79998 is divisible by 3, so each slice's
// base64 encoding carries no '=' padding — presumably so per-slice base64
// strings can be concatenated cleanly; verify against assembleChunks.
var BINARY_CHUNK_SIZE = 79998;
|
|
1646
|
+
/**
 * Heuristically decides whether a File should be treated as binary.
 * Text-like MIME types are text; for unknown/generic MIME types
 * ("" or application/octet-stream) a text-like file extension also
 * counts as text. Everything else is binary.
 *
 * @param {File} file  needs only `type` and `name`
 * @returns {boolean} true when the file should be base64-encoded
 */
function isBinaryFile(file) {
  const mime = file.type.toLowerCase();
  const textMimePrefixes = [
    "text/",
    "application/json",
    "application/xml",
    "application/javascript",
    "application/typescript",
    "application/x-javascript",
    "application/ecmascript"
  ];
  if (textMimePrefixes.some((prefix) => mime.startsWith(prefix))) {
    return false;
  }
  if (mime === "" || mime === "application/octet-stream") {
    // MIME type is uninformative — fall back to the file extension.
    const ext = file.name.split(".").pop()?.toLowerCase() || "";
    const textExtensions = [
      "txt", "md", "json", "xml", "html", "htm", "css", "js", "ts",
      "jsx", "tsx", "yaml", "yml", "toml", "ini", "cfg", "conf",
      "log", "csv", "svg"
    ];
    return !textExtensions.includes(ext);
  }
  return true;
}
|
|
1690
|
+
/**
 * Reads file[offset, offset + size) and returns it as a string.
 * Text mode returns the raw slice text. Binary mode returns base64;
 * the very first chunk of a binary file is additionally wrapped as a
 * `data:` URI so the assembled result self-describes its MIME type.
 *
 * @param {File} file
 * @param {number} offset  byte offset of the slice
 * @param {number} size  slice length in bytes
 * @param {boolean} isBinary  base64-encode instead of reading text
 * @param {boolean} isFirstChunk  wrap binary output in a data: URI prefix
 * @returns {Promise<string>}
 */
async function readFileSlice(file, offset, size, isBinary, isFirstChunk) {
  const slice = file.slice(offset, offset + size);
  if (!isBinary) {
    return await slice.text();
  }
  const bytes = new Uint8Array(await slice.arrayBuffer());
  // Feed String.fromCharCode in bounded pieces to stay under the
  // engine's argument-count limit on large slices. 32766 is divisible
  // by 3, so piecewise btoa results concatenate without padding.
  const PIECE = 32766;
  let base64 = "";
  for (let i = 0; i < bytes.length; i += PIECE) {
    base64 += btoa(String.fromCharCode(...bytes.subarray(i, i + PIECE)));
  }
  if (isFirstChunk) {
    const mimeType = detectFileTypeFromBase64(base64) || file.type || "application/octet-stream";
    return `data:${mimeType};base64,${base64}`;
  }
  return base64;
}
|
|
1710
|
+
/**
 * Async generator that walks a File slice-by-slice, yielding for each
 * slice its index, the keccak hash of the slice string, and the
 * storage-ready compressed chunks. A zero-byte file still yields one
 * entry (an empty data: URI for binary files, "" for text).
 *
 * @param {File} file
 * @param {number} [chunkSize] slice size for text files; binary files
 *   always slice at BINARY_CHUNK_SIZE.
 * @yields {{chunkIndex: number, hash: string, compressedChunks: unknown}}
 */
async function* processFileStreaming(file, chunkSize = STREAMING_CHUNK_SIZE) {
  const binary = isBinaryFile(file);
  const sliceSize = binary ? BINARY_CHUNK_SIZE : chunkSize;
  let emitted = 0;
  for (let offset = 0; offset < file.size; offset += sliceSize) {
    const sliceString = await readFileSlice(file, offset, sliceSize, binary, emitted === 0);
    yield {
      chunkIndex: emitted,
      hash: keccak256HashString(sliceString),
      compressedChunks: chunkDataForStorage(sliceString)
    };
    emitted++;
  }
  if (emitted === 0) {
    // Empty file: emit a single well-formed empty payload so callers
    // always receive at least one chunk.
    const empty = binary ? `data:${file.type || "application/octet-stream"};base64,` : "";
    yield {
      chunkIndex: 0,
      hash: keccak256HashString(empty),
      compressedChunks: chunkDataForStorage(empty)
    };
  }
}
|
|
1744
|
+
/**
 * Drains processFileStreaming, collecting each chunk's hash and
 * compressed-chunk array and reporting progress as it goes.
 *
 * @param {File} file
 * @param {(processed: number, total: number) => void} [onProgress]
 * @returns {Promise<{hashes: string[], allCompressedChunks: unknown[],
 *   totalChunks: number, isBinary: boolean}>} totalChunks is the actual
 *   count produced (>= 1, since empty files yield one chunk).
 */
async function processFileStreamingComplete(file, onProgress) {
  const binary = isBinaryFile(file);
  // Fix: estimate with the same slice size the streaming pass uses.
  // Binary files are sliced at BINARY_CHUNK_SIZE (79998), not
  // STREAMING_CHUNK_SIZE (80000); estimating with the latter could
  // under-count and let `processed` exceed the reported total.
  const sliceSize = binary ? BINARY_CHUNK_SIZE : STREAMING_CHUNK_SIZE;
  const totalChunks = Math.max(1, Math.ceil(file.size / sliceSize));
  const hashes = [];
  const allCompressedChunks = [];
  let processed = 0;
  for await (const result of processFileStreaming(file)) {
    hashes.push(result.hash);
    allCompressedChunks.push(result.compressedChunks);
    processed++;
    onProgress?.(processed, totalChunks);
  }
  return {
    hashes,
    allCompressedChunks,
    totalChunks: hashes.length,
    isBinary: binary
  };
}
|
|
1763
|
+
/**
 * Predicts how many streaming chunks a file of `fileSize` bytes will
 * produce, using the binary or text slice size. Always at least 1,
 * matching processFileStreaming's empty-file behavior.
 *
 * @param {number} fileSize  file length in bytes
 * @param {boolean} [isBinary=true]  pick BINARY_CHUNK_SIZE vs STREAMING_CHUNK_SIZE
 * @returns {number}
 */
function estimateChunkCount(fileSize, isBinary = true) {
  const sliceSize = isBinary ? BINARY_CHUNK_SIZE : STREAMING_CHUNK_SIZE;
  if (fileSize <= 0) {
    return 1;
  }
  return Math.ceil(fileSize / sliceSize);
}
|
|
2205
1767
|
|
|
2206
|
-
export { CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, parseNetReferences, processDataForStorage,
|
|
1768
|
+
export { CHUNKED_STORAGE_CONTRACT, CHUNKED_STORAGE_READER_CONTRACT, CONCURRENT_XML_FETCHES, MAX_XML_DEPTH, SAFE_STORAGE_READER_CONTRACT, STORAGE_CONTRACT, STORAGE_ROUTER_CONTRACT, StorageClient, assembleChunks, base64ToDataUri, chunkData, chunkDataForStorage, computeTopLevelHash, containsXmlReferences, detectFileTypeFromBase64, detectStorageType, encodeStorageKeyForUrl, estimateChunkCount, fileToDataUri, formatStorageKeyForDisplay, generateStorageEmbedTag, generateXmlMetadata, generateXmlMetadataWithSource, getChunkCount, getReferenceKey, getStorageKeyBytes, isBinaryFile, parseNetReferences, processDataForStorage, processFileStreaming, processFileStreamingComplete, readFileSlice, resolveOperator, resolveXmlRecursive, shouldSuggestXmlStorage, validateDataSize };
|
|
2207
1769
|
//# sourceMappingURL=index.mjs.map
|
|
2208
1770
|
//# sourceMappingURL=index.mjs.map
|