@acala-network/chopsticks 0.10.2 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/cli.js +4 -1
- package/dist/cjs/context.d.ts +11 -0
- package/dist/cjs/context.js +14 -2
- package/dist/cjs/logger.d.ts +3 -0
- package/dist/cjs/logger.js +45 -0
- package/dist/cjs/plugins/dry-run/index.d.ts +3 -0
- package/dist/cjs/plugins/dry-run/rpc.d.ts +4 -4
- package/dist/cjs/plugins/fetch-storage/cli.d.ts +2 -0
- package/dist/cjs/plugins/fetch-storage/cli.js +49 -0
- package/dist/cjs/plugins/fetch-storage/index.d.ts +1 -0
- package/dist/cjs/plugins/fetch-storage/index.js +18 -0
- package/dist/cjs/plugins/index.d.ts +3 -1
- package/dist/cjs/plugins/index.js +30 -2
- package/dist/cjs/rpc/index.js +3 -3
- package/dist/cjs/schema/index.d.ts +3 -0
- package/dist/cjs/schema/index.js +4 -1
- package/dist/cjs/setup-with-server.d.ts +11 -0
- package/dist/cjs/setup-with-server.js +1 -0
- package/dist/cjs/utils/fetch-storages-worker.js +62 -0
- package/dist/cjs/utils/fetch-storages.d.ts +26 -0
- package/dist/cjs/utils/fetch-storages.js +213 -0
- package/dist/cjs/utils/index.d.ts +1 -0
- package/dist/cjs/utils/index.js +1 -0
- package/dist/esm/cli.js +5 -2
- package/dist/esm/context.d.ts +11 -0
- package/dist/esm/context.js +14 -2
- package/dist/esm/logger.d.ts +3 -0
- package/dist/esm/logger.js +34 -0
- package/dist/esm/plugins/dry-run/index.d.ts +3 -0
- package/dist/esm/plugins/dry-run/rpc.d.ts +4 -4
- package/dist/esm/plugins/fetch-storage/cli.d.ts +2 -0
- package/dist/esm/plugins/fetch-storage/cli.js +34 -0
- package/dist/esm/plugins/fetch-storage/index.d.ts +1 -0
- package/dist/esm/plugins/fetch-storage/index.js +1 -0
- package/dist/esm/plugins/index.d.ts +3 -1
- package/dist/esm/plugins/index.js +25 -2
- package/dist/esm/rpc/index.js +4 -4
- package/dist/esm/schema/index.d.ts +3 -0
- package/dist/esm/schema/index.js +4 -1
- package/dist/esm/setup-with-server.d.ts +11 -0
- package/dist/esm/setup-with-server.js +1 -0
- package/dist/esm/utils/fetch-storages-worker.js +12 -0
- package/dist/esm/utils/fetch-storages.d.ts +26 -0
- package/dist/esm/utils/fetch-storages.js +188 -0
- package/dist/esm/utils/index.d.ts +1 -0
- package/dist/esm/utils/index.js +1 -0
- package/package.json +3 -3
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
import { Api, defaultLogger } from '@acala-network/chopsticks-core';
|
|
2
|
+
import { ApiPromise } from '@polkadot/api';
|
|
3
|
+
import { SqliteDatabase } from '@acala-network/chopsticks-db';
|
|
4
|
+
import { WsProvider } from '@polkadot/rpc-provider';
|
|
5
|
+
import { compactStripLength, stringCamelCase, u8aToHex } from '@polkadot/util';
|
|
6
|
+
import { expandMetadata } from '@polkadot/types';
|
|
7
|
+
import { releaseProxy, wrap } from 'comlink';
|
|
8
|
+
import { xxhashAsHex } from '@polkadot/util-crypto';
|
|
9
|
+
import _ from 'lodash';
|
|
10
|
+
import nodeEndpoint from 'comlink/dist/umd/node-adapter.js';
|
|
11
|
+
import threads from 'node:worker_threads';
|
|
12
|
+
// Maximum number of storage keys requested per paged RPC call
// (also the flush size for each db write batch below).
const BATCH_SIZE = 1000;
// Child logger scoping all fetch-storages output under one name.
export const logger = defaultLogger.child({
  name: 'fetch-storages'
});
|
|
16
|
+
/**
 * Build the hex storage key for a storage entry and the supplied args.
 *
 * When fewer args than the entry's full set of map hashers are given (a
 * partial key) and the entry exposes `iterKey`, an iteration prefix key is
 * produced; otherwise the full key is encoded and its compact length prefix
 * stripped.
 * NOTE(review): assumes `meta`/`storage` follow the @polkadot/api storage
 * entry shape (`type.isPlain`, `creator.iterKey`) — confirm against callers.
 */
const getHexKeyWithArgs = (meta, storage, args) => {
  const expectedArgCount = meta.type.isPlain ? 0 : meta.type.asMap.hashers.length;
  const isPartialKey = args.length !== expectedArgCount;
  if (isPartialKey && storage.creator.iterKey) {
    return storage.creator.iterKey(...args).toHex();
  }
  const [, strippedKey] = compactStripLength(storage.creator(...args));
  return u8aToHex(strippedKey);
};
|
|
21
|
+
/**
 * Resolve a pallet (and optionally one of its storage entries) by name from
 * decorated metadata, throwing a descriptive error when either is missing.
 *
 * @param meta decorated metadata (result of `expandMetadata`)
 * @param palletName pallet name in any casing (camel-cased for lookup)
 * @param storageName optional storage entry name within the pallet
 * @returns `{ pallet, storage }` — `storage` is undefined when no
 *          `storageName` was given
 * @throws {Error} when the pallet or storage entry cannot be found
 */
const checkPalletStorageByName = (meta, palletName, storageName) => {
  const pallet = meta.query[stringCamelCase(palletName)];
  // `new Error` (was bare `Error(...)`) for idiomatic consistency with the file
  if (!pallet) throw new Error(`Cannot find pallet ${palletName}`);
  let storage;
  if (storageName) {
    storage = pallet[stringCamelCase(storageName)];
    if (!storage) throw new Error(`Cannot find storage ${storageName} in pallet ${palletName}`);
  }
  return {
    pallet,
    storage
  };
};
|
|
34
|
+
/**
 * Convert fetch-storage configs to prefixes for fetching.
 *
 * Supported item shapes:
 *  - '0x…'                               raw hex prefix, used as-is
 *  - 'Pallet'                            whole-pallet prefix
 *  - 'Pallet.Storage'                    single storage prefix
 *  - { Pallet: 'Storage' }               single storage prefix
 *  - { 'Pallet.Storage': [args…] }       keyed entry (possibly partial key)
 *  - { Pallet: { Storage: [args…] } }    keyed entry (possibly partial key)
 *
 * Object items may carry multiple entries; previously only the first entry of
 * each object was honoured and the rest were silently dropped.
 *
 * @param config array of config items as described above
 * @param api    connected ApiPromise used to resolve metadata and keys
 * @returns hex prefixes (possibly containing duplicates; the caller dedupes)
 * @throws {Error} on unknown pallets/storages or unsupported item shapes
 */ export const getPrefixesFromConfig = async (config, api) => {
  logger.debug({ config }, 'received fetch-storage config');
  const prefixes = [];
  const metadata = await api.rpc.state.getMetadata();
  const expandMeta = expandMetadata(metadata.registry, metadata);
  for (const item of config) {
    if (typeof item === 'string' && item.startsWith('0x')) {
      // hex
      prefixes.push(item);
    } else if (typeof item === 'string' && !item.includes('.')) {
      // pallet
      checkPalletStorageByName(expandMeta, item);
      prefixes.push(xxhashAsHex(item, 128));
    } else if (typeof item === 'string' && item.includes('.')) {
      // pallet.storage
      const [palletName, storageName] = item.split('.');
      const { storage } = checkPalletStorageByName(expandMeta, palletName, storageName);
      prefixes.push(u8aToHex(storage.keyPrefix()));
    } else if (typeof item === 'object' && item !== null) {
      // object cases — process EVERY entry (was: only Object.entries(...)[0])
      for (const [objectKey, objectVal] of Object.entries(item)) {
        if (typeof objectVal === 'string') {
          // - System: Account
          const { storage } = checkPalletStorageByName(expandMeta, objectKey, objectVal);
          prefixes.push(u8aToHex(storage.keyPrefix()));
        } else if (objectKey.includes('.') && Array.isArray(objectVal)) {
          // - Pallet.Storage: [xxx, ...]
          const [pallet, storage] = objectKey.split('.').map((x) => stringCamelCase(x));
          checkPalletStorageByName(expandMeta, pallet, storage);
          const storageEntry = api.query[pallet][storage];
          const meta = storageEntry.creator.meta;
          prefixes.push(getHexKeyWithArgs(meta, storageEntry, objectVal));
        } else if (objectVal !== null && typeof objectVal === 'object' && !Array.isArray(objectVal)) {
          // - Tokens:
          //     Accounts: [xxx, ...]
          const pallet = stringCamelCase(objectKey);
          for (const [storage, args] of Object.entries(objectVal)) {
            checkPalletStorageByName(expandMeta, pallet, storage);
            const storageEntry = api.query[pallet][stringCamelCase(storage)];
            const meta = storageEntry.creator.meta;
            prefixes.push(getHexKeyWithArgs(meta, storageEntry, args));
          }
        } else {
          // was reached only for `[args]` without a dotted key; now also covers
          // null/primitive values that previously crashed or were mis-handled
          throw new Error(`Unsupported fetch-storage config: ${objectKey}.${objectVal}`);
        }
      }
    } else {
      // previously non-string/non-object items were silently skipped
      throw new Error(`Unsupported fetch-storage config: ${item}`);
    }
  }
  logger.debug({ prefixes }, 'prefixes from config');
  return prefixes;
};
|
|
92
|
+
/**
 * Fetch storages and save in a local db
 *
 * Connects to `endpoint`, resolves `block` to a hash, converts `config` into
 * a minimal set of key prefixes, then pages through each prefix (BATCH_SIZE
 * keys at a time) saving key/value pairs into a SQLite database.
 *
 * @param block    block number or 0x-hash to fetch at (required)
 * @param endpoint ws(s) endpoint of the source chain (required)
 * @param dbPath   target sqlite path; defaults to `db-<chain>-<number>.sqlite`
 * @param config   fetch-storage config items (see getPrefixesFromConfig)
 * @throws {Error} on missing arguments, unresolvable block, or an empty prefix set
 */ export const fetchStorages = async ({ block, endpoint, dbPath, config }) => {
  if (!endpoint) throw new Error('endpoint is required');
  if (!block) throw new Error('block is required');
  const provider = new WsProvider(endpoint, 3_000);
  const apiPromise = new ApiPromise({ provider });
  await apiPromise.isReady;

  // Resolve `block` to a hash. A `block == null` "use latest header" branch
  // used to live here but was unreachable — `if (!block) throw` above rejects
  // null/undefined first — so it has been removed.
  let blockHash;
  if (typeof block === 'string' && block.startsWith('0x')) {
    blockHash = block;
  } else if (Number.isInteger(+block)) {
    blockHash = await apiPromise.rpc.chain.getBlockHash(Number(block)).then((h) => h.toString());
  } else {
    throw new Error(`Invalid block number or hash: ${block}`);
  }

  const prefixesFromConfig = await getPrefixesFromConfig(config, apiPromise);
  const uniqPrefixes = _.uniq(prefixesFromConfig);

  // Drop any prefix already covered by a shorter prefix we kept, so the same
  // keys are never fetched twice (sorting puts covering prefixes first).
  const processPrefixes = (prefixes) => {
    prefixes.sort();
    const result = [];
    for (const prefix of prefixes) {
      // keep only prefixes that no previously-kept prefix covers
      if (!result.some((prev) => prefix.startsWith(prev))) {
        result.push(prefix);
      }
    }
    return result;
  };
  const prefixes = processPrefixes(uniqPrefixes);
  if (!prefixes.length) throw new Error('No prefixes to fetch');

  const signedBlock = await apiPromise.rpc.chain.getBlock(blockHash);
  const blockNumber = signedBlock.block.header.number.toNumber();
  const chainName = (await apiPromise.rpc.system.chain()).toString();
  const finalDbPath = dbPath ?? `db-${chainName}-${blockNumber}.sqlite`;
  const api = new Api(provider);
  const db = new SqliteDatabase(finalDbPath);
  // NOTE(review): provider/api/db are never closed here; in practice the
  // worker's termination tears them down — confirm direct callers don't leak.
  logger.info(`Storages will be saved at ${finalDbPath}, use '--db=${finalDbPath} --block=${blockNumber}' to apply it later on`);

  for (const prefix of prefixes) {
    let startKey = '0x';
    let hasMorePages = true;
    while (hasMorePages) {
      logger.debug({ prefix, startKey }, 'fetching keys');
      const keysPage = await api.getKeysPaged(prefix, BATCH_SIZE, startKey, blockHash);
      logger.debug({ prefix, startKey }, `fetched ${keysPage.length} keys`);
      if (!keysPage.length) break;
      // resume after the last key of this page on the next iteration
      startKey = keysPage[keysPage.length - 1];
      // a short page means we've drained this prefix; fetch its values below
      // then stop (the redundant `!keysPage` check was removed — it is always
      // truthy past the `.length` dereference above)
      if (keysPage.length < BATCH_SIZE) {
        hasMorePages = false;
      }
      logger.debug({ prefix }, 'fetching storages');
      const storages = await api.getStorageBatch(prefix, keysPage, blockHash);
      logger.debug({ prefix }, `fetched ${storages.length} storages`);
      const keyValueEntries = storages.map(([key, value]) => ({ blockHash, key, value }));
      await db.saveStorageBatch(keyValueEntries);
      logger.debug({ prefix }, `saved ${storages.length} storages ✅`);
    }
  }
  logger.info(`Storages are saved at ${finalDbPath}, use '--db=${finalDbPath} --block=${blockNumber}' to apply it`);
};
|
|
173
|
+
/**
 * Spawn a background worker thread that runs the storage fetch with `options`.
 *
 * The fetch itself is fire-and-forget: this function resolves as soon as the
 * worker has been started, not when fetching completes.
 *
 * @param options fetch options; when `options.config` is absent no worker is
 *                started and `null` is returned
 * @returns `{ worker, terminate }` — `terminate` releases the comlink proxy
 *          and stops the worker thread — or `null` when disabled
 */ export const startFetchStorageWorker = async (options) => {
  if (!options.config) return null;

  const worker = new threads.Worker(new URL('./fetch-storages-worker.js', import.meta.url), {
    name: 'fetch-storages-worker'
  });
  const workerApi = wrap(nodeEndpoint(worker));

  // Deliberately not awaited so the caller isn't blocked on the whole fetch,
  // but a rejection handler is attached: a bare floating promise would surface
  // as an unhandled rejection and can crash the Node process.
  workerApi.startFetch(options).catch((err) => {
    logger.error({ err }, 'fetch-storages worker failed');
  });

  const terminate = async () => {
    workerApi[releaseProxy]();
    await worker.terminate();
  };

  return {
    worker: workerApi,
    terminate
  };
};
|
package/dist/esm/utils/index.js
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@acala-network/chopsticks",
|
|
3
|
-
"version": "0.10.2",
|
|
3
|
+
"version": "0.12.0",
|
|
4
4
|
"author": "Acala Developers <hello@acala.network>",
|
|
5
5
|
"license": "Apache-2.0",
|
|
6
6
|
"bin": "./chopsticks.cjs",
|
|
@@ -13,8 +13,8 @@
|
|
|
13
13
|
"docs:prep": "typedoc"
|
|
14
14
|
},
|
|
15
15
|
"dependencies": {
|
|
16
|
-
"@acala-network/chopsticks-core": "0.10.2",
|
|
17
|
-
"@acala-network/chopsticks-db": "0.10.2",
|
|
16
|
+
"@acala-network/chopsticks-core": "0.12.0",
|
|
17
|
+
"@acala-network/chopsticks-db": "0.12.0",
|
|
18
18
|
"@pnpm/npm-conf": "^2.2.2",
|
|
19
19
|
"@polkadot/api-augment": "^10.11.2",
|
|
20
20
|
"@polkadot/types": "^10.11.2",
|