@acala-network/chopsticks 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +52 -0
- package/chopsticks.js +2 -0
- package/dist/api.d.ts +43 -0
- package/dist/api.js +78 -0
- package/dist/blockchain/block.d.ts +38 -0
- package/dist/blockchain/block.js +178 -0
- package/dist/blockchain/head-state.d.ts +13 -0
- package/dist/blockchain/head-state.js +57 -0
- package/dist/blockchain/index.d.ts +35 -0
- package/dist/blockchain/index.js +115 -0
- package/dist/blockchain/inherents.d.ts +26 -0
- package/dist/blockchain/inherents.js +96 -0
- package/dist/blockchain/storage-layer.d.ts +32 -0
- package/dist/blockchain/storage-layer.js +168 -0
- package/dist/blockchain/txpool.d.ts +13 -0
- package/dist/blockchain/txpool.js +165 -0
- package/dist/db/entities.d.ts +5 -0
- package/dist/db/entities.js +34 -0
- package/dist/db/index.d.ts +3 -0
- package/dist/db/index.js +41 -0
- package/dist/executor.d.ts +16 -0
- package/dist/executor.js +25 -0
- package/dist/executor.test.d.ts +1 -0
- package/dist/executor.test.js +58 -0
- package/dist/genesis-provider.d.ts +42 -0
- package/dist/genesis-provider.js +141 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.js +208 -0
- package/dist/logger.d.ts +7 -0
- package/dist/logger.js +25 -0
- package/dist/rpc/dev.d.ts +3 -0
- package/dist/rpc/dev.js +33 -0
- package/dist/rpc/exec.d.ts +3 -0
- package/dist/rpc/exec.js +44 -0
- package/dist/rpc/index.d.ts +5 -0
- package/dist/rpc/index.js +25 -0
- package/dist/rpc/shared.d.ts +30 -0
- package/dist/rpc/shared.js +20 -0
- package/dist/rpc/substrate/author.d.ts +3 -0
- package/dist/rpc/substrate/author.js +42 -0
- package/dist/rpc/substrate/chain.d.ts +5 -0
- package/dist/rpc/substrate/chain.js +62 -0
- package/dist/rpc/substrate/index.d.ts +3 -0
- package/dist/rpc/substrate/index.js +20 -0
- package/dist/rpc/substrate/state.d.ts +3 -0
- package/dist/rpc/substrate/state.js +80 -0
- package/dist/rpc/substrate/system.d.ts +3 -0
- package/dist/rpc/substrate/system.js +27 -0
- package/dist/schema/index.d.ts +183 -0
- package/dist/schema/index.js +29 -0
- package/dist/server.d.ts +9 -0
- package/dist/server.js +148 -0
- package/dist/task.d.ts +38 -0
- package/dist/task.js +66 -0
- package/dist/utils/import-storage.d.ts +4 -0
- package/dist/utils/import-storage.js +43 -0
- package/dist/utils/index.d.ts +7 -0
- package/dist/utils/index.js +32 -0
- package/dist/utils/set-storage.d.ts +6 -0
- package/dist/utils/set-storage.js +57 -0
- package/package.json +91 -0
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.SetValidationData = exports.InherentProviders = exports.SetTimestamp = void 0;
|
|
4
|
+
const types_1 = require("@polkadot/types");
|
|
5
|
+
/**
 * Inherent provider that injects a `timestamp.set` extrinsic into every
 * newly-built block.
 */
class SetTimestamp {
    #getTimestamp;
    /**
     * @param getTimestamp Callback producing the timestamp (defaults to Date.now).
     */
    constructor(getTimestamp = Date.now) {
        this.#getTimestamp = getTimestamp;
    }
    /** Build the timestamp inherent as a single hex-encoded extrinsic. */
    async createInherents(meta, timestamp, _parent) {
        const call = meta.tx.timestamp.set(timestamp);
        const extrinsic = new types_1.GenericExtrinsic(meta.registry, call);
        return [extrinsic.toHex()];
    }
    /** Current timestamp as produced by the configured callback. */
    getTimestamp() {
        return this.#getTimestamp();
    }
}
|
|
17
|
+
exports.SetTimestamp = SetTimestamp;
|
|
18
|
+
// Hard-coded validation data used to bootstrap a parachain started from
// genesis: there is no previous `setValidationData` extrinsic to derive the
// next one from, so the first block is fed this canned value instead.
// `relayChainState.trieNodes` is a captured relay-chain storage proof
// (presumably matching `relayParentStorageRoot` — TODO confirm).
const MOCK_VALIDATION_DATA = {
    validationData: {
        relayParentNumber: 1000,
        relayParentStorageRoot: '0x49416764844ff0d8bad851e8abe686dff9dd2de78621180ef8e9f99bb7a480f1',
        maxPovSize: 5242880,
    },
    relayChainState: {
        trieNodes: [
            '0x5f04b49d95320d9021994c850f25b8e385f902000030000080000008000000000010000000100005000000050000000a0000000a000000000050000000100000e8764817000000040000000400000000000000000000000000000000000000000000000000000000000000000000000800000000200000040000000400000000001000b0040000000000000000000014000000040000000400000000000000010100000000060000006400000002000000c8000000020000001900000000000000020000000200000000c817a804000000',
            '0x80011080ace5323aee784b03389c0e2cc68d81973f8fa26d395f333ecad7399271c781e1808e5db75be813c05205986cbd6fdede707a4d26816063a41eb42ebc262c734fad',
            '0x8004648086a9239b72237f5bf119e2a880c32f5866460632700509cb874c60f67fe815ea80f6f6801e4b41e2e6d8ec194dba122bfb9eb33feb2545ef5144cea79551f7cc5280c629a7e712d763fe83b35d2a082430af6737a89f23219c0eb3051c83bc5af5ad80fed5ecd6097308a6540f8cf31aeaad186e6898d2ecc0e623767c521c70e39953',
            '0x800804809f3ada68c357b5e0a3ebb39ef181acfa9943af4725c244330a4b2c60837612e88082ad3fbdf392429afeacc70177704b760bb145547c1f152e1fcf651916b43820',
            '0x8008208042793f557c1d003b647e2eda79c2b5088c7d8cab2e82c1dcc87f4343cca91ae4485ead6eef5c4b1c68eaa71ea17a02d9de0400',
            '0x80210280de38445d825563f8e218255a417c16971afa85b4f2ef18fbe08fbc5b976dc0d6801a2ff24096295cfccf1adda80b8dfffe380b9f3b54d7a3cdb67864a4655e62968022a699b2cc90a6654c84163d2a498506b192afe7cd9777227e5288e8ff069c0f',
            '0x80400180ebebd1a1cd0bbc6714b7fb0ac854cca5a4c4e34e69485da48be3c8087b56e09b80128645c79ca6581c248a412fd7b8bc532a187600e6e1cc20c915538ba4df6a79',
            '0x80ffbe80d9302a505e2b1ac931f539aed33bf791d1982906ae64c7197324044f191e9bca80972cd2f703f9c94fd516e14b7013c6f1545095855b6c4b36f21d89dad30aa54d80b2495ce4d07001927bb0857611f8d3a1449e791d0b010e3861c32dec0c44179680f5929c8ef9b0ac6ec8a529c91348d8cd6c169920dd37d055544a6c31c53b11e380402a0bf7ff07cee790d9cc065fc138ff6afa7db016d898d65b2b716af354c6f68042aef1dafffd1d9debbb8e6c4eb48b5c141ddf0aad2b0f3f4ddf53e6b38e65c080b31fa0392c1abdce1aa29d1544c94634ecab87ecaba6409db33aaa7621992a8280b1f4de7c3ac5665016d561a60659cd2d8f2d3e0a97e2ea9749279bd8e35eb1f180816ac87a2893694016b21768137a76ea459190ea0fc3c645d85e1a3d4eb194fe802e563b43e7334454c841953424be8c2b7a1c3295dbc391045cb6b88388ad5e7080b1ed3b02e5989b7d134ca056c778f1f5b6ffd377b2d8064483da6e94b82b0e40800cb3299c44a5db8fdcb4932e1b6ab0385d6ef1c9f8d85e0a75b787b6f4fd6c3c805a44c30e2676dc2d4c17451f51d9502e85064387999f366e6f3d404509a7780f80d6788ca71c6aabe421c352027acdb9532563dc5f1b25e6154b721f431e9990ed',
            '0x9d0da05ca59913bc38a8630590f2627c154080834dda0ba5adf00d798e981a28a13e728cf83e35aefc87318440a61869f724474c5f0a351b6a99a5b21324516e668bb86a570400505f0e7b9012096b41c4eb3aaf947f6ea4290800007c7700e67da63472835bb0b737093a19ad4c63f5a4efb16ffa83d00700000400',
            '0x9e207f03cfdce586301014700e2c25931040505f0e7b9012096b41c4eb3aaf947f6ea4290800004c5f0ec2d17a76153ff51817f12d9cfc3c7f0400',
            '0x9e710b30bd2eab0352ddcc26417aa1945fc180699a53b51a9709a3a86039c49b5ef278e9fc244dae27e1a0380c91bff5b0488580c0d4096d94e724b8e86f952e5456c7253776de04c405582d2c350ee172d3eaa77c77081e0bfde17b36573208a06cb5cfba6b63f5a4efb16ffa83d00700000402803d0ae0b8f6832e8fabf0ec62521c2487c58b69eb97060faa8059b00ff6b7262d505f0e7b9012096b41c4eb3aaf947f6ea4290800004c5f03c716fb8fff3de61a883bb76adb34a20400806c8122e0f7f786071d6a51b330d612eccdcbe8d8f79936accabd640506dffdf380a6abfb72ed49b586829cca4ce631c092d45a017ab0d68288d308873025cfe5d280521b868fc212b25f021984cf02ced547cd45952b88360766839dfde7d4683e61',
            '0x9ede3d8a54d27e44a9d5ce189618f22d1008505f0e7b9012096b41c4eb3aaf947f6ea42908010080c74756edffa217dfb07ab596d82753deff985ac215e5cc2997d29afe1d397c16',
            '0x9ef78c98723ddc9073523ef3beefda0c1004505f0e7b9012096b41c4eb3aaf947f6ea4290800007c77095dac46c07a40d91506e7637ec4ba5763f5a4efb16ffa83d00700000400',
        ],
    },
};
|
|
42
|
+
/**
 * Composite inherent provider: chains a base provider (which also supplies the
 * timestamp) with any number of additional providers, concatenating their
 * inherents in order.
 */
class InherentProviders {
    #base;
    #providers;
    constructor(base, providers) {
        this.#base = base;
        this.#providers = providers;
    }
    /** Inherents from the base provider first, then from every extra provider. */
    async createInherents(meta, timestamp, parent) {
        const fromBase = await this.#base.createInherents(meta, timestamp, parent);
        const pending = [];
        for (const provider of this.#providers) {
            pending.push(provider.createInherents(meta, timestamp, parent));
        }
        const fromExtras = await Promise.all(pending);
        return fromExtras.reduce((acc, list) => acc.concat(list), fromBase.slice());
    }
    /** The timestamp is always delegated to the base provider. */
    getTimestamp() {
        return this.#base.getTimestamp();
    }
}
|
|
58
|
+
exports.InherentProviders = InherentProviders;
|
|
59
|
+
// Inherent provider that injects the `parachainSystem.setValidationData`
// extrinsic required by parachain runtimes on every block.
class SetValidationData {
    #tasks;
    // Position of the setValidationData extrinsic within the parent block's
    // extrinsic list; used to copy and advance the previous validation data.
    #expectedIndex;
    constructor(tasks, expectedIndex) {
        this.#tasks = tasks;
        this.#expectedIndex = expectedIndex;
    }
    async createInherents(meta, _timestamp, parent) {
        // Not a parachain runtime: nothing to inject.
        if (!meta.tx.parachainSystem?.setValidationData) {
            return [];
        }
        void this.#tasks; // TODO
        const parentBlock = await parent.parentBlock;
        if (!parentBlock) {
            throw new Error('Parent block not found');
        }
        const extrinsics = await parentBlock.extrinsics;
        let newData;
        if (parentBlock.number === 0) {
            // chain started with genesis, mock 1st validationData
            newData = MOCK_VALIDATION_DATA;
        }
        else {
            // Decode the parent block's setValidationData extrinsic and reuse its
            // payload, advancing the relay parent number so it reads as fresh.
            const method = meta.registry.createType('GenericExtrinsic', extrinsics[this.#expectedIndex]);
            const validationData = method.args[0].toJSON();
            newData = {
                ...validationData,
                validationData: {
                    ...validationData.validationData,
                    // NOTE(review): assumes the relay chain advances exactly 2 blocks
                    // per parachain block — confirm against the target chain's config.
                    relayParentNumber: validationData.validationData.relayParentNumber + 2,
                },
            };
        }
        const inherent = new types_1.GenericExtrinsic(meta.registry, meta.tx.parachainSystem.setValidationData(newData));
        return [inherent.toHex()];
    }
}
|
|
96
|
+
exports.SetValidationData = SetValidationData;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { DataSource } from 'typeorm';
|
|
2
|
+
import { Api } from '../api';
|
|
3
|
+
/** Marker values recorded in a layer to represent deletions. */
export declare const enum StorageValueKind {
    Deleted = "Deleted",
    DeletedPrefix = "DeletedPrefix"
}
/** A storage value: raw hex string, a deletion marker, or absent. */
export type StorageValue = string | StorageValueKind | undefined;
/** Common interface implemented by every layer in the storage-layer chain. */
export interface StorageLayerProvider {
    get(key: string, cache: boolean): Promise<StorageValue>;
    foldInto(into: StorageLayer): Promise<StorageLayerProvider | undefined>;
    fold(): Promise<void>;
    getKeysPaged(prefix: string, pageSize: number, startKey: string): Promise<string[]>;
}
/**
 * Read-only layer backed by a remote node at a fixed block hash, with an
 * optional local database acting as a read-through cache.
 */
export declare class RemoteStorageLayer implements StorageLayerProvider {
    #private;
    constructor(api: Api, at: string, db: DataSource | undefined);
    get(key: string): Promise<StorageValue>;
    foldInto(_into: StorageLayer): Promise<StorageLayerProvider>;
    fold(): Promise<void>;
    getKeysPaged(prefix: string, pageSize: number, startKey: string): Promise<string[]>;
}
/** In-memory overlay of storage modifications on top of an optional parent. */
export declare class StorageLayer implements StorageLayerProvider {
    #private;
    constructor(parent?: StorageLayerProvider);
    get(key: string, cache: boolean): Promise<StorageValue | undefined>;
    set(key: string, value: StorageValue): void;
    /** Bulk set; `null` values are treated as deletions. */
    setAll(values: Record<string, StorageValue | null> | [string, StorageValue | null][]): void;
    foldInto(into: StorageLayer): Promise<StorageLayerProvider | undefined>;
    fold(): Promise<void>;
    getKeysPaged(prefix: string, pageSize: number, startKey: string): Promise<string[]>;
    /** Await all stored values and write them into `into` (Deleted becomes null). */
    mergeInto(into: Record<string, string | null>): Promise<void>;
}
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.StorageLayer = exports.RemoteStorageLayer = void 0;
|
|
7
|
+
const lodash_1 = __importDefault(require("lodash"));
|
|
8
|
+
const logger_1 = require("../logger");
|
|
9
|
+
const logger = logger_1.defaultLogger.child({ name: 'layer' });
|
|
10
|
+
/**
 * Storage layer that fetches values from a remote chain node at a fixed block,
 * with an optional local database used as a persistent read-through cache.
 */
class RemoteStorageLayer {
    #api;
    #at; // block hash this layer reads at
    #db; // optional TypeORM DataSource used as a cache; may be undefined
    constructor(api, at, db) {
        this.#api = api;
        this.#at = at;
        this.#db = db;
    }
    /**
     * Look up a storage value, preferring the local cache and falling back to
     * the remote node; remote results are written back to the cache.
     */
    async get(key) {
        if (this.#db) {
            const res = await this.#db.getRepository('KeyValuePair').findOne({ where: { key, blockHash: this.#at } });
            if (res) {
                return res.value;
            }
        }
        logger.trace({ at: this.#at, key }, 'RemoteStorageLayer get');
        const data = await this.#api.getStorage(key, this.#at);
        // Cache write-back stays off the read path, but a failed upsert must not
        // surface as an unhandled promise rejection (which can crash Node).
        this.#db
            ?.getRepository('KeyValuePair')
            .upsert({ key, blockHash: this.#at, value: data }, ['key', 'blockHash'])
            .catch((err) => logger.warn({ err, key }, 'RemoteStorageLayer cache upsert failed'));
        return data;
    }
    /** The remote layer is the root of the chain; folding keeps it as parent. */
    async foldInto(_into) {
        return this;
    }
    /** Nothing to fold: this layer holds no local modifications. */
    async fold() { }
    /** Page through storage keys under `prefix` directly from the remote node. */
    async getKeysPaged(prefix, pageSize, startKey) {
        logger.trace({ at: this.#at, prefix, pageSize, startKey }, 'RemoteStorageLayer getKeysPaged');
        return this.#api.getKeysPaged(prefix, pageSize, startKey, this.#at);
    }
}
|
|
40
|
+
exports.RemoteStorageLayer = RemoteStorageLayer;
|
|
41
|
+
// In-memory overlay of storage changes on top of an optional parent layer.
// Layers form a chain (temp block -> parent block -> ... -> RemoteStorageLayer);
// reads walk up the chain, writes stay in the topmost layer.
class StorageLayer {
    // key -> value; may hold an un-awaited Promise when cached from the parent
    #store = {};
    // sorted list of keys with live values in this layer (used for paging)
    #keys = [];
    // prefixes under which every key is considered deleted
    #deletedPrefix = [];
    #parent;
    constructor(parent) {
        this.#parent = parent;
    }
    // Insert `key` into the sorted key list, keeping it duplicate-free.
    #addKey(key) {
        const idx = lodash_1.default.sortedIndex(this.#keys, key);
        const key2 = this.#keys[idx];
        if (key === key2) {
            return;
        }
        this.#keys.splice(idx, 0, key);
    }
    // Remove `key` from the sorted key list if present.
    #removeKey(key) {
        const idx = lodash_1.default.sortedIndex(this.#keys, key);
        const key2 = this.#keys[idx];
        if (key === key2) {
            this.#keys.splice(idx, 1);
        }
    }
    /**
     * Resolve `key` in this layer, falling back to the parent chain.
     * When `cache` is true, the parent's (possibly still pending) result is
     * memoized into this layer's store.
     */
    async get(key, cache) {
        if (key in this.#store) {
            return this.#store[key];
        }
        if (this.#deletedPrefix.some((prefix) => key.startsWith(prefix))) {
            return "Deleted" /* StorageValueKind.Deleted */;
        }
        if (this.#parent) {
            // `val` is a Promise; it is stored un-awaited and awaited by consumers.
            const val = this.#parent.get(key, false);
            if (cache) {
                this.#store[key] = val;
            }
            return val;
        }
        return undefined;
    }
    /** Record a value, a deletion, or a prefix-deletion for `key`. */
    set(key, value) {
        switch (value) {
            case "Deleted" /* StorageValueKind.Deleted */:
                this.#store[key] = value;
                this.#removeKey(key);
                break;
            case "DeletedPrefix" /* StorageValueKind.DeletedPrefix */:
                // Remember the prefix and tombstone every known key underneath it.
                this.#deletedPrefix.push(key);
                for (const k of this.#keys) {
                    if (k.startsWith(key)) {
                        this.#store[k] = "Deleted" /* StorageValueKind.Deleted */;
                        this.#removeKey(k);
                    }
                }
                break;
            case undefined:
                // Drop any local override; reads fall through to the parent again.
                delete this.#store[key];
                this.#removeKey(key);
                break;
            default:
                this.#store[key] = value;
                this.#addKey(key);
                break;
        }
    }
    /**
     * Bulk set. Accepts an entries array or a plain object; `null` (or any
     * falsy) values are converted to deletions.
     */
    setAll(values) {
        if (!Array.isArray(values)) {
            values = Object.entries(values);
        }
        for (const [key, value] of values) {
            this.set(key, value || "Deleted" /* StorageValueKind.Deleted */);
        }
    }
    /**
     * Replay this layer's modifications into `into`, then return the new
     * parent for `into` (folding flattens the layer chain bottom-up).
     */
    async foldInto(into) {
        const newParent = await this.#parent?.foldInto(into);
        for (const deletedPrefix of this.#deletedPrefix) {
            into.set(deletedPrefix, "DeletedPrefix" /* StorageValueKind.DeletedPrefix */);
        }
        for (const key of this.#keys) {
            const value = await this.#store[key];
            into.set(key, value);
        }
        return newParent;
    }
    /** Flatten all ancestor layers into this one. */
    async fold() {
        if (this.#parent) {
            this.#parent = await this.#parent.foldInto(this);
        }
    }
    /**
     * Return up to `pageSize` keys sharing `prefix`, starting strictly after
     * `startKey`, merging locally-known keys with keys fetched from the parent.
     */
    async getKeysPaged(prefix, pageSize, startKey) {
        if (!this.#deletedPrefix.some((prefix) => startKey.startsWith(prefix))) {
            await this.fold();
            // TODO: maintain a list of fetched ranges to avoid fetching the same range multiple times
            const remote = (await this.#parent?.getKeysPaged(prefix, pageSize, startKey)) ?? [];
            for (const key of remote) {
                if (this.#deletedPrefix.some((prefix) => key.startsWith(prefix))) {
                    continue;
                }
                this.#addKey(key);
            }
        }
        let idx = lodash_1.default.sortedIndex(this.#keys, startKey);
        if (this.#keys[idx] === startKey) {
            ++idx; // startKey itself is excluded from the page
        }
        const res = [];
        while (res.length < pageSize) {
            const key = this.#keys[idx];
            if (!key || !key.startsWith(prefix)) {
                break;
            }
            res.push(key);
            ++idx;
        }
        return res;
    }
    /** Await all stored values and write them into `into` (Deleted -> null). */
    async mergeInto(into) {
        for (const [key, maybeValue] of Object.entries(this.#store)) {
            const value = await maybeValue;
            if (value === "Deleted" /* StorageValueKind.Deleted */) {
                into[key] = null;
            }
            else {
                into[key] = value;
            }
        }
    }
}
|
|
168
|
+
exports.StorageLayer = StorageLayer;
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { Blockchain } from '.';
|
|
2
|
+
import { InherentProvider } from './inherents';
|
|
3
|
+
/** Strategy controlling when queued extrinsics are built into a block. */
export declare enum BuildBlockMode {
    Batch = 0,
    Instant = 1,
    Manual = 2
}
/** Pool of pending extrinsics that drives block production for a Blockchain. */
export declare class TxPool {
    #private;
    constructor(chain: Blockchain, inherentProvider: InherentProvider, mode?: BuildBlockMode);
    /** Queue a hex-encoded extrinsic; may trigger a build depending on mode. */
    submitExtrinsic(extrinsic: string): void;
    /** Build a new block from all currently queued extrinsics. */
    buildBlock(): Promise<void>;
}
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.TxPool = exports.BuildBlockMode = void 0;
|
|
7
|
+
const util_1 = require("@polkadot/util");
|
|
8
|
+
const lodash_1 = __importDefault(require("lodash"));
|
|
9
|
+
const block_1 = require("./block");
|
|
10
|
+
const shared_1 = require("../rpc/shared");
|
|
11
|
+
const utils_1 = require("../utils");
|
|
12
|
+
const logger_1 = require("../logger");
|
|
13
|
+
const logger = logger_1.defaultLogger.child({ name: 'txpool' });
|
|
14
|
+
// Runtime mirror of the TypeScript BuildBlockMode enum:
// Batch = debounce submissions, Instant = one block per submission,
// Manual = build only when buildBlock() is called explicitly.
var BuildBlockMode;
(function (BuildBlockMode) {
    BuildBlockMode[BuildBlockMode["Batch"] = 0] = "Batch";
    BuildBlockMode[BuildBlockMode["Instant"] = 1] = "Instant";
    BuildBlockMode[BuildBlockMode["Manual"] = 2] = "Manual";
})(BuildBlockMode = exports.BuildBlockMode || (exports.BuildBlockMode = {}));
|
|
20
|
+
/**
 * Read the consensus engine id from the first digest log of a header
 * (expected to be a PreRuntime entry). Returns undefined when the header
 * carries no digest logs.
 */
const getConsensus = (header) => {
    const { logs } = header.digest;
    if (logs.length === 0) {
        return undefined;
    }
    const [consensusEngine] = logs[0].asPreRuntime;
    return { consensusEngine, rest: logs.slice(1) };
};
|
|
27
|
+
// Pool of pending extrinsics; builds new blocks by replaying them (plus
// inherents) against a temporary block via the wasm runtime.
class TxPool {
    #chain;
    #pool = []; // pending hex-encoded extrinsics
    #mode; // BuildBlockMode controlling when blocks are produced
    #inherentProvider;
    // Chained promise that serializes block building (one build at a time).
    #lastBuildBlockPromise = Promise.resolve();
    constructor(chain, inherentProvider, mode = BuildBlockMode.Batch) {
        this.#chain = chain;
        this.#mode = mode;
        this.#inherentProvider = inherentProvider;
    }
    // Queue an extrinsic and trigger block production according to the mode.
    submitExtrinsic(extrinsic) {
        this.#pool.push(extrinsic);
        switch (this.#mode) {
            case BuildBlockMode.Batch:
                this.#batchBuildBlock();
                break;
            case BuildBlockMode.Instant:
                this.buildBlock();
                break;
            case BuildBlockMode.Manual:
                // does nothing
                break;
        }
    }
    // Debounced build: waits 100ms for more submissions, at most 1s total.
    #batchBuildBlock = lodash_1.default.debounce(this.buildBlock, 100, { maxWait: 1000 });
    async buildBlock() {
        // Chain onto the previous build so blocks are produced strictly in order.
        const last = this.#lastBuildBlockPromise;
        this.#lastBuildBlockPromise = this.#buildBlock(last);
        await this.#lastBuildBlockPromise;
    }
    // Build one block on top of the current head after `wait` settles.
    async #buildBlock(wait) {
        await this.#chain.api.isReady;
        await wait.catch(() => { }); // ignore error
        const head = this.#chain.head;
        const extrinsics = this.#pool.splice(0); // drain the pool
        const meta = await head.meta;
        const parentHeader = await head.header;
        const time = this.#inherentProvider.getTimestamp();
        let newLogs = parentHeader.digest.logs;
        // Slot derived from the timestamp; slot duration = 2 * minimumPeriod.
        const expectedSlot = Math.floor(time / (meta.consts.timestamp.minimumPeriod.toNumber() * 2));
        const consensus = getConsensus(parentHeader);
        if (consensus?.consensusEngine.isAura) {
            // Replace the first PreRuntime log with one carrying the new slot.
            const newSlot = (0, util_1.compactAddLength)((0, util_1.bnToU8a)(expectedSlot, { isLe: true, bitLength: 64 }));
            newLogs = [{ PreRuntime: [consensus.consensusEngine, newSlot] }, ...consensus.rest];
        }
        else if (consensus?.consensusEngine.isBabe) {
            // trying to make a SecondaryPlainPreDigest
            const newSlot = (0, util_1.compactAddLength)((0, util_1.u8aConcat)('0x02000000', (0, util_1.bnToU8a)(expectedSlot, { isLe: true, bitLength: 64 })));
            newLogs = [{ PreRuntime: [consensus.consensusEngine, newSlot] }, ...consensus.rest];
        }
        const registry = await head.registry;
        // Placeholder roots: the real state lives in the storage layers.
        const header = registry.createType('Header', {
            parentHash: head.hash,
            number: head.number + 1,
            stateRoot: '0x0000000000000000000000000000000000000000000000000000000000000000',
            extrinsicsRoot: '0x0000000000000000000000000000000000000000000000000000000000000000',
            digest: {
                logs: newLogs,
            },
        });
        const newBlock = this.#chain.newTempBlock(head, header);
        logger.info({
            hash: head.hash,
            number: head.number,
            extrinsicsCount: extrinsics.length,
            tempHash: newBlock.hash,
        }, 'Building block');
        const resp = await newBlock.call('Core_initialize_block', header.toHex());
        logger.trace(resp.storageDiff, 'Initialize block');
        newBlock.pushStorageLayer().setAll(resp.storageDiff);
        if (meta.query.babe?.currentSlot) {
            // TODO: figure out how to generate a valid babe slot digest instead of just modify the data
            // but hey, we can get it working without a valid digest
            const key = (0, utils_1.compactHex)(meta.query.babe.currentSlot());
            newBlock.pushStorageLayer().set(key, (0, util_1.bnToHex)(expectedSlot, { isLe: true, bitLength: 64 }));
        }
        const inherents = await this.#inherentProvider.createInherents(meta, time, newBlock);
        for (const extrinsic of inherents) {
            try {
                const resp = await newBlock.call('BlockBuilder_apply_extrinsic', extrinsic);
                newBlock.pushStorageLayer().setAll(resp.storageDiff);
                logger.trace(resp.storageDiff, 'Applied inherent');
            }
            catch (e) {
                // A failed inherent aborts the whole block build.
                logger.warn('Failed to apply inherents %o %s', e, e);
                throw new shared_1.ResponseError(1, 'Failed to apply inherents');
            }
        }
        if (meta.query.parachainSystem?.validationData) {
            // this is a parachain
            const validationDataKey = (0, utils_1.compactHex)(meta.query.parachainSystem.validationData());
            const validationData = await newBlock.get(validationDataKey);
            if (!validationData) {
                // there is no set validation data inherent
                // so we need to restore the old validation data to make the on_finalize check happy
                const oldValidationData = await head.get(validationDataKey);
                newBlock.pushStorageLayer().set(validationDataKey, oldValidationData);
            }
        }
        for (const extrinsic of extrinsics) {
            try {
                const resp = await newBlock.call('BlockBuilder_apply_extrinsic', extrinsic);
                newBlock.pushStorageLayer().setAll(resp.storageDiff);
                logger.trace(resp.storageDiff, 'Applied extrinsic');
            }
            catch (e) {
                // Failed extrinsics go back to the pool to be retried next block.
                logger.info('Failed to apply extrinsic %o %s', e, e);
                this.#pool.push(extrinsic);
            }
        }
        if (meta.query.paraInherent?.included) {
            // TODO: remove this once paraInherent.enter is implemented
            // we are relaychain, however as we have not yet implemented the paraInherent.enter
            // so need to do some trick to make the on_finalize check happy
            const paraInherentIncludedKey = (0, utils_1.compactHex)(meta.query.paraInherent.included());
            newBlock.pushStorageLayer().set(paraInherentIncludedKey, '0x01');
        }
        const resp2 = await newBlock.call('BlockBuilder_finalize_block', '0x');
        newBlock.pushStorageLayer().setAll(resp2.storageDiff);
        logger.trace(resp2.storageDiff, 'Finalize block');
        // Hash the assembled block to obtain the final (non-temporary) hash.
        const blockData = registry.createType('Block', {
            header,
            extrinsics,
        });
        const finalBlock = new block_1.Block(this.#chain, newBlock.number, blockData.hash.toHex(), head, {
            header,
            extrinsics: [...inherents, ...extrinsics],
            storage: head.storage,
        });
        const diff = await newBlock.storageDiff();
        logger.trace(diff, 'Final block');
        finalBlock.pushStorageLayer().setAll(diff);
        // Swap the temp block for the final one and advance the chain head.
        this.#chain.unregisterBlock(newBlock);
        this.#chain.setHead(finalBlock);
        logger.info({ hash: finalBlock.hash, number: finalBlock.number, prevHash: newBlock.hash }, 'Block built');
    }
}
exports.TxPool = TxPool;
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.KeyValuePair = void 0;
|
|
13
|
+
const typeorm_1 = require("typeorm");
|
|
14
|
+
// Cached storage entry keyed by (blockHash, key). Used by RemoteStorageLayer
// as a persistent cache of remote storage queries.
let KeyValuePair = class KeyValuePair {
    blockHash; // block hash the value was read at (part of the composite PK)
    key; // storage key (part of the composite PK)
    value; // cached value; nullable (a key may have no value at that block)
};
__decorate([
    (0, typeorm_1.PrimaryColumn)(),
    __metadata("design:type", String)
], KeyValuePair.prototype, "blockHash", void 0);
__decorate([
    (0, typeorm_1.PrimaryColumn)(),
    __metadata("design:type", String)
], KeyValuePair.prototype, "key", void 0);
__decorate([
    (0, typeorm_1.Column)({ nullable: true }),
    __metadata("design:type", String)
], KeyValuePair.prototype, "value", void 0);
KeyValuePair = __decorate([
    (0, typeorm_1.Entity)()
], KeyValuePair);
exports.KeyValuePair = KeyValuePair;
|
package/dist/db/index.js
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || function (mod) {
|
|
19
|
+
if (mod && mod.__esModule) return mod;
|
|
20
|
+
var result = {};
|
|
21
|
+
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
22
|
+
__setModuleDefault(result, mod);
|
|
23
|
+
return result;
|
|
24
|
+
};
|
|
25
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
26
|
+
exports.openDb = void 0;
|
|
27
|
+
require("reflect-metadata");
|
|
28
|
+
const typeorm_1 = require("typeorm");
|
|
29
|
+
const entities = __importStar(require("./entities"));
|
|
30
|
+
/**
 * Open a SQLite database at `dbPath` registered with all entities of this
 * package (schema auto-synced) and return the initialized DataSource.
 */
const openDb = async (dbPath) => {
    const options = {
        type: 'sqlite',
        database: dbPath,
        entities: Object.values(entities),
        synchronize: true,
        logging: false,
    };
    const source = new typeorm_1.DataSource(options);
    await source.initialize();
    return source;
};
|
|
41
|
+
exports.openDb = openDb;
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { HexString } from '@polkadot/util/types';
|
|
2
|
+
import { run_task } from '@acala-network/chopsticks-executor';
|
|
3
|
+
/** Decoded runtime version information returned by `getRuntimeVersion`. */
export type RuntimeVersion = {
    specName: string;
    implName: string;
    authoringVersion: number;
    specVersion: number;
    implVersion: number;
    apis: [HexString, number][];
    transactionVersion: number;
    stateVersion: number;
};
/** Read the runtime version embedded in a wasm runtime blob. */
export declare const getRuntimeVersion: (code: HexString) => Promise<RuntimeVersion>;
/** Extract the runtime metadata (compact-length prefixed hex) from a wasm blob. */
export declare const getMetadata: (code: HexString) => Promise<HexString>;
/** Compute the state trie root of the given [key, value] hex entries. */
export declare const calculateStateRoot: (entries: [HexString, HexString][]) => Promise<HexString>;
export { run_task as runTask };
|
package/dist/executor.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.runTask = exports.calculateStateRoot = exports.getMetadata = exports.getRuntimeVersion = void 0;
|
|
4
|
+
const ws_1 = require("ws");
|
|
5
|
+
const util_1 = require("@polkadot/util");
|
|
6
|
+
global.WebSocket = ws_1.WebSocket;
|
|
7
|
+
const chopsticks_executor_1 = require("@acala-network/chopsticks-executor");
|
|
8
|
+
Object.defineProperty(exports, "runTask", { enumerable: true, get: function () { return chopsticks_executor_1.run_task; } });
|
|
9
|
+
const utils_1 = require("./utils");
|
|
10
|
+
// Decode the runtime version from a wasm blob. specName/implName come back
// hex-encoded from the executor and are converted to plain strings here.
const getRuntimeVersion = async (code) => {
    return (0, chopsticks_executor_1.get_runtime_version)(code).then((version) => {
        version.specName = (0, util_1.hexToString)(version.specName);
        version.implName = (0, util_1.hexToString)(version.implName);
        return version;
    });
};
exports.getRuntimeVersion = getRuntimeVersion;
// Extract metadata from a wasm blob, re-encoded with a compact length prefix.
const getMetadata = async (code) => {
    return (0, utils_1.compactHex)((0, util_1.hexToU8a)(await (0, chopsticks_executor_1.get_metadata)(code)));
};
exports.getMetadata = getMetadata;
// Compute the state root of the given [key, value] hex entries.
const calculateStateRoot = async (entries) => {
    return (0, chopsticks_executor_1.calculate_state_root)(entries);
};
exports.calculateStateRoot = calculateStateRoot;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const metadata_1 = require("@polkadot/types/metadata");
|
|
7
|
+
const types_1 = require("@polkadot/types");
|
|
8
|
+
const executor_1 = require("./executor");
|
|
9
|
+
const decorate_1 = require("@polkadot/types/metadata/decorate");
|
|
10
|
+
const vitest_1 = require("vitest");
|
|
11
|
+
const node_fs_1 = require("node:fs");
|
|
12
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
13
|
+
// Tests for the wasm executor bridge: metadata extraction, runtime version
// decoding, and state-root calculation against a bundled example runtime.
(0, vitest_1.describe)('wasm', () => {
    (0, vitest_1.it)('get metadata from wasm runtime', async () => {
        const code = String((0, node_fs_1.readFileSync)(node_path_1.default.join(__dirname, 'runtime.example'))).trim();
        (0, vitest_1.expect)(code.length).toBeGreaterThan(2);
        const registry = new types_1.TypeRegistry();
        const metadata = new metadata_1.Metadata(registry, await (0, executor_1.getMetadata)(code));
        registry.setMetadata(metadata);
        (0, vitest_1.expect)(registry.metadata.pallets.length).toBeGreaterThan(0);
        const storage = (0, decorate_1.decorateStorage)(registry, metadata.asLatest, metadata.version);
        // BUGFIX: the matcher was previously referenced without being called
        // (`.toBeTruthy;`), so this assertion never actually ran.
        (0, vitest_1.expect)(storage.system).toBeTruthy();
    });
    (0, vitest_1.it)('get runtime version from wasm runtime', async () => {
        const code = String((0, node_fs_1.readFileSync)(node_path_1.default.join(__dirname, 'runtime.example'))).trim();
        (0, vitest_1.expect)(code.length).toBeGreaterThan(2);
        const expectedRuntimeVersion = {
            specName: 'acala',
            implName: 'acala',
            authoringVersion: 1,
            specVersion: 2000,
            implVersion: 0,
            apis: [
                ['0xdf6acb689907609b', 3],
                ['0x37e397fc7c91f5e4', 1],
                ['0x40fe3ad401f8959a', 5],
                ['0xd2bc9897eed08f15', 3],
                ['0xf78b278be53f454c', 2],
                ['0xdd718d5cc53262d4', 1],
                ['0xab3c0572291feb8b', 1],
                ['0xbc9d89904f5b923f', 1],
                ['0x37c8bb1350a9a2a8', 1],
                ['0x6ef953004ba30e59', 1],
                ['0xf485c9145d3f0aad', 1],
                ['0xe3df3f2aa8a5cc57', 1],
                ['0xea93e3f16f3d6962', 1],
            ],
            transactionVersion: 1,
            stateVersion: 0,
        };
        (0, vitest_1.expect)(await (0, executor_1.getRuntimeVersion)(code)).toMatchObject(expectedRuntimeVersion);
    });
    (0, vitest_1.it)('calculate state root', async () => {
        // Different values for the same key must yield different state roots.
        const a = await (0, executor_1.calculateStateRoot)([['0x5301bf5ff0298f5c7b93a446709f8e885f772afdd0d8ba3d4d559a06f0742f12', '0x01']]);
        const b = await (0, executor_1.calculateStateRoot)([['0x5301bf5ff0298f5c7b93a446709f8e885f772afdd0d8ba3d4d559a06f0742f12', '0x02']]);
        (0, vitest_1.expect)(a).to.not.eq(b);
    });
});
|