@acala-network/chopsticks 0.5.3 → 0.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/lib/api.js +1 -5
  2. package/lib/blockchain/block-builder.js +36 -43
  3. package/lib/blockchain/block.js +24 -28
  4. package/lib/blockchain/head-state.js +9 -14
  5. package/lib/blockchain/index.js +24 -28
  6. package/lib/blockchain/inherent/index.js +11 -20
  7. package/lib/blockchain/inherent/para-enter.js +3 -7
  8. package/lib/blockchain/inherent/parachain/babe-randomness.js +3 -7
  9. package/lib/blockchain/inherent/parachain/nimbus-author-inherent.js +3 -7
  10. package/lib/blockchain/inherent/parachain/validation-data.js +26 -33
  11. package/lib/blockchain/storage-layer.js +8 -16
  12. package/lib/blockchain/txpool.js +13 -20
  13. package/lib/cli.js +30 -35
  14. package/lib/db/entities.js +6 -9
  15. package/lib/db/index.js +5 -32
  16. package/lib/dry-run-preimage.js +23 -27
  17. package/lib/dry-run.js +12 -16
  18. package/lib/executor.js +24 -36
  19. package/lib/genesis-provider.js +17 -24
  20. package/lib/index.js +7 -31
  21. package/lib/logger.js +3 -10
  22. package/lib/rpc/dev/dry-run.js +21 -28
  23. package/lib/rpc/dev/index.js +14 -16
  24. package/lib/rpc/index.js +9 -16
  25. package/lib/rpc/shared.js +3 -7
  26. package/lib/rpc/substrate/author.js +8 -10
  27. package/lib/rpc/substrate/chain.js +5 -7
  28. package/lib/rpc/substrate/index.js +11 -16
  29. package/lib/rpc/substrate/payment.js +7 -9
  30. package/lib/rpc/substrate/state.js +5 -7
  31. package/lib/rpc/substrate/system.js +6 -11
  32. package/lib/run-block.js +12 -16
  33. package/lib/schema/index.js +22 -25
  34. package/lib/server.js +8 -35
  35. package/lib/setup-with-server.js +8 -12
  36. package/lib/setup.js +26 -30
  37. package/lib/utils/decoder.js +16 -25
  38. package/lib/utils/generate-html-diff.js +12 -20
  39. package/lib/utils/import-storage.js +11 -19
  40. package/lib/utils/index.js +10 -19
  41. package/lib/utils/open-html.js +3 -7
  42. package/lib/utils/proof.js +9 -17
  43. package/lib/utils/set-storage.js +10 -14
  44. package/lib/utils/time-travel.js +21 -28
  45. package/lib/xcm/downward.js +11 -15
  46. package/lib/xcm/horizontal.js +7 -11
  47. package/lib/xcm/index.js +14 -19
  48. package/lib/xcm/upward.js +6 -10
  49. package/package.json +2 -2
package/lib/api.js CHANGED
@@ -1,7 +1,4 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Api = void 0;
4
- class Api {
1
+ export class Api {
5
2
  #provider;
6
3
  #ready;
7
4
  #chain;
@@ -76,4 +73,3 @@ class Api {
76
73
  return this.#provider.send('state_getKeysPaged', [prefix, pageSize, startKey]);
77
74
  }
78
75
  }
79
- exports.Api = Api;
@@ -1,13 +1,10 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.dryRunInherents = exports.dryRunExtrinsic = exports.buildBlock = exports.newHeader = void 0;
4
- const block_1 = require("./block");
5
- const util_crypto_1 = require("@polkadot/util-crypto");
6
- const util_1 = require("@polkadot/util");
7
- const utils_1 = require("../utils");
8
- const logger_1 = require("../logger");
9
- const time_travel_1 = require("../utils/time-travel");
10
- const logger = logger_1.defaultLogger.child({ name: 'block-builder' });
1
+ import { Block } from './block';
2
+ import { blake2AsHex } from '@polkadot/util-crypto';
3
+ import { compactAddLength, hexToU8a, stringToHex } from '@polkadot/util';
4
+ import { compactHex } from '../utils';
5
+ import { defaultLogger, truncate } from '../logger';
6
+ import { getCurrentSlot } from '../utils/time-travel';
7
+ const logger = defaultLogger.child({ name: 'block-builder' });
11
8
  const getConsensus = (header) => {
12
9
  if (header.digest.logs.length === 0)
13
10
  return;
@@ -42,24 +39,24 @@ const getNewSlot = (digest, slotNumber) => {
42
39
  }
43
40
  return digest.toJSON();
44
41
  };
45
- const newHeader = async (head) => {
42
+ export const newHeader = async (head) => {
46
43
  const meta = await head.meta;
47
44
  const parentHeader = await head.header;
48
45
  let newLogs = parentHeader.digest.logs;
49
46
  const consensus = getConsensus(parentHeader);
50
47
  if (consensus?.consensusEngine.isAura) {
51
- const slot = await (0, time_travel_1.getCurrentSlot)(head.chain);
52
- const newSlot = (0, util_1.compactAddLength)(meta.registry.createType('Slot', slot + 1).toU8a());
48
+ const slot = await getCurrentSlot(head.chain);
49
+ const newSlot = compactAddLength(meta.registry.createType('Slot', slot + 1).toU8a());
53
50
  newLogs = [{ PreRuntime: [consensus.consensusEngine, newSlot] }, ...consensus.rest];
54
51
  }
55
52
  else if (consensus?.consensusEngine.isBabe) {
56
- const slot = await (0, time_travel_1.getCurrentSlot)(head.chain);
53
+ const slot = await getCurrentSlot(head.chain);
57
54
  const digest = meta.registry.createType('RawBabePreDigest', consensus.slot);
58
- const newSlot = (0, util_1.compactAddLength)(meta.registry.createType('RawBabePreDigest', getNewSlot(digest, slot + 1)).toU8a());
55
+ const newSlot = compactAddLength(meta.registry.createType('RawBabePreDigest', getNewSlot(digest, slot + 1)).toU8a());
59
56
  newLogs = [{ PreRuntime: [consensus.consensusEngine, newSlot] }, ...consensus.rest];
60
57
  }
61
58
  else if (consensus?.consensusEngine?.toString() == 'nmbs') {
62
- const nmbsKey = (0, util_1.stringToHex)('nmbs');
59
+ const nmbsKey = stringToHex('nmbs');
63
60
  newLogs = [
64
61
  {
65
62
  // Using previous block author
@@ -75,7 +72,7 @@ const newHeader = async (head) => {
75
72
  if (meta.query.randomness) {
76
73
  // TODO: shouldn't modify existing head
77
74
  // reset notFirstBlock so randomness will skip validation
78
- head.pushStorageLayer().set((0, utils_1.compactHex)(meta.query.randomness.notFirstBlock()), "Deleted" /* StorageValueKind.Deleted */);
75
+ head.pushStorageLayer().set(compactHex(meta.query.randomness.notFirstBlock()), "Deleted" /* StorageValueKind.Deleted */);
79
76
  }
80
77
  }
81
78
  const header = meta.registry.createType('Header', {
@@ -89,18 +86,17 @@ const newHeader = async (head) => {
89
86
  });
90
87
  return header;
91
88
  };
92
- exports.newHeader = newHeader;
93
89
  const initNewBlock = async (head, header, inherents) => {
94
90
  const blockNumber = header.number.toNumber();
95
91
  const hash = `0x${Math.round(Math.random() * 100000000)
96
92
  .toString(16)
97
93
  .padEnd(64, '0')}`;
98
- const newBlock = new block_1.Block(head.chain, blockNumber, hash, head, { header, extrinsics: [], storage: head.storage });
94
+ const newBlock = new Block(head.chain, blockNumber, hash, head, { header, extrinsics: [], storage: head.storage });
99
95
  {
100
96
  // initialize block
101
97
  const { storageDiff } = await newBlock.call('Core_initialize_block', [header.toHex()]);
102
98
  newBlock.pushStorageLayer().setAll(storageDiff);
103
- logger.trace((0, logger_1.truncate)(storageDiff), 'Initialize block');
99
+ logger.trace(truncate(storageDiff), 'Initialize block');
104
100
  }
105
101
  const layers = [];
106
102
  // apply inherents
@@ -110,7 +106,7 @@ const initNewBlock = async (head, header, inherents) => {
110
106
  const layer = newBlock.pushStorageLayer();
111
107
  layer.setAll(storageDiff);
112
108
  layers.push(layer);
113
- logger.trace((0, logger_1.truncate)(storageDiff), 'Applied inherent');
109
+ logger.trace(truncate(storageDiff), 'Applied inherent');
114
110
  }
115
111
  catch (e) {
116
112
  logger.warn('Failed to apply inherents %o %s', e, e);
@@ -122,9 +118,9 @@ const initNewBlock = async (head, header, inherents) => {
122
118
  layers: layers,
123
119
  };
124
120
  };
125
- const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicError) => {
121
+ export const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicError) => {
126
122
  const registry = await head.registry;
127
- const header = await (0, exports.newHeader)(head);
123
+ const header = await newHeader(head);
128
124
  const { block: newBlock } = await initNewBlock(head, header, inherents);
129
125
  logger.info({
130
126
  number: newBlock.number,
@@ -145,8 +141,8 @@ const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicErro
145
141
  const messages = meta.registry.createType('Vec<Bytes>', upwardMessages);
146
142
  // TODO: make sure we append instead of replace
147
143
  layer.setAll([
148
- [(0, utils_1.compactHex)(meta.query.ump.relayDispatchQueues(paraId)), messages.toHex()],
149
- [(0, utils_1.compactHex)(meta.query.ump.relayDispatchQueueSize(paraId)), queueSize.toHex()],
144
+ [compactHex(meta.query.ump.relayDispatchQueues(paraId)), messages.toHex()],
145
+ [compactHex(meta.query.ump.relayDispatchQueueSize(paraId)), queueSize.toHex()],
150
146
  ]);
151
147
  }
152
148
  logger.trace({
@@ -155,7 +151,7 @@ const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicErro
155
151
  ump,
156
152
  }, 'Upward messages');
157
153
  const needsDispatch = meta.registry.createType('Vec<u32>', Object.keys(ump));
158
- layer.set((0, utils_1.compactHex)(meta.query.ump.needsDispatch()), needsDispatch.toHex());
154
+ layer.set(compactHex(meta.query.ump.needsDispatch()), needsDispatch.toHex());
159
155
  }
160
156
  // apply extrinsics
161
157
  for (const extrinsic of extrinsics) {
@@ -167,7 +163,7 @@ const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicErro
167
163
  continue;
168
164
  }
169
165
  newBlock.pushStorageLayer().setAll(storageDiff);
170
- logger.trace((0, logger_1.truncate)(storageDiff), 'Applied extrinsic');
166
+ logger.trace(truncate(storageDiff), 'Applied extrinsic');
171
167
  includedExtrinsic.push(extrinsic);
172
168
  }
173
169
  catch (e) {
@@ -179,15 +175,15 @@ const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicErro
179
175
  // finalize block
180
176
  const { storageDiff } = await newBlock.call('BlockBuilder_finalize_block', []);
181
177
  newBlock.pushStorageLayer().setAll(storageDiff);
182
- logger.trace((0, logger_1.truncate)(storageDiff), 'Finalize block');
178
+ logger.trace(truncate(storageDiff), 'Finalize block');
183
179
  }
184
180
  const blockData = registry.createType('Block', {
185
181
  header,
186
182
  extrinsics: includedExtrinsic,
187
183
  });
188
184
  const storageDiff = await newBlock.storageDiff();
189
- logger.trace(Object.entries(storageDiff).map(([key, value]) => [key, (0, logger_1.truncate)(value)]), 'Final block');
190
- const finalBlock = new block_1.Block(head.chain, newBlock.number, blockData.hash.toHex(), head, {
185
+ logger.trace(Object.entries(storageDiff).map(([key, value]) => [key, truncate(value)]), 'Final block');
186
+ const finalBlock = new Block(head.chain, newBlock.number, blockData.hash.toHex(), head, {
191
187
  header,
192
188
  extrinsics: [...inherents, ...includedExtrinsic],
193
189
  storage: head.storage,
@@ -195,26 +191,25 @@ const buildBlock = async (head, inherents, extrinsics, ump, onApplyExtrinsicErro
195
191
  });
196
192
  logger.info({
197
193
  hash: finalBlock.hash,
198
- extrinsics: includedExtrinsic.map((x) => (0, util_crypto_1.blake2AsHex)(x, 256)),
194
+ extrinsics: includedExtrinsic.map((x) => blake2AsHex(x, 256)),
199
195
  pendingExtrinsics: pendingExtrinsics.length,
200
196
  number: newBlock.number,
201
197
  }, `Block built #${newBlock.number.toLocaleString()} hash ${finalBlock.hash}`);
202
198
  return [finalBlock, pendingExtrinsics];
203
199
  };
204
- exports.buildBlock = buildBlock;
205
- const dryRunExtrinsic = async (head, inherents, extrinsic) => {
200
+ export const dryRunExtrinsic = async (head, inherents, extrinsic) => {
206
201
  const registry = await head.registry;
207
- const header = await (0, exports.newHeader)(head);
202
+ const header = await newHeader(head);
208
203
  const { block: newBlock } = await initNewBlock(head, header, inherents);
209
204
  if (typeof extrinsic !== 'string') {
210
205
  if (!head.chain.mockSignatureHost) {
211
206
  throw new Error('Cannot fake signature because mock signature host is not enabled. Start chain with `mockSignatureHost: true`');
212
207
  }
213
208
  const meta = await head.meta;
214
- const call = registry.createType('Call', (0, util_1.hexToU8a)(extrinsic.call));
209
+ const call = registry.createType('Call', hexToU8a(extrinsic.call));
215
210
  const generic = registry.createType('GenericExtrinsic', call);
216
- const accountRaw = await head.get((0, utils_1.compactHex)(meta.query.system.account(extrinsic.address)));
217
- const account = registry.createType('AccountInfo', (0, util_1.hexToU8a)(accountRaw));
211
+ const accountRaw = await head.get(compactHex(meta.query.system.account(extrinsic.address)));
212
+ const account = registry.createType('AccountInfo', hexToU8a(accountRaw));
218
213
  generic.signFake(extrinsic.address, {
219
214
  blockHash: head.hash,
220
215
  genesisHash: head.hash,
@@ -225,15 +220,14 @@ const dryRunExtrinsic = async (head, inherents, extrinsic) => {
225
220
  mockSignature.fill(0xcd);
226
221
  mockSignature.set([0xde, 0xad, 0xbe, 0xef]);
227
222
  generic.signature.set(mockSignature);
228
- logger_1.defaultLogger.info({ call: call.toHuman() }, 'dry_run_call');
223
+ defaultLogger.info({ call: call.toHuman() }, 'dry_run_call');
229
224
  return newBlock.call('BlockBuilder_apply_extrinsic', [generic.toHex()]);
230
225
  }
231
- logger_1.defaultLogger.info({ call: registry.createType('GenericExtrinsic', (0, util_1.hexToU8a)(extrinsic)).toHuman() }, 'dry_run_extrinsic');
226
+ defaultLogger.info({ call: registry.createType('GenericExtrinsic', hexToU8a(extrinsic)).toHuman() }, 'dry_run_extrinsic');
232
227
  return newBlock.call('BlockBuilder_apply_extrinsic', [extrinsic]);
233
228
  };
234
- exports.dryRunExtrinsic = dryRunExtrinsic;
235
- const dryRunInherents = async (head, inherents) => {
236
- const header = await (0, exports.newHeader)(head);
229
+ export const dryRunInherents = async (head, inherents) => {
230
+ const header = await newHeader(head);
237
231
  const { layers } = await initNewBlock(head, header, inherents);
238
232
  const stoarge = {};
239
233
  for (const layer of layers) {
@@ -241,4 +235,3 @@ const dryRunInherents = async (head, inherents) => {
241
235
  }
242
236
  return Object.entries(stoarge);
243
237
  };
244
- exports.dryRunInherents = dryRunInherents;
@@ -1,14 +1,11 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Block = void 0;
4
- const types_1 = require("@polkadot/types");
5
- const metadata_1 = require("@polkadot/types/metadata");
6
- const util_1 = require("@polkadot/types-known/util");
7
- const util_2 = require("@polkadot/util");
8
- const storage_layer_1 = require("./storage-layer");
9
- const utils_1 = require("../utils");
10
- const executor_1 = require("../executor");
11
- class Block {
1
+ import { Metadata, TypeRegistry } from '@polkadot/types';
2
+ import { expandMetadata } from '@polkadot/types/metadata';
3
+ import { getSpecExtensions, getSpecHasher, getSpecTypes } from '@polkadot/types-known/util';
4
+ import { hexToU8a, objectSpread, stringToHex } from '@polkadot/util';
5
+ import { RemoteStorageLayer, StorageLayer } from './storage-layer';
6
+ import { compactHex } from '../utils';
7
+ import { getRuntimeVersion, runTask, taskHandler } from '../executor';
8
+ export class Block {
12
9
  number;
13
10
  hash;
14
11
  #chain;
@@ -29,12 +26,12 @@ class Block {
29
26
  this.#parentBlock = parentBlock;
30
27
  this.#header = block?.header;
31
28
  this.#extrinsics = block?.extrinsics;
32
- this.#baseStorage = block?.storage ?? new storage_layer_1.RemoteStorageLayer(chain.api, hash, chain.db);
29
+ this.#baseStorage = block?.storage ?? new RemoteStorageLayer(chain.api, hash, chain.db);
33
30
  this.#storages = [];
34
31
  const storageDiff = block?.storageDiff;
35
32
  if (storageDiff) {
36
33
  // if code doesn't change then reuse parent block's meta
37
- if (!storageDiff?.[(0, util_2.stringToHex)(':code')]) {
34
+ if (!storageDiff?.[stringToHex(':code')]) {
38
35
  this.#runtimeVersion = parentBlock?.runtimeVersion;
39
36
  this.#metadata = parentBlock?.metadata;
40
37
  this.#registry = parentBlock?.registry;
@@ -80,7 +77,7 @@ class Block {
80
77
  }
81
78
  }
82
79
  async getKeysPaged(options) {
83
- const layer = new storage_layer_1.StorageLayer(this.storage);
80
+ const layer = new StorageLayer(this.storage);
84
81
  await layer.fold();
85
82
  const prefix = options.prefix ?? '0x';
86
83
  const startKey = options.startKey ?? prefix;
@@ -88,7 +85,7 @@ class Block {
88
85
  return layer.getKeysPaged(prefix, pageSize, startKey);
89
86
  }
90
87
  pushStorageLayer() {
91
- const layer = new storage_layer_1.StorageLayer(this.storage);
88
+ const layer = new StorageLayer(this.storage);
92
89
  this.#storages.push(layer);
93
90
  return layer;
94
91
  }
@@ -105,7 +102,7 @@ class Block {
105
102
  get wasm() {
106
103
  if (!this.#wasm) {
107
104
  this.#wasm = (async () => {
108
- const wasmKey = (0, util_2.stringToHex)(':code');
105
+ const wasmKey = stringToHex(':code');
109
106
  const wasm = await this.get(wasmKey);
110
107
  if (!wasm) {
111
108
  throw new Error('No wasm found');
@@ -116,7 +113,7 @@ class Block {
116
113
  return this.#wasm;
117
114
  }
118
115
  setWasm(wasm) {
119
- const wasmKey = (0, util_2.stringToHex)(':code');
116
+ const wasmKey = stringToHex(':code');
120
117
  this.pushStorageLayer().set(wasmKey, wasm);
121
118
  this.#wasm = Promise.resolve(wasm);
122
119
  this.#runtimeVersion = undefined;
@@ -132,12 +129,12 @@ class Block {
132
129
  this.#chain.api.chain,
133
130
  this.runtimeVersion,
134
131
  ]).then(([data, properties, chain, version]) => {
135
- const registry = new types_1.TypeRegistry(this.hash);
132
+ const registry = new TypeRegistry(this.hash);
136
133
  registry.setKnownTypes(this.chain.registeredTypes);
137
134
  registry.setChainProperties(registry.createType('ChainProperties', properties));
138
- registry.register((0, util_1.getSpecTypes)(registry, chain, version.specName, version.specVersion));
139
- registry.setHasher((0, util_1.getSpecHasher)(registry, chain, version.specName));
140
- registry.setMetadata(new types_1.Metadata(registry, data), undefined, (0, util_2.objectSpread)({}, (0, util_1.getSpecExtensions)(registry, chain, version.specName), this.#chain.api.signedExtensions));
135
+ registry.register(getSpecTypes(registry, chain, version.specName, version.specVersion));
136
+ registry.setHasher(getSpecHasher(registry, chain, version.specName));
137
+ registry.setMetadata(new Metadata(registry, data), undefined, objectSpread({}, getSpecExtensions(registry, chain, version.specName), this.#chain.api.signedExtensions));
141
138
  return registry;
142
139
  });
143
140
  }
@@ -145,34 +142,34 @@ class Block {
145
142
  }
146
143
  get runtimeVersion() {
147
144
  if (!this.#runtimeVersion) {
148
- this.#runtimeVersion = this.wasm.then(executor_1.getRuntimeVersion);
145
+ this.#runtimeVersion = this.wasm.then(getRuntimeVersion);
149
146
  }
150
147
  return this.#runtimeVersion;
151
148
  }
152
149
  get metadata() {
153
150
  if (!this.#metadata) {
154
- this.#metadata = this.call('Metadata_metadata', []).then((resp) => (0, utils_1.compactHex)((0, util_2.hexToU8a)(resp.result)));
151
+ this.#metadata = this.call('Metadata_metadata', []).then((resp) => compactHex(hexToU8a(resp.result)));
155
152
  }
156
153
  return this.#metadata;
157
154
  }
158
155
  get meta() {
159
156
  if (!this.#meta) {
160
157
  this.#meta = Promise.all([this.registry, this.metadata]).then(([registry, metadataStr]) => {
161
- const metadata = new types_1.Metadata(registry, metadataStr);
162
- return (0, metadata_1.expandMetadata)(registry, metadata);
158
+ const metadata = new Metadata(registry, metadataStr);
159
+ return expandMetadata(registry, metadata);
163
160
  });
164
161
  }
165
162
  return this.#meta;
166
163
  }
167
164
  async call(method, args, storage = []) {
168
165
  const wasm = await this.wasm;
169
- const response = await (0, executor_1.runTask)({
166
+ const response = await runTask({
170
167
  wasm,
171
168
  calls: [[method, args]],
172
169
  storage,
173
170
  mockSignatureHost: this.#chain.mockSignatureHost,
174
171
  allowUnresolvedImports: this.#chain.allowUnresolvedImports,
175
- }, (0, executor_1.taskHandler)(this));
172
+ }, taskHandler(this));
176
173
  if (response.Call)
177
174
  return response.Call;
178
175
  if (response.Error)
@@ -180,4 +177,3 @@ class Block {
180
177
  throw Error('Unexpected response');
181
178
  }
182
179
  }
183
- exports.Block = Block;
@@ -1,12 +1,8 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.HeadState = exports.randomId = void 0;
4
- const util_1 = require("@polkadot/util");
5
- const logger_1 = require("../logger");
6
- const randomId = () => Math.random().toString(36).substring(2);
7
- exports.randomId = randomId;
8
- const logger = logger_1.defaultLogger.child({ name: 'head-state' });
9
- class HeadState {
1
+ import { stringToHex } from '@polkadot/util';
2
+ import { defaultLogger } from '../logger';
3
+ export const randomId = () => Math.random().toString(36).substring(2);
4
+ const logger = defaultLogger.child({ name: 'head-state' });
5
+ export class HeadState {
10
6
  #headListeners = {};
11
7
  #storageListeners = {};
12
8
  #oldValues = {};
@@ -15,7 +11,7 @@ class HeadState {
15
11
  this.#head = head;
16
12
  }
17
13
  subscribeHead(cb) {
18
- const id = (0, exports.randomId)();
14
+ const id = randomId();
19
15
  this.#headListeners[id] = cb;
20
16
  return id;
21
17
  }
@@ -23,7 +19,7 @@ class HeadState {
23
19
  delete this.#headListeners[id];
24
20
  }
25
21
  async subscribeStorage(keys, cb) {
26
- const id = (0, exports.randomId)();
22
+ const id = randomId();
27
23
  this.#storageListeners[id] = [keys, cb];
28
24
  for (const key of keys) {
29
25
  this.#oldValues[key] = await this.#head.get(key);
@@ -34,8 +30,8 @@ class HeadState {
34
30
  delete this.#storageListeners[id];
35
31
  }
36
32
  async subscrubeRuntimeVersion(cb) {
37
- const id = (0, exports.randomId)();
38
- const codeKey = (0, util_1.stringToHex)(':code');
33
+ const id = randomId();
34
+ const codeKey = stringToHex(':code');
39
35
  this.#storageListeners[id] = [[codeKey], cb];
40
36
  this.#oldValues[codeKey] = await this.#head.get(codeKey);
41
37
  return id;
@@ -68,4 +64,3 @@ class HeadState {
68
64
  Object.assign(this.#oldValues, diff);
69
65
  }
70
66
  }
71
- exports.HeadState = HeadState;
@@ -1,16 +1,13 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Blockchain = void 0;
4
- const util_crypto_1 = require("@polkadot/util-crypto");
5
- const util_1 = require("@polkadot/util");
6
- const block_1 = require("./block");
7
- const txpool_1 = require("./txpool");
8
- const head_state_1 = require("./head-state");
9
- const utils_1 = require("../utils");
10
- const logger_1 = require("../logger");
11
- const block_builder_1 = require("./block-builder");
12
- const logger = logger_1.defaultLogger.child({ name: 'blockchain' });
13
- class Blockchain {
1
+ import { blake2AsHex } from '@polkadot/util-crypto';
2
+ import { u8aConcat, u8aToHex } from '@polkadot/util';
3
+ import { Block } from './block';
4
+ import { TxPool } from './txpool';
5
+ import { HeadState } from './head-state';
6
+ import { compactHex } from '../utils';
7
+ import { defaultLogger } from '../logger';
8
+ import { dryRunExtrinsic, dryRunInherents } from './block-builder';
9
+ const logger = defaultLogger.child({ name: 'blockchain' });
10
+ export class Blockchain {
14
11
  uid = Math.random().toString(36).substring(2);
15
12
  api;
16
13
  db;
@@ -30,11 +27,11 @@ class Blockchain {
30
27
  this.mockSignatureHost = mockSignatureHost;
31
28
  this.allowUnresolvedImports = allowUnresolvedImports;
32
29
  this.registeredTypes = registeredTypes;
33
- this.#head = new block_1.Block(this, header.number, header.hash);
30
+ this.#head = new Block(this, header.number, header.hash);
34
31
  this.#registerBlock(this.#head);
35
- this.#txpool = new txpool_1.TxPool(this, inherentProvider, buildBlockMode);
32
+ this.#txpool = new TxPool(this, inherentProvider, buildBlockMode);
36
33
  this.#inherentProvider = inherentProvider;
37
- this.headState = new head_state_1.HeadState(this.#head);
34
+ this.headState = new HeadState(this.#head);
38
35
  }
39
36
  #registerBlock(block) {
40
37
  this.#blocksByNumber[block.number] = block;
@@ -55,7 +52,7 @@ class Blockchain {
55
52
  }
56
53
  if (!this.#blocksByNumber[number]) {
57
54
  const hash = await this.api.getBlockHash(number);
58
- const block = new block_1.Block(this, number, hash);
55
+ const block = new Block(this, number, hash);
59
56
  this.#registerBlock(block);
60
57
  }
61
58
  return this.#blocksByNumber[number];
@@ -74,7 +71,7 @@ class Blockchain {
74
71
  const loadingBlock = (async () => {
75
72
  try {
76
73
  const header = await this.api.getHeader(hash);
77
- const block = new block_1.Block(this, Number(header.number), hash);
74
+ const block = new Block(this, Number(header.number), hash);
78
75
  this.#registerBlock(block);
79
76
  }
80
77
  catch (e) {
@@ -108,13 +105,13 @@ class Blockchain {
108
105
  }
109
106
  async submitExtrinsic(extrinsic) {
110
107
  const source = '0x02'; // External
111
- const args = (0, util_1.u8aToHex)((0, util_1.u8aConcat)(source, extrinsic, this.head.hash));
108
+ const args = u8aToHex(u8aConcat(source, extrinsic, this.head.hash));
112
109
  const res = await this.head.call('TaggedTransactionQueue_validate_transaction', [args]);
113
110
  const registry = await this.head.registry;
114
111
  const validity = registry.createType('TransactionValidity', res.result);
115
112
  if (validity.isOk) {
116
113
  await this.#txpool.submitExtrinsic(extrinsic);
117
- return (0, util_crypto_1.blake2AsHex)(extrinsic, 256);
114
+ return blake2AsHex(extrinsic, 256);
118
115
  }
119
116
  throw validity.asErr;
120
117
  }
@@ -154,7 +151,7 @@ class Blockchain {
154
151
  upwardMessages: [],
155
152
  horizontalMessages: {},
156
153
  });
157
- const { result, storageDiff } = await (0, block_builder_1.dryRunExtrinsic)(head, inherents, extrinsic);
154
+ const { result, storageDiff } = await dryRunExtrinsic(head, inherents, extrinsic);
158
155
  const outcome = registry.createType('ApplyExtrinsicResult', result);
159
156
  return { outcome, storageDiff };
160
157
  }
@@ -170,7 +167,7 @@ class Blockchain {
170
167
  upwardMessages: [],
171
168
  horizontalMessages: hrmp,
172
169
  });
173
- return (0, block_builder_1.dryRunInherents)(head, inherents);
170
+ return dryRunInherents(head, inherents);
174
171
  }
175
172
  async dryRunDmp(dmp, at) {
176
173
  await this.api.isReady;
@@ -184,7 +181,7 @@ class Blockchain {
184
181
  upwardMessages: [],
185
182
  horizontalMessages: {},
186
183
  });
187
- return (0, block_builder_1.dryRunInherents)(head, inherents);
184
+ return dryRunInherents(head, inherents);
188
185
  }
189
186
  async dryRunUmp(ump, at) {
190
187
  await this.api.isReady;
@@ -195,7 +192,7 @@ class Blockchain {
195
192
  const meta = await head.meta;
196
193
  const needsDispatch = meta.registry.createType('Vec<u32>', Object.keys(ump));
197
194
  const stroageValues = [
198
- [(0, utils_1.compactHex)(meta.query.ump.needsDispatch()), needsDispatch.toHex()],
195
+ [compactHex(meta.query.ump.needsDispatch()), needsDispatch.toHex()],
199
196
  ];
200
197
  for (const [paraId, messages] of Object.entries(ump)) {
201
198
  const upwardMessages = meta.registry.createType('Vec<Bytes>', messages);
@@ -205,8 +202,8 @@ class Blockchain {
205
202
  upwardMessages.length,
206
203
  upwardMessages.map((x) => x.byteLength).reduce((s, i) => s + i, 0),
207
204
  ]);
208
- stroageValues.push([(0, utils_1.compactHex)(meta.query.ump.relayDispatchQueues(paraId)), upwardMessages.toHex()]);
209
- stroageValues.push([(0, utils_1.compactHex)(meta.query.ump.relayDispatchQueueSize(paraId)), queueSize.toHex()]);
205
+ stroageValues.push([compactHex(meta.query.ump.relayDispatchQueues(paraId)), upwardMessages.toHex()]);
206
+ stroageValues.push([compactHex(meta.query.ump.relayDispatchQueueSize(paraId)), queueSize.toHex()]);
210
207
  }
211
208
  head.pushStorageLayer().setAll(stroageValues);
212
209
  const inherents = await this.#inherentProvider.createInherents(head, {
@@ -215,7 +212,7 @@ class Blockchain {
215
212
  upwardMessages: [],
216
213
  horizontalMessages: {},
217
214
  });
218
- return (0, block_builder_1.dryRunInherents)(head, inherents);
215
+ return dryRunInherents(head, inherents);
219
216
  }
220
217
  async getInherents() {
221
218
  await this.api.isReady;
@@ -228,4 +225,3 @@ class Blockchain {
228
225
  return inherents;
229
226
  }
230
227
  }
231
- exports.Blockchain = Blockchain;
@@ -1,26 +1,18 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.InherentProviders = exports.SetTimestamp = exports.SetNimbusAuthorInherent = exports.SetBabeRandomness = exports.ParaInherentEnter = exports.SetValidationData = void 0;
4
- const types_1 = require("@polkadot/types");
5
- const time_travel_1 = require("../../utils/time-travel");
6
- var validation_data_1 = require("./parachain/validation-data");
7
- Object.defineProperty(exports, "SetValidationData", { enumerable: true, get: function () { return validation_data_1.SetValidationData; } });
8
- var para_enter_1 = require("./para-enter");
9
- Object.defineProperty(exports, "ParaInherentEnter", { enumerable: true, get: function () { return para_enter_1.ParaInherentEnter; } });
10
- var babe_randomness_1 = require("./parachain/babe-randomness");
11
- Object.defineProperty(exports, "SetBabeRandomness", { enumerable: true, get: function () { return babe_randomness_1.SetBabeRandomness; } });
12
- var nimbus_author_inherent_1 = require("./parachain/nimbus-author-inherent");
13
- Object.defineProperty(exports, "SetNimbusAuthorInherent", { enumerable: true, get: function () { return nimbus_author_inherent_1.SetNimbusAuthorInherent; } });
14
- class SetTimestamp {
1
+ import { GenericExtrinsic } from '@polkadot/types';
2
+ import { getCurrentTimestamp, getSlotDuration } from '../../utils/time-travel';
3
+ export { SetValidationData } from './parachain/validation-data';
4
+ export { ParaInherentEnter } from './para-enter';
5
+ export { SetBabeRandomness } from './parachain/babe-randomness';
6
+ export { SetNimbusAuthorInherent } from './parachain/nimbus-author-inherent';
7
+ export class SetTimestamp {
15
8
  async createInherents(parent) {
16
9
  const meta = await parent.meta;
17
- const slotDuration = await (0, time_travel_1.getSlotDuration)(parent.chain);
18
- const currentTimestamp = await (0, time_travel_1.getCurrentTimestamp)(parent.chain);
19
- return [new types_1.GenericExtrinsic(meta.registry, meta.tx.timestamp.set(currentTimestamp + slotDuration)).toHex()];
10
+ const slotDuration = await getSlotDuration(parent.chain);
11
+ const currentTimestamp = await getCurrentTimestamp(parent.chain);
12
+ return [new GenericExtrinsic(meta.registry, meta.tx.timestamp.set(currentTimestamp + slotDuration)).toHex()];
20
13
  }
21
14
  }
22
- exports.SetTimestamp = SetTimestamp;
23
- class InherentProviders {
15
+ export class InherentProviders {
24
16
  #base;
25
17
  #providers;
26
18
  constructor(base, providers) {
@@ -33,4 +25,3 @@ class InherentProviders {
33
25
  return [...base, ...extra.flat()];
34
26
  }
35
27
  }
36
- exports.InherentProviders = InherentProviders;
@@ -1,8 +1,5 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ParaInherentEnter = void 0;
4
- const types_1 = require("@polkadot/types");
5
- class ParaInherentEnter {
1
+ import { GenericExtrinsic } from '@polkadot/types';
2
+ export class ParaInherentEnter {
6
3
  async createInherents(parent, _params) {
7
4
  const meta = await parent.meta;
8
5
  if (!meta.tx.paraInherent?.enter) {
@@ -27,7 +24,6 @@ class ParaInherentEnter {
27
24
  parentHeader,
28
25
  };
29
26
  // TODO: fill with data
30
- return [new types_1.GenericExtrinsic(meta.registry, meta.tx.paraInherent.enter(newData)).toHex()];
27
+ return [new GenericExtrinsic(meta.registry, meta.tx.paraInherent.enter(newData)).toHex()];
31
28
  }
32
29
  }
33
- exports.ParaInherentEnter = ParaInherentEnter;
@@ -1,15 +1,11 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.SetBabeRandomness = void 0;
4
- const types_1 = require("@polkadot/types");
1
+ import { GenericExtrinsic } from '@polkadot/types';
5
2
  // Support for Moonbeam pallet-randomness mandatory inherent
6
- class SetBabeRandomness {
3
+ export class SetBabeRandomness {
7
4
  async createInherents(parent, _params) {
8
5
  const meta = await parent.meta;
9
6
  if (!meta.tx.randomness?.setBabeRandomnessResults) {
10
7
  return [];
11
8
  }
12
- return [new types_1.GenericExtrinsic(meta.registry, meta.tx.randomness.setBabeRandomnessResults()).toHex()];
9
+ return [new GenericExtrinsic(meta.registry, meta.tx.randomness.setBabeRandomnessResults()).toHex()];
13
10
  }
14
11
  }
15
- exports.SetBabeRandomness = SetBabeRandomness;
@@ -1,15 +1,11 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.SetNimbusAuthorInherent = void 0;
4
- const types_1 = require("@polkadot/types");
1
+ import { GenericExtrinsic } from '@polkadot/types';
5
2
  // Support for Nimbus Author Inherent
6
- class SetNimbusAuthorInherent {
3
+ export class SetNimbusAuthorInherent {
7
4
  async createInherents(parent, _params) {
8
5
  const meta = await parent.meta;
9
6
  if (!meta.tx.authorInherent?.kickOffAuthorshipValidation) {
10
7
  return [];
11
8
  }
12
- return [new types_1.GenericExtrinsic(meta.registry, meta.tx.authorInherent.kickOffAuthorshipValidation()).toHex()];
9
+ return [new GenericExtrinsic(meta.registry, meta.tx.authorInherent.kickOffAuthorshipValidation()).toHex()];
13
10
  }
14
11
  }
15
- exports.SetNimbusAuthorInherent = SetNimbusAuthorInherent;