@lodestar/light-client 1.35.0-dev.f80d2d52da → 1.35.0-dev.fcf8d024ea
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/events.d.ts +1 -5
- package/lib/events.d.ts.map +1 -0
- package/lib/events.js.map +1 -1
- package/lib/index.d.ts +1 -1
- package/lib/index.d.ts.map +1 -0
- package/lib/index.js +10 -5
- package/lib/index.js.map +1 -1
- package/lib/spec/index.d.ts +1 -1
- package/lib/spec/index.d.ts.map +1 -0
- package/lib/spec/index.js +3 -0
- package/lib/spec/index.js.map +1 -1
- package/lib/spec/isBetterUpdate.d.ts.map +1 -0
- package/lib/spec/processLightClientUpdate.d.ts.map +1 -0
- package/lib/spec/store.d.ts.map +1 -0
- package/lib/spec/store.js +7 -3
- package/lib/spec/store.js.map +1 -1
- package/lib/spec/utils.d.ts.map +1 -0
- package/lib/spec/utils.js.map +1 -1
- package/lib/spec/validateLightClientBootstrap.d.ts.map +1 -0
- package/lib/spec/validateLightClientUpdate.d.ts.map +1 -0
- package/lib/transport/index.d.ts.map +1 -0
- package/lib/transport/interface.d.ts.map +1 -0
- package/lib/transport/rest.d.ts +1 -1
- package/lib/transport/rest.d.ts.map +1 -0
- package/lib/transport/rest.js +5 -4
- package/lib/transport/rest.js.map +1 -1
- package/lib/transport.d.ts.map +1 -0
- package/lib/types.d.ts.map +1 -0
- package/lib/utils/api.d.ts.map +1 -0
- package/lib/utils/chunkify.d.ts.map +1 -0
- package/lib/utils/clock.d.ts.map +1 -0
- package/lib/utils/domain.d.ts.map +1 -0
- package/lib/utils/index.d.ts.map +1 -0
- package/lib/utils/logger.d.ts.map +1 -0
- package/lib/utils/map.d.ts.map +1 -0
- package/lib/utils/normalizeMerkleBranch.d.ts.map +1 -0
- package/lib/utils/update.d.ts.map +1 -0
- package/lib/utils/utils.d.ts.map +1 -0
- package/lib/utils/verifyMerkleBranch.d.ts.map +1 -0
- package/lib/utils.d.ts.map +1 -0
- package/lib/validation.d.ts.map +1 -0
- package/package.json +16 -18
- package/src/events.ts +17 -0
- package/src/index.ts +340 -0
- package/src/spec/index.ts +71 -0
- package/src/spec/isBetterUpdate.ts +94 -0
- package/src/spec/processLightClientUpdate.ts +119 -0
- package/src/spec/store.ts +105 -0
- package/src/spec/utils.ts +266 -0
- package/src/spec/validateLightClientBootstrap.ts +41 -0
- package/src/spec/validateLightClientUpdate.ts +154 -0
- package/src/transport/index.ts +2 -0
- package/src/transport/interface.ts +37 -0
- package/src/transport/rest.ts +89 -0
- package/src/transport.ts +2 -0
- package/src/types.ts +20 -0
- package/src/utils/api.ts +19 -0
- package/src/utils/chunkify.ts +26 -0
- package/src/utils/clock.ts +45 -0
- package/src/utils/domain.ts +44 -0
- package/src/utils/index.ts +8 -0
- package/src/utils/logger.ts +29 -0
- package/src/utils/map.ts +20 -0
- package/src/utils/normalizeMerkleBranch.ts +15 -0
- package/src/utils/update.ts +30 -0
- package/src/utils/utils.ts +95 -0
- package/src/utils/verifyMerkleBranch.ts +29 -0
- package/src/utils.ts +2 -0
- package/src/validation.ts +201 -0
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import mitt, {Emitter as MittEmitter} from "mitt";
|
|
2
|
+
import {type ApiClient, routes} from "@lodestar/api";
|
|
3
|
+
import {type ForkName} from "@lodestar/params";
|
|
4
|
+
import {
|
|
5
|
+
LightClientBootstrap,
|
|
6
|
+
LightClientFinalityUpdate,
|
|
7
|
+
LightClientOptimisticUpdate,
|
|
8
|
+
LightClientUpdate,
|
|
9
|
+
type SyncPeriod,
|
|
10
|
+
} from "@lodestar/types";
|
|
11
|
+
import {type LightClientTransport} from "./interface.js";
|
|
12
|
+
|
|
13
|
+
export type LightClientRestEvents = {
|
|
14
|
+
[routes.events.EventType.lightClientFinalityUpdate]: (update: LightClientFinalityUpdate) => void;
|
|
15
|
+
[routes.events.EventType.lightClientOptimisticUpdate]: (update: LightClientOptimisticUpdate) => void;
|
|
16
|
+
};
|
|
17
|
+
|
|
18
|
+
export type LightClientRestEmitter = MittEmitter<LightClientRestEvents>;
|
|
19
|
+
|
|
20
|
+
export class LightClientRestTransport implements LightClientTransport {
|
|
21
|
+
private controller = new AbortController();
|
|
22
|
+
private readonly eventEmitter: LightClientRestEmitter = mitt();
|
|
23
|
+
private subscribedEventstream = false;
|
|
24
|
+
|
|
25
|
+
constructor(private readonly api: ApiClient) {}
|
|
26
|
+
|
|
27
|
+
async getUpdates(
|
|
28
|
+
startPeriod: SyncPeriod,
|
|
29
|
+
count: number
|
|
30
|
+
): Promise<
|
|
31
|
+
{
|
|
32
|
+
version: ForkName;
|
|
33
|
+
data: LightClientUpdate;
|
|
34
|
+
}[]
|
|
35
|
+
> {
|
|
36
|
+
const res = await this.api.lightclient.getLightClientUpdatesByRange({startPeriod, count});
|
|
37
|
+
const updates = res.value();
|
|
38
|
+
const {versions} = res.meta();
|
|
39
|
+
return updates.map((data, i) => ({data, version: versions[i]}));
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
async getOptimisticUpdate(): Promise<{version: ForkName; data: LightClientOptimisticUpdate}> {
|
|
43
|
+
const res = await this.api.lightclient.getLightClientOptimisticUpdate();
|
|
44
|
+
return {version: res.meta().version, data: res.value()};
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
async getFinalityUpdate(): Promise<{version: ForkName; data: LightClientFinalityUpdate}> {
|
|
48
|
+
const res = await this.api.lightclient.getLightClientFinalityUpdate();
|
|
49
|
+
return {version: res.meta().version, data: res.value()};
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
async getBootstrap(blockRoot: string): Promise<{version: ForkName; data: LightClientBootstrap}> {
|
|
53
|
+
const res = await this.api.lightclient.getLightClientBootstrap({blockRoot});
|
|
54
|
+
return {version: res.meta().version, data: res.value()};
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
onOptimisticUpdate(handler: (optimisticUpdate: LightClientOptimisticUpdate) => void): void {
|
|
58
|
+
this.subscribeEventstream();
|
|
59
|
+
this.eventEmitter.on(routes.events.EventType.lightClientOptimisticUpdate, handler);
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
onFinalityUpdate(handler: (finalityUpdate: LightClientFinalityUpdate) => void): void {
|
|
63
|
+
this.subscribeEventstream();
|
|
64
|
+
this.eventEmitter.on(routes.events.EventType.lightClientFinalityUpdate, handler);
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
private subscribeEventstream(): void {
|
|
68
|
+
if (this.subscribedEventstream) {
|
|
69
|
+
return;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
void this.api.events.eventstream({
|
|
73
|
+
topics: [routes.events.EventType.lightClientOptimisticUpdate, routes.events.EventType.lightClientFinalityUpdate],
|
|
74
|
+
signal: this.controller.signal,
|
|
75
|
+
onEvent: (event) => {
|
|
76
|
+
switch (event.type) {
|
|
77
|
+
case routes.events.EventType.lightClientOptimisticUpdate:
|
|
78
|
+
this.eventEmitter.emit(routes.events.EventType.lightClientOptimisticUpdate, event.message.data);
|
|
79
|
+
break;
|
|
80
|
+
|
|
81
|
+
case routes.events.EventType.lightClientFinalityUpdate:
|
|
82
|
+
this.eventEmitter.emit(routes.events.EventType.lightClientFinalityUpdate, event.message.data);
|
|
83
|
+
break;
|
|
84
|
+
}
|
|
85
|
+
},
|
|
86
|
+
});
|
|
87
|
+
this.subscribedEventstream = true;
|
|
88
|
+
}
|
|
89
|
+
}
|
package/src/transport.ts
ADDED
package/src/types.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type {PublicKey} from "@chainsafe/bls/types";
|
|
2
|
+
import {LightClientHeader, LightClientUpdate, SyncPeriod} from "@lodestar/types";
|
|
3
|
+
|
|
4
|
+
/** Light-client store: the current snapshot plus the best update collected per sync period. */
export type LightClientStoreFast = {
  snapshot: LightClientSnapshotFast;
  // Best LightClientUpdate seen for each sync committee period
  bestUpdates: Map<SyncPeriod, LightClientUpdate>;
};

/** Snapshot of the light-client state at a given header, with its two sync committees. */
export type LightClientSnapshotFast = {
  /** Beacon block header */
  header: LightClientHeader;
  /** Sync committees corresponding to the header */
  currentSyncCommittee: SyncCommitteeFast;
  nextSyncCommittee: SyncCommitteeFast;
};

/** Sync committee whose keys are held as deserialized BLS `PublicKey` objects rather than raw bytes. */
export type SyncCommitteeFast = {
  pubkeys: PublicKey[];
  aggregatePubkey: PublicKey;
};
|
package/src/utils/api.ts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import {ApiClient, ApiRequestInit, getClient} from "@lodestar/api";
|
|
2
|
+
import {ChainForkConfig, createChainForkConfig} from "@lodestar/config";
|
|
3
|
+
import {NetworkName, networksChainConfig} from "@lodestar/config/networks";
|
|
4
|
+
|
|
5
|
+
export function getApiFromUrl(url: string, network: NetworkName, init?: ApiRequestInit): ApiClient {
|
|
6
|
+
if (!(network in networksChainConfig)) {
|
|
7
|
+
throw Error(`Invalid network name "${network}". Valid options are: ${Object.keys(networksChainConfig).join()}`);
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
return getClient({urls: [url], globalInit: init}, {config: createChainForkConfig(networksChainConfig[network])});
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
export function getChainForkConfigFromNetwork(network: NetworkName): ChainForkConfig {
|
|
14
|
+
if (!(network in networksChainConfig)) {
|
|
15
|
+
throw Error(`Invalid network name "${network}". Valid options are: ${Object.keys(networksChainConfig).join()}`);
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return createChainForkConfig(networksChainConfig[network]);
|
|
19
|
+
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Split an inclusive range into a sequence of contiguous inclusive ranges
|
|
3
|
+
* ```
|
|
4
|
+
* [[a,b], [c,d] ... Sn] = chunkifyInclusiveRange([a,z], n)
|
|
5
|
+
* // where
|
|
6
|
+
* [a,z] = [a,b] U [c,d] U ... U Sn
|
|
7
|
+
* ```
|
|
8
|
+
* @param from range start inclusive
|
|
9
|
+
* @param to range end inclusive
|
|
10
|
+
* @param chunks Maximum number of chunks, if range is big enough
|
|
11
|
+
*/
|
|
12
|
+
export function chunkifyInclusiveRange(from: number, to: number, itemsPerChunk: number): number[][] {
|
|
13
|
+
if (itemsPerChunk < 1) itemsPerChunk = 1;
|
|
14
|
+
const totalItems = to - from + 1;
|
|
15
|
+
// Enforce chunkCount >= 1
|
|
16
|
+
const chunkCount = Math.max(Math.ceil(totalItems / itemsPerChunk), 1);
|
|
17
|
+
|
|
18
|
+
const chunks: number[][] = [];
|
|
19
|
+
for (let i = 0; i < chunkCount; i++) {
|
|
20
|
+
const _from = from + i * itemsPerChunk;
|
|
21
|
+
const _to = Math.min(from + (i + 1) * itemsPerChunk - 1, to);
|
|
22
|
+
chunks.push([_from, _to]);
|
|
23
|
+
if (_to >= to) break;
|
|
24
|
+
}
|
|
25
|
+
return chunks;
|
|
26
|
+
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import {ChainConfig} from "@lodestar/config";
|
|
2
|
+
import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SLOTS_PER_EPOCH} from "@lodestar/params";
|
|
3
|
+
import {Epoch, Slot, SyncPeriod} from "@lodestar/types";
|
|
4
|
+
|
|
5
|
+
export function getCurrentSlot(config: ChainConfig, genesisTime: number): Slot {
|
|
6
|
+
const diffInSeconds = Date.now() / 1000 - genesisTime;
|
|
7
|
+
return Math.floor(diffInSeconds / config.SECONDS_PER_SLOT);
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
/** Returns the slot if the internal clock were advanced by `toleranceSec`. */
|
|
11
|
+
export function slotWithFutureTolerance(config: ChainConfig, genesisTime: number, toleranceSec: number): Slot {
|
|
12
|
+
// this is the same to getting slot at now + toleranceSec
|
|
13
|
+
return getCurrentSlot(config, genesisTime - toleranceSec);
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Return the epoch number at the given slot.
|
|
18
|
+
*/
|
|
19
|
+
export function computeEpochAtSlot(slot: Slot): Epoch {
|
|
20
|
+
return Math.floor(slot / SLOTS_PER_EPOCH);
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Return the sync committee period at slot
|
|
25
|
+
*/
|
|
26
|
+
export function computeSyncPeriodAtSlot(slot: Slot): SyncPeriod {
|
|
27
|
+
return computeSyncPeriodAtEpoch(computeEpochAtSlot(slot));
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Return the sync committee period at epoch
|
|
32
|
+
*/
|
|
33
|
+
export function computeSyncPeriodAtEpoch(epoch: Epoch): SyncPeriod {
|
|
34
|
+
return Math.floor(epoch / EPOCHS_PER_SYNC_COMMITTEE_PERIOD);
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
export function timeUntilNextEpoch(config: Pick<ChainConfig, "SECONDS_PER_SLOT">, genesisTime: number): number {
|
|
38
|
+
const milliSecondsPerEpoch = SLOTS_PER_EPOCH * config.SECONDS_PER_SLOT * 1000;
|
|
39
|
+
const msFromGenesis = Date.now() - genesisTime * 1000;
|
|
40
|
+
if (msFromGenesis >= 0) {
|
|
41
|
+
return milliSecondsPerEpoch - (msFromGenesis % milliSecondsPerEpoch);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
return Math.abs(msFromGenesis % milliSecondsPerEpoch);
|
|
45
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
// Only used by processDeposit + lightclient
|
|
2
|
+
|
|
3
|
+
import {Type} from "@chainsafe/ssz";
|
|
4
|
+
import {Domain, DomainType, Epoch, Root, Version, phase0, ssz} from "@lodestar/types";
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Return the domain for the [[domainType]] and [[forkVersion]].
|
|
8
|
+
*/
|
|
9
|
+
export function computeDomain(domainType: DomainType, forkVersion: Version, genesisValidatorRoot: Root): Uint8Array {
|
|
10
|
+
const forkDataRoot = computeForkDataRoot(forkVersion, genesisValidatorRoot);
|
|
11
|
+
const domain = new Uint8Array(32);
|
|
12
|
+
domain.set(domainType, 0);
|
|
13
|
+
domain.set(forkDataRoot.slice(0, 28), 4);
|
|
14
|
+
return domain;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Return the ForkVersion at an epoch from a Fork type
|
|
19
|
+
*/
|
|
20
|
+
export function getForkVersion(fork: phase0.Fork, epoch: Epoch): Version {
|
|
21
|
+
return epoch < fork.epoch ? fork.previousVersion : fork.currentVersion;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Used primarily in signature domains to avoid collisions across forks/chains.
|
|
26
|
+
*/
|
|
27
|
+
export function computeForkDataRoot(currentVersion: Version, genesisValidatorsRoot: Root): Uint8Array {
|
|
28
|
+
const forkData: phase0.ForkData = {
|
|
29
|
+
currentVersion,
|
|
30
|
+
genesisValidatorsRoot,
|
|
31
|
+
};
|
|
32
|
+
return ssz.phase0.ForkData.hashTreeRoot(forkData);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Return the signing root of an object by calculating the root of the object-domain tree.
|
|
37
|
+
*/
|
|
38
|
+
export function computeSigningRoot<T>(type: Type<T>, sszObject: T, domain: Domain): Uint8Array {
|
|
39
|
+
const domainWrappedObject: phase0.SigningData = {
|
|
40
|
+
objectRoot: type.hashTreeRoot(sszObject),
|
|
41
|
+
domain,
|
|
42
|
+
};
|
|
43
|
+
return ssz.phase0.SigningData.hashTreeRoot(domainWrappedObject);
|
|
44
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* biome-ignore lint/suspicious/noExplicitAny: We need to use `any` type here
|
|
3
|
+
* biome-ignore-all lint/suspicious/noConsole: The logger need to use the console
|
|
4
|
+
* */
|
|
5
|
+
export type LogHandler = (message: string, context?: any, error?: Error) => void;
|
|
6
|
+
|
|
7
|
+
export type ILcLogger = {
|
|
8
|
+
error: LogHandler;
|
|
9
|
+
warn: LogHandler;
|
|
10
|
+
info: LogHandler;
|
|
11
|
+
debug: LogHandler;
|
|
12
|
+
};
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* With `console` module and ignoring debug logs
|
|
16
|
+
*/
|
|
17
|
+
export function getConsoleLogger(opts?: {logDebug?: boolean}): ILcLogger {
|
|
18
|
+
return {
|
|
19
|
+
error: console.error,
|
|
20
|
+
warn: console.warn,
|
|
21
|
+
info: console.log,
|
|
22
|
+
debug: opts?.logDebug ? console.log : () => {},
|
|
23
|
+
};
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* @deprecated - Use `getConsoleLogger` instead.
|
|
28
|
+
*/
|
|
29
|
+
export const getLcLoggerConsole = getConsoleLogger;
|
package/src/utils/map.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Prune an arbitrary set removing the first keys to have a set.size === maxItems.
|
|
3
|
+
* Returns the count of deleted items.
|
|
4
|
+
*/
|
|
5
|
+
export function pruneSetToMax<T>(set: Set<T> | Map<T, unknown>, maxItems: number): number {
|
|
6
|
+
let itemsToDelete = set.size - maxItems;
|
|
7
|
+
const deletedItems = Math.max(0, itemsToDelete);
|
|
8
|
+
|
|
9
|
+
if (itemsToDelete > 0) {
|
|
10
|
+
for (const key of set.keys()) {
|
|
11
|
+
set.delete(key);
|
|
12
|
+
itemsToDelete--;
|
|
13
|
+
if (itemsToDelete <= 0) {
|
|
14
|
+
break;
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
return deletedItems;
|
|
20
|
+
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import {ZERO_HASH} from "../spec/utils.js";
|
|
2
|
+
|
|
3
|
+
export const SYNC_COMMITTEES_DEPTH = 4;
|
|
4
|
+
export const SYNC_COMMITTEES_INDEX = 11;
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Given merkle branch ``branch``, extend its depth according to ``depth``
|
|
8
|
+
* If given ``depth`` is less than the depth of ``branch``, it will return
|
|
9
|
+
* unmodified ``branch``
|
|
10
|
+
*/
|
|
11
|
+
export function normalizeMerkleBranch(branch: Uint8Array[], depth: number): Uint8Array[] {
|
|
12
|
+
const numExtraDepth = depth - branch.length;
|
|
13
|
+
|
|
14
|
+
return [...Array.from({length: numExtraDepth}, () => ZERO_HASH), ...branch];
|
|
15
|
+
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import {SYNC_COMMITTEE_SIZE} from "@lodestar/params";
|
|
2
|
+
import {Slot} from "@lodestar/types";
|
|
3
|
+
|
|
4
|
+
export type LightclientUpdateStats = {
|
|
5
|
+
isFinalized: boolean;
|
|
6
|
+
participation: number;
|
|
7
|
+
slot: Slot;
|
|
8
|
+
};
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Returns the update with more bits. On ties, newUpdate is the better
|
|
12
|
+
*
|
|
13
|
+
* Spec v1.0.1
|
|
14
|
+
* ```python
|
|
15
|
+
* max(store.valid_updates, key=lambda update: sum(update.sync_committee_bits)))
|
|
16
|
+
* ```
|
|
17
|
+
*/
|
|
18
|
+
export function isBetterUpdate(prev: LightclientUpdateStats, next: LightclientUpdateStats): boolean {
|
|
19
|
+
// Finalized if participation is over 66%
|
|
20
|
+
if (!prev.isFinalized && next.isFinalized && next.participation * 3 > SYNC_COMMITTEE_SIZE * 2) {
|
|
21
|
+
return true;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
// Higher bit count
|
|
25
|
+
if (prev.participation > next.participation) return false;
|
|
26
|
+
if (prev.participation < next.participation) return true;
|
|
27
|
+
|
|
28
|
+
// else keep the oldest, lowest chance or re-org and requires less updating
|
|
29
|
+
return prev.slot > next.slot;
|
|
30
|
+
}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import bls from "@chainsafe/bls";
|
|
2
|
+
import type {PublicKey} from "@chainsafe/bls/types";
|
|
3
|
+
import {BitArray} from "@chainsafe/ssz";
|
|
4
|
+
import {ApiClient} from "@lodestar/api";
|
|
5
|
+
import {Bytes32, Root, altair, ssz} from "@lodestar/types";
|
|
6
|
+
import {BeaconBlockHeader} from "@lodestar/types/phase0";
|
|
7
|
+
import {GenesisData} from "../index.js";
|
|
8
|
+
import {SyncCommitteeFast} from "../types.js";
|
|
9
|
+
|
|
10
|
+
export function sumBits(bits: BitArray): number {
|
|
11
|
+
return bits.getTrueBitIndexes().length;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function isZeroHash(root: Root): boolean {
|
|
15
|
+
for (let i = 0; i < root.length; i++) {
|
|
16
|
+
if (root[i] !== 0) {
|
|
17
|
+
return false;
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
return true;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export function assertZeroHashes(rootArray: Root[], expectedLength: number, errorMessage: string): void {
|
|
24
|
+
if (rootArray.length !== expectedLength) {
|
|
25
|
+
throw Error(`Wrong length ${errorMessage}`);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
for (const root of rootArray) {
|
|
29
|
+
if (!isZeroHash(root)) {
|
|
30
|
+
throw Error(`Not zeroed ${errorMessage}`);
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Util to guarantee that all bits have a corresponding pubkey
|
|
37
|
+
*/
|
|
38
|
+
export function getParticipantPubkeys<T>(pubkeys: T[], bits: BitArray): T[] {
|
|
39
|
+
// BitArray.intersectValues() checks the length is correct
|
|
40
|
+
return bits.intersectValues(pubkeys);
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
export function toBlockHeader(block: altair.BeaconBlock): BeaconBlockHeader {
|
|
44
|
+
return {
|
|
45
|
+
slot: block.slot,
|
|
46
|
+
proposerIndex: block.proposerIndex,
|
|
47
|
+
parentRoot: block.parentRoot,
|
|
48
|
+
stateRoot: block.stateRoot,
|
|
49
|
+
bodyRoot: ssz.altair.BeaconBlockBody.hashTreeRoot(block.body),
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
function deserializePubkeys(pubkeys: altair.LightClientUpdate["nextSyncCommittee"]["pubkeys"]): PublicKey[] {
|
|
54
|
+
return pubkeys.map((pk) => bls.PublicKey.fromBytes(pk));
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
function serializePubkeys(pubkeys: PublicKey[]): altair.LightClientUpdate["nextSyncCommittee"]["pubkeys"] {
|
|
58
|
+
return pubkeys.map((pk) => pk.toBytes());
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
export function deserializeSyncCommittee(syncCommittee: altair.SyncCommittee): SyncCommitteeFast {
|
|
62
|
+
return {
|
|
63
|
+
pubkeys: deserializePubkeys(syncCommittee.pubkeys),
|
|
64
|
+
aggregatePubkey: bls.PublicKey.fromBytes(syncCommittee.aggregatePubkey),
|
|
65
|
+
};
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
export function serializeSyncCommittee(syncCommittee: SyncCommitteeFast): altair.SyncCommittee {
|
|
69
|
+
return {
|
|
70
|
+
pubkeys: serializePubkeys(syncCommittee.pubkeys),
|
|
71
|
+
aggregatePubkey: syncCommittee.aggregatePubkey.toBytes(),
|
|
72
|
+
};
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
export function isEmptyHeader(header: BeaconBlockHeader): boolean {
|
|
76
|
+
const emptyValue = ssz.phase0.BeaconBlockHeader.defaultValue();
|
|
77
|
+
return ssz.phase0.BeaconBlockHeader.equals(emptyValue, header);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Thanks https://github.com/iliakan/detect-node/blob/master/index.esm.js
|
|
81
|
+
export const isNode =
|
|
82
|
+
Object.prototype.toString.call(typeof process !== "undefined" ? process : 0) === "[object process]";
|
|
83
|
+
|
|
84
|
+
export async function getGenesisData(api: Pick<ApiClient, "beacon">): Promise<GenesisData> {
|
|
85
|
+
const {genesisTime, genesisValidatorsRoot} = (await api.beacon.getGenesis()).value();
|
|
86
|
+
|
|
87
|
+
return {
|
|
88
|
+
genesisTime,
|
|
89
|
+
genesisValidatorsRoot,
|
|
90
|
+
};
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
export async function getFinalizedSyncCheckpoint(api: Pick<ApiClient, "beacon">): Promise<Bytes32> {
|
|
94
|
+
return (await api.beacon.getStateFinalityCheckpoints({stateId: "head"})).value().finalized.root;
|
|
95
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import {hasher} from "@chainsafe/persistent-merkle-tree";
|
|
2
|
+
import {byteArrayEquals} from "@chainsafe/ssz";
|
|
3
|
+
|
|
4
|
+
export const SYNC_COMMITTEES_DEPTH = 4;
|
|
5
|
+
export const SYNC_COMMITTEES_INDEX = 11;
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Verify that the given ``leaf`` is on the merkle branch ``proof``
|
|
9
|
+
* starting with the given ``root``.
|
|
10
|
+
*
|
|
11
|
+
* Browser friendly version of verifyMerkleBranch
|
|
12
|
+
*/
|
|
13
|
+
export function isValidMerkleBranch(
|
|
14
|
+
leaf: Uint8Array,
|
|
15
|
+
proof: Uint8Array[],
|
|
16
|
+
depth: number,
|
|
17
|
+
index: number,
|
|
18
|
+
root: Uint8Array
|
|
19
|
+
): boolean {
|
|
20
|
+
let value = leaf;
|
|
21
|
+
for (let i = 0; i < depth; i++) {
|
|
22
|
+
if (Math.floor(index / 2 ** i) % 2) {
|
|
23
|
+
value = hasher.digest64(proof[i], value);
|
|
24
|
+
} else {
|
|
25
|
+
value = hasher.digest64(value, proof[i]);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
return byteArrayEquals(value, root);
|
|
29
|
+
}
|
package/src/utils.ts
ADDED