@typeberry/lib 0.5.5 → 0.5.6-63e5ccc
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/lib/scripts/build-lib.js +1 -0
- package/package.json +5 -1
- package/packages/core/networking/package.json +1 -1
- package/packages/core/pvm-host-calls/ecalli-trace-logger.js +1 -1
- package/packages/core/pvm-host-calls/ecalli-trace-logger.test.js +7 -7
- package/packages/core/pvm-host-calls/host-call-registers.js +1 -1
- package/packages/core/telemetry/package.json +1 -1
- package/packages/jam/node/package.json +1 -1
- package/packages/jam/safrole/bandersnatch-vrf.d.ts +7 -2
- package/packages/jam/safrole/bandersnatch-vrf.d.ts.map +1 -1
- package/packages/jam/safrole/bandersnatch-vrf.js +34 -1
- package/packages/jam/safrole/bandersnatch-vrf.test.js +15 -0
- package/packages/jam/safrole/bandersnatch-wasm.d.ts +1 -0
- package/packages/jam/safrole/bandersnatch-wasm.d.ts.map +1 -1
- package/packages/jam/safrole/bandersnatch-wasm.js +3 -0
- package/packages/jam/transition/accumulate/accumulate-data.d.ts.map +1 -1
- package/packages/jam/transition/accumulate/accumulate-data.js +6 -1
- package/packages/jam/transition/accumulate/accumulate.d.ts +0 -9
- package/packages/jam/transition/accumulate/accumulate.d.ts.map +1 -1
- package/packages/jam/transition/accumulate/accumulate.js +4 -37
- package/packages/workers/block-authorship/main.d.ts.map +1 -1
- package/packages/workers/block-authorship/main.js +31 -1
- package/packages/workers/block-authorship/package.json +1 -1
- package/packages/workers/block-authorship/ticket-generator.d.ts +21 -0
- package/packages/workers/block-authorship/ticket-generator.d.ts.map +1 -0
- package/packages/workers/block-authorship/ticket-generator.js +37 -0
- package/packages/workers/block-authorship/ticket-generator.test.d.ts +2 -0
- package/packages/workers/block-authorship/ticket-generator.test.d.ts.map +1 -0
- package/packages/workers/block-authorship/ticket-generator.test.js +100 -0
- package/packages/workers/importer/package.json +1 -1
package/bin/lib/scripts/build-lib.js CHANGED
@@ -301,6 +301,7 @@ function createDistPackageJson(packageMap) {
     exports: {},
     imports: buildImportsField(packageMap),
     dependencies: Object.fromEntries(filteredDeps),
+    repository: sourcePackageJson.repository,
     author: sourcePackageJson.author,
     license: sourcePackageJson.license,
   };
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@typeberry/lib",
-  "version": "0.5.5",
+  "version": "0.5.6-63e5ccc",
   "description": "Typeberry Library",
   "main": "./bin/lib/index.js",
   "types": "./bin/lib/index.d.ts",
@@ -267,6 +267,10 @@
     "@typeberry/native": "0.0.4-4c0cd28",
     "hash-wasm": "4.12.0"
   },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/FluffyLabs/typeberry"
+  },
   "author": "Fluffy Labs",
   "license": "MPL-2.0"
 }
package/packages/core/pvm-host-calls/ecalli-trace-logger.js CHANGED
@@ -107,7 +107,7 @@ export class EcalliTraceLogger {
    */
   logSetReg(index, value) {
     const paddedIdx = index.toString().padStart(2, "0");
-    this.output(`setreg r${paddedIdx} <- ${value.toString(16)}`);
+    this.output(`setreg r${paddedIdx} <- 0x${value.toString(16)}`);
   }
   /**
    * Log gas overwrite operation.
package/packages/core/pvm-host-calls/ecalli-trace-logger.test.js CHANGED
@@ -47,7 +47,7 @@ describe("IoTraceLogger", () => {
       [9, 0x10000n],
     ]));
     logger.logStart(0, tryAsSmallGas(10000), regs);
-    assert.strictEqual(lines[0], "start pc=0 gas=10000 r07=10 r09=10000");
+    assert.strictEqual(lines[0], "start pc=0 gas=10000 r07=0x10 r09=0x10000");
   });
   it("handles no non-zero registers", () => {
     const lines = [];
@@ -66,7 +66,7 @@ describe("IoTraceLogger", () => {
       [3, 0x1000n],
     ]));
     logger.logEcalli(tryAsHostCallIndex(10), 42, tryAsSmallGas(10000), regs);
-    assert.strictEqual(lines[0], "ecalli=10 pc=42 gas=10000 r01=1 r03=1000");
+    assert.strictEqual(lines[0], "ecalli=10 pc=42 gas=10000 r01=0x1 r03=0x1000");
   });
   it("omits zero registers", () => {
     const lines = [];
@@ -76,7 +76,7 @@ describe("IoTraceLogger", () => {
       [1, 1n],
     ]));
     logger.logEcalli(tryAsHostCallIndex(5), 0, tryAsSmallGas(5000), regs);
-    assert.strictEqual(lines[0], "ecalli=5 pc=0 gas=5000 r01=1");
+    assert.strictEqual(lines[0], "ecalli=5 pc=0 gas=5000 r01=0x1");
   });
   it("handles no non-zero registers", () => {
     const lines = [];
@@ -107,13 +107,13 @@ describe("IoTraceLogger", () => {
     const lines = [];
     const logger = EcalliTraceLogger.new((line) => lines.push(line));
     logger.logSetReg(0, 0x100n);
-    assert.strictEqual(lines[0], "setreg r00 <- 100");
+    assert.strictEqual(lines[0], "setreg r00 <- 0x100");
   });
   it("formats two-digit register index", () => {
     const lines = [];
     const logger = EcalliTraceLogger.new((line) => lines.push(line));
     logger.logSetReg(12, 0x4n);
-    assert.strictEqual(lines[0], "setreg r12 <- 4");
+    assert.strictEqual(lines[0], "setreg r12 <- 0x4");
   });
 });
 describe("logSetGas", () => {
@@ -136,7 +136,7 @@ describe("IoTraceLogger", () => {
     assert.strictEqual(lines.length, 4);
     assert.strictEqual(lines[0], "memread 0x00001000 len=1 -> 0xcd");
     assert.strictEqual(lines[1], "memwrite 0x00002000 len=1 <- 0xab");
-    assert.strictEqual(lines[2], "setreg r00 <- 100");
+    assert.strictEqual(lines[2], "setreg r00 <- 0x100");
     assert.strictEqual(lines[3], "setgas <- 9950");
   });
   it("sorts reads by address", () => {
@@ -163,7 +163,7 @@ describe("IoTraceLogger", () => {
     const logger = EcalliTraceLogger.new((line) => lines.push(line));
     const regs = createRegisters(new Map([[0, 0x100n]]));
     logger.logHalt(42, tryAsSmallGas(9920), regs);
-    assert.strictEqual(lines[0], "HALT pc=42 gas=9920 r00=100");
+    assert.strictEqual(lines[0], "HALT pc=42 gas=9920 r00=0x100");
   });
   it("logs OOG", () => {
     const lines = [];
package/packages/core/pvm-host-calls/host-call-registers.js CHANGED
@@ -38,7 +38,7 @@ export class HostCallRegisters {
     const entries = [];
     for (const [idx, value] of values.entries()) {
       if (value !== 0n) {
-        entries.push(`r${idx.toString().padStart(2, "0")}=${value.toString(16)}`);
+        entries.push(`r${idx.toString().padStart(2, "0")}=0x${value.toString(16)}`);
       }
     }
     return entries.join(" ");
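Both of the changes above add an explicit `0x` prefix when register values are rendered in PVM host-call traces, so hexadecimal values can no longer be misread as decimal. A standalone sketch of the resulting format (not the library API itself, only the template logic visible in the diff):

```ts
// Mirrors the updated logSetReg template: two-digit register index, 0x-prefixed hex value.
function formatSetReg(index: number, value: bigint): string {
  const paddedIdx = index.toString().padStart(2, "0");
  return `setreg r${paddedIdx} <- 0x${value.toString(16)}`;
}

console.log(formatSetReg(12, 0x4n)); // "setreg r12 <- 0x4"
console.log(formatSetReg(0, 0x100n)); // "setreg r00 <- 0x100"
```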
package/packages/jam/safrole/bandersnatch-vrf.d.ts CHANGED
@@ -1,5 +1,5 @@
-import type { EntropyHash } from "#@typeberry/block";
-import
+import type { EntropyHash, TicketAttempt } from "#@typeberry/block";
+import { SignedTicket } from "#@typeberry/block/tickets.js";
 import { BytesBlob } from "#@typeberry/bytes";
 import type { BandersnatchKey, BandersnatchSecretSeed } from "#@typeberry/crypto";
 import { type BandersnatchRingRoot, type BandersnatchVrfSignature } from "#@typeberry/crypto/bandersnatch.js";
@@ -13,6 +13,7 @@ declare const FUNCTIONS: {
     getRingCommitment: typeof getRingCommitment;
     generateSeal: typeof generateSeal;
     getVrfOutputHash: typeof getVrfOutputHash;
+    generateTickets: typeof generateTickets;
 };
 export default FUNCTIONS;
 declare function verifyHeaderSeals(bandersnatch: BandernsatchWasm, authorKey: BandersnatchKey, signature: BandersnatchVrfSignature, payload: BytesBlob, encodedUnsealedHeader: BytesBlob, entropySignature: BandersnatchVrfSignature, entropyPayloadPrefix: BytesBlob): Promise<Result<[EntropyHash, EntropyHash], null>>;
@@ -25,4 +26,8 @@ declare function verifyTickets(bandersnatch: BandernsatchWasm, numberOfValidator
 declare function generateSeal(bandersnatch: BandernsatchWasm, authorKey: BandersnatchSecretSeed, input: BytesBlob, auxData: BytesBlob): Promise<Result<BandersnatchVrfSignature, null>>;
 export type VrfOutputHash = Opaque<OpaqueHash, "VRF Output Hash">;
 declare function getVrfOutputHash(bandersnatch: BandernsatchWasm, authorKey: BandersnatchSecretSeed, input: BytesBlob): Promise<Result<VrfOutputHash, null>>;
+/**
+ * Generates signed tickets for all attempts at once using batch ring VRF.
+ */
+declare function generateTickets(bandersnatch: BandernsatchWasm, ringKeys: BandersnatchKey[], proverKeyIndex: number, key: BandersnatchSecretSeed, entropy: EntropyHash, ticketsPerValidator: TicketAttempt): Promise<Result<SignedTicket[], null>>;
 //# sourceMappingURL=bandersnatch-vrf.d.ts.map
package/packages/jam/safrole/bandersnatch-vrf.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"bandersnatch-vrf.d.ts","sourceRoot":"","sources":["../../../../../packages/jam/safrole/bandersnatch-vrf.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;
+
{"version":3,"file":"bandersnatch-vrf.d.ts","sourceRoot":"","sources":["../../../../../packages/jam/safrole/bandersnatch-vrf.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACnE,OAAO,EAAE,YAAY,EAAsB,MAAM,6BAA6B,CAAC;AAC/E,OAAO,EAAS,SAAS,EAAE,MAAM,kBAAkB,CAAC;AACpD,OAAO,KAAK,EAAE,eAAe,EAAE,sBAAsB,EAAE,MAAM,mBAAmB,CAAC;AACjF,OAAO,EAIL,KAAK,oBAAoB,EACzB,KAAK,wBAAwB,EAC9B,MAAM,mCAAmC,CAAC;AAC3C,OAAO,EAAa,KAAK,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7D,OAAO,EAAE,KAAK,MAAM,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AACvD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAwB/D,QAAA,MAAM,SAAS;;;;;;;;CAQd,CAAC;AAKF,eAAe,SAAS,CAAC;AAEzB,iBAAe,iBAAiB,CAC9B,YAAY,EAAE,gBAAgB,EAC9B,SAAS,EAAE,eAAe,EAC1B,SAAS,EAAE,wBAAwB,EACnC,OAAO,EAAE,SAAS,EAClB,qBAAqB,EAAE,SAAS,EAChC,gBAAgB,EAAE,wBAAwB,EAC1C,oBAAoB,EAAE,SAAS,GAC9B,OAAO,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,WAAW,CAAC,EAAE,IAAI,CAAC,CAAC,CAkBnD;AAED,iBAAe,UAAU,CACvB,YAAY,EAAE,gBAAgB,EAC9B,SAAS,EAAE,eAAe,EAC1B,SAAS,EAAE,wBAAwB,EACnC,OAAO,EAAE,SAAS,EAClB,qBAAqB,EAAE,SAAS,GAC/B,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC,CAapC;AAED,iBAAS,iBAAiB,CACxB,YAAY,EAAE,gBAAgB,EAC9B,UAAU,EAAE,eAAe,EAAE,GAC5B,OAAO,CAAC,MAAM,CAAC,oBAAoB,EAAE,IAAI,CAAC,CAAC,CAmB7C;AAkBD,iBAAe,aAAa,CAC1B,YAAY,EAAE,gBAAgB,EAC9B,kBAAkB,EAAE,MAAM,EAC1B,SAAS,EAAE,oBAAoB,EAC/B,OAAO,EAAE,SAAS,YAAY,EAAE,EAChC,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,WAAW,EAAE,WAAW,CAAA;CAAE,EAAE,CAAC,CAqB3D;AAED,iBAAe,YAAY,CACzB,YAAY,EAAE,gBAAgB,EAC9B,SAAS,EAAE,sBAAsB,EACjC,KAAK,EAAE,SAAS,EAChB,OAAO,EAAE,SAAS,GACjB,OAAO,CAAC,MAAM,CAAC,wBAAwB,EAAE,IAAI,CAAC,CAAC,CAQjD;AAED,MAAM,MAAM,aAAa,GAAG,MAAM,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAC;AAElE,iBAAe,gBAAgB,CAC7B,YAAY,EAAE,gBAAgB,EAC9B,SAAS,EAAE,sBAAsB,EACjC,KAAK,EAAE,SAAS,GACf,OAAO,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC,CAQtC;AAKD;;GAEG;AACH,iBAAe,eAAe,CAC5B,YAAY,EAAE,gBAAgB,EAC9B,QAAQ,EAAE,eAAe,EAAE,EAC3B,cAAc,EAAE,MAAM,EACtB,GAAG,EAAE,sBAAsB,EAC3B,OAAO,EAAE,WAAW,EACpB,mBAAmB,EAAE,aAAa,GACjC,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC,CA0CvC"}
package/packages/jam/safrole/bandersnatch-vrf.js CHANGED
@@ -1,5 +1,6 @@
+import { SignedTicket, tryAsTicketAttempt } from "#@typeberry/block/tickets.js";
 import { Bytes, BytesBlob } from "#@typeberry/bytes";
-import { BANDERSNATCH_RING_ROOT_BYTES, BANDERSNATCH_VRF_SIGNATURE_BYTES, } from "#@typeberry/crypto/bandersnatch.js";
+import { BANDERSNATCH_PROOF_BYTES, BANDERSNATCH_RING_ROOT_BYTES, BANDERSNATCH_VRF_SIGNATURE_BYTES, } from "#@typeberry/crypto/bandersnatch.js";
 import { HASH_SIZE } from "#@typeberry/hash";
 import { Result } from "#@typeberry/utils";
 import { JAM_TICKET_SEAL } from "./constants.js";
@@ -25,6 +26,7 @@ const FUNCTIONS = {
   getRingCommitment,
   generateSeal,
   getVrfOutputHash,
+  generateTickets,
 };
 // NOTE [ToDr] We export the entire object to allow mocking in tests.
 // Ideally we would just export functions and figure out how to mock
@@ -99,3 +101,34 @@ async function getVrfOutputHash(bandersnatch, authorKey, input) {
   }
   return Result.ok(Bytes.fromBlob(result.subarray(1), HASH_SIZE).asOpaque());
 }
+// One byte for result discriminator and the rest is the ring VRF signature.
+const GENERATE_RESULT_ENTRY_LENGTH = 1 + BANDERSNATCH_PROOF_BYTES;
+/**
+ * Generates signed tickets for all attempts at once using batch ring VRF.
+ */
+async function generateTickets(bandersnatch, ringKeys, proverKeyIndex, key, entropy, ticketsPerValidator) {
+  // Build VRF inputs: JAM_TICKET_SEAL || entropy || attempt_byte for each attempt
+  const vrfInputParts = [];
+  for (let attempt = 0; attempt < ticketsPerValidator; attempt++) {
+    vrfInputParts.push(BytesBlob.blobFromParts([JAM_TICKET_SEAL, entropy.raw, Uint8Array.of(attempt)]).raw);
+  }
+  const attemptLength = 1;
+  const vrfInputDataLen = JAM_TICKET_SEAL.length + entropy.length + attemptLength;
+  const inputsData = BytesBlob.blobFromParts(vrfInputParts).raw;
+  const ringKeysData = BytesBlob.blobFromParts(ringKeys.map((k) => k.raw)).raw;
+  const result = await bandersnatch.batchGenerateRingVrf(ringKeysData, proverKeyIndex, key.raw, inputsData, vrfInputDataLen);
+  const tickets = [];
+  for (let attempt = 0; attempt < ticketsPerValidator; attempt++) {
+    const offset = attempt * GENERATE_RESULT_ENTRY_LENGTH;
+    const resultByte = result[offset];
+    if (resultByte === ResultValues.Error) {
+      return Result.error(null, () => `Ring VRF proof generation failed for attempt ${attempt}`);
+    }
+    const signature = Bytes.fromBlob(new Uint8Array(result.subarray(offset + 1, offset + GENERATE_RESULT_ENTRY_LENGTH)), BANDERSNATCH_PROOF_BYTES).asOpaque();
+    tickets.push(SignedTicket.create({
+      attempt: tryAsTicketAttempt(attempt),
+      signature,
+    }));
+  }
+  return Result.ok(tickets);
+}
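The batch WASM call used above returns a single buffer with one fixed-size entry per attempt: a one-byte status discriminator followed by the ring VRF proof of `BANDERSNATCH_PROOF_BYTES` bytes. A standalone sketch of that parsing, with assumed constants (the 784-byte proof size is inferred from the mocked signatures in the tests further below; the numeric status values are not shown in this diff, so the error status is a parameter here):

```ts
// Layout assumed from generateTickets above: [status byte | proof bytes] repeated per attempt.
const PROOF_BYTES = 784; // assumption, matches the Bytes.zero(784) mock signatures below
const ENTRY_LENGTH = 1 + PROOF_BYTES;

function splitBatchResult(result: Uint8Array, attempts: number, errorStatus: number): Uint8Array[] {
  const proofs: Uint8Array[] = [];
  for (let attempt = 0; attempt < attempts; attempt++) {
    const offset = attempt * ENTRY_LENGTH;
    if (result[offset] === errorStatus) {
      throw new Error(`ring VRF proof generation failed for attempt ${attempt}`);
    }
    proofs.push(result.subarray(offset + 1, offset + ENTRY_LENGTH));
  }
  return proofs;
}
```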
package/packages/jam/safrole/bandersnatch-vrf.test.js CHANGED
@@ -155,4 +155,19 @@ describe("Bandersnatch verification", () => {
       deepEqual(verificationResult, Result.ok(BytesBlob.parseBlob("0x000b0e5c06e70a23d6cfed372763de718b0c21119ea51f7afe1e69b0000de620")));
     });
   });
+  describe("generateTickets", () => {
+    it("should generate tickets that pass verification (consistency check)", async () => {
+      // Generate tickets and verify them - checks consistency between generate and verify
+      const secrets = [0, 1, 2].map((i) => Bytes.fill(SEED_SIZE, i).asOpaque());
+      const ringKeys = secrets.map((secret) => deriveBandersnatchPublicKey(secret));
+      const proverIndex = 0;
+      const entropy = Bytes.fill(HASH_SIZE, 123).asOpaque();
+      const genResult = await bandersnatchVrf.generateTickets(await bandersnatchWasm, ringKeys, proverIndex, secrets[proverIndex], entropy, tryAsTicketAttempt(2));
+      assert.ok(genResult.isOk);
+      const commitment = await bandersnatchVrf.getRingCommitment(await bandersnatchWasm, ringKeys);
+      assert.ok(commitment.isOk);
+      const verifyResult = await bandersnatchVrf.verifyTickets(await bandersnatchWasm, ringKeys.length, commitment.ok, genResult.ok, entropy);
+      assert.ok(verifyResult.every((r) => r.isValid), "Generated tickets should pass verification");
+    });
+  });
 });
package/packages/jam/safrole/bandersnatch-wasm.d.ts CHANGED
@@ -7,5 +7,6 @@ export declare class BandernsatchWasm {
     batchVerifyTicket(ringSize: number, commitment: Uint8Array, ticketsData: Uint8Array, contextLength: number): Promise<Uint8Array<ArrayBufferLike>>;
     generateSeal(authorKey: Uint8Array, input: Uint8Array, auxData: Uint8Array): Promise<Uint8Array<ArrayBufferLike>>;
     getVrfOutputHash(authorKey: Uint8Array, input: Uint8Array): Promise<Uint8Array<ArrayBufferLike>>;
+    batchGenerateRingVrf(ringKeys: Uint8Array, proverKeyIndex: number, secretSeed: Uint8Array, inputsData: Uint8Array, vrfInputDataLen: number): Promise<Uint8Array<ArrayBufferLike>>;
 }
 //# sourceMappingURL=bandersnatch-wasm.d.ts.map
package/packages/jam/safrole/bandersnatch-wasm.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"bandersnatch-wasm.d.ts","sourceRoot":"","sources":["../../../../../packages/jam/safrole/bandersnatch-wasm.ts"],"names":[],"mappings":"AAEA,qBAAa,gBAAgB;IAC3B,OAAO;WAEM,GAAG;IAKV,UAAU,CAAC,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU;IAIjG,iBAAiB,CACrB,SAAS,EAAE,UAAU,EACrB,UAAU,EAAE,UAAU,EACtB,iBAAiB,EAAE,UAAU,EAC7B,cAAc,EAAE,UAAU,EAC1B,WAAW,EAAE,UAAU,EACvB,oBAAoB,EAAE,UAAU;IAY5B,iBAAiB,CAAC,IAAI,EAAE,UAAU;IAIlC,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,WAAW,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM;IAI1G,YAAY,CAAC,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU;IAI1E,gBAAgB,CAAC,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,UAAU;
+
{"version":3,"file":"bandersnatch-wasm.d.ts","sourceRoot":"","sources":["../../../../../packages/jam/safrole/bandersnatch-wasm.ts"],"names":[],"mappings":"AAEA,qBAAa,gBAAgB;IAC3B,OAAO;WAEM,GAAG;IAKV,UAAU,CAAC,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU;IAIjG,iBAAiB,CACrB,SAAS,EAAE,UAAU,EACrB,UAAU,EAAE,UAAU,EACtB,iBAAiB,EAAE,UAAU,EAC7B,cAAc,EAAE,UAAU,EAC1B,WAAW,EAAE,UAAU,EACvB,oBAAoB,EAAE,UAAU;IAY5B,iBAAiB,CAAC,IAAI,EAAE,UAAU;IAIlC,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,WAAW,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM;IAI1G,YAAY,CAAC,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU;IAI1E,gBAAgB,CAAC,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,UAAU;IAIzD,oBAAoB,CACxB,QAAQ,EAAE,UAAU,EACpB,cAAc,EAAE,MAAM,EACtB,UAAU,EAAE,UAAU,EACtB,UAAU,EAAE,UAAU,EACtB,eAAe,EAAE,MAAM;CAI1B"}
package/packages/jam/safrole/bandersnatch-wasm.js CHANGED
@@ -23,4 +23,7 @@ export class BandernsatchWasm {
   async getVrfOutputHash(authorKey, input) {
     return bandersnatchWasm.vrfOutputHash(authorKey, input);
   }
+  async batchGenerateRingVrf(ringKeys, proverKeyIndex, secretSeed, inputsData, vrfInputDataLen) {
+    return bandersnatchWasm.batchGenerateRingVrf(ringKeys, proverKeyIndex, secretSeed, inputsData, vrfInputDataLen);
+  }
 }
package/packages/jam/transition/accumulate/accumulate-data.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"accumulate-data.d.ts","sourceRoot":"","sources":["../../../../../../packages/jam/transition/accumulate/accumulate-data.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,UAAU,EAAE,KAAK,SAAS,EAAmB,MAAM,kBAAkB,CAAC;AACpF,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,iCAAiC,CAAC;AAClE,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAmC,KAAK,GAAG,EAAE,MAAM,oBAAoB,CAAC;AAC/E,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AAavC;;;;;GAKG;AACH,qBAAa,cAAc;IACzB,OAAO,CAAC,QAAQ,CAAC,sBAAsB,CAAqC;IAC5E,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAoC;IACzE,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAc;IACzC,OAAO,CAAC,QAAQ,CAAC,mBAAmB,CAA6B;gBAG/D,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,EAC9B,SAAS,EAAE,eAAe,EAAE,EAC5B,iCAAiC,EAAE,GAAG,CAAC,SAAS,EAAE,UAAU,CAAC;IAwC/D;;;;OAIG;IACH,OAAO,CAAC,wBAAwB;IAahC,oCAAoC;IACpC,OAAO,CAAC,eAAe;
+
{"version":3,"file":"accumulate-data.d.ts","sourceRoot":"","sources":["../../../../../../packages/jam/transition/accumulate/accumulate-data.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,UAAU,EAAE,KAAK,SAAS,EAAmB,MAAM,kBAAkB,CAAC;AACpF,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,iCAAiC,CAAC;AAClE,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAmC,KAAK,GAAG,EAAE,MAAM,oBAAoB,CAAC;AAC/E,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AAavC;;;;;GAKG;AACH,qBAAa,cAAc;IACzB,OAAO,CAAC,QAAQ,CAAC,sBAAsB,CAAqC;IAC5E,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAoC;IACzE,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAc;IACzC,OAAO,CAAC,QAAQ,CAAC,mBAAmB,CAA6B;gBAG/D,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,EAC9B,SAAS,EAAE,eAAe,EAAE,EAC5B,iCAAiC,EAAE,GAAG,CAAC,SAAS,EAAE,UAAU,CAAC;IAwC/D;;;;OAIG;IACH,OAAO,CAAC,wBAAwB;IAahC,oCAAoC;IACpC,OAAO,CAAC,eAAe;IAiBvB;;;;;OAKG;IACH,OAAO,CAAC,kBAAkB;IAmB1B;;OAEG;IAEH;;;;;OAKG;IACH,OAAO,CAAC,gBAAgB;IA8CxB,0DAA0D;IAC1D,WAAW,CAAC,SAAS,EAAE,SAAS,GAAG,OAAO,EAAE;IAI5C,2DAA2D;IAC3D,YAAY,CAAC,SAAS,EAAE,SAAS,GAAG,eAAe,EAAE;IAIrD,0EAA0E;IAC1E,gBAAgB,CAAC,SAAS,EAAE,SAAS,GAAG,GAAG;IAI3C,mDAAmD;IACnD,WAAW,CAAC,SAAS,EAAE,SAAS,GAAG,UAAU;IAI7C;;;;OAIG;IACH,aAAa,IAAI,SAAS,EAAE;CAG7B"}
package/packages/jam/transition/accumulate/accumulate-data.js CHANGED
@@ -63,7 +63,12 @@ export class AccumulateData {
         merged.add(serviceId);
       }
     }
-
+    /**
+     * Services have to be sorted
+     *
+     * https://graypaper.fluffylabs.dev/#/ab2cdbd/177003177003?v=0.7.2
+     */
+    return Array.from(merged).sort((a, b) => a - b);
   }
   /**
    * Transform the list of pending transfers into:
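The explicit comparator matters because JavaScript's default `Array.prototype.sort` compares elements as strings, which mis-orders numeric service IDs:

```ts
// Default sort is lexicographic, so numeric IDs come out in the wrong order:
console.log([10, 2, 1].sort()); // [1, 10, 2]
// The numeric comparator used above restores ascending order:
console.log([10, 2, 1].sort((a, b) => a - b)); // [1, 2, 10]
```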
package/packages/jam/transition/accumulate/accumulate.d.ts CHANGED
@@ -30,15 +30,6 @@ export declare class Accumulate {
      * https://graypaper.fluffylabs.dev/#/7e6ff6a/18d70118d701?v=0.6.7
      */
     private accumulateSingleService;
-    /**
-     * The outer accumulation function ∆+ which transforms a gas-limit, a sequence of work-reports,
-     * an initial partial-state and a dictionary of services enjoying free accumulation,
-     * into a tuple of the number of work-results accumulated, a posterior state-context,
-     * the resultant deferred-transfers and accumulation-output pairing.
-     *
-     * https://graypaper.fluffylabs.dev/#/7e6ff6a/179d00179d00?v=0.6.7
-     */
-    private accumulateSequentiallyLegacy;
     /**
      * The outer accumulation function ∆+ which transforms a gas-limit, a sequence of work-reports,
      * an initial partial-state and a dictionary of services enjoying free accumulation,
package/packages/jam/transition/accumulate/accumulate.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"accumulate.d.ts","sourceRoot":"","sources":["../../../../../../packages/jam/transition/accumulate/accumulate.ts"],"names":[],"mappings":"AAAA,OAAO,EAGL,KAAK,SAAS,EAIf,MAAM,kBAAkB,CAAC;AAM1B,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAEnD,OAAO,EAAE,KAAK,OAAO,EAA8B,MAAM,iBAAiB,CAAC;AAiB3E,OAAO,EAAyC,MAAM,EAAa,MAAM,kBAAkB,CAAC;AAM5F,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,gBAAgB,EACrB,KAAK,eAAe,EAGrB,MAAM,uBAAuB,CAAC;AAO/B,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AAEtD,eAAO,MAAM,kBAAkB,8BAA8B,CAAC;AAC9D,MAAM,MAAM,kBAAkB,GAAG,OAAO,kBAAkB,CAAC;AA2B3D,qBAAa,UAAU;aAEH,SAAS,EAAE,SAAS;aACpB,OAAO,EAAE,OAAO;aAChB,KAAK,EAAE,eAAe;aACtB,OAAO,EAAE,iBAAiB;gBAH1B,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,OAAO,EAChB,KAAK,EAAE,eAAe,EACtB,OAAO,EAAE,iBAAiB;IAO5C;;;;OAIG;IACH,OAAO,CAAC,qBAAqB;IAkB7B;;;;OAIG;YACW,uBAAuB;IAmGrC;;;;OAIG;YACW,uBAAuB;IAiDrC;;;;;;;OAOG;YACW,
+
{"version":3,"file":"accumulate.d.ts","sourceRoot":"","sources":["../../../../../../packages/jam/transition/accumulate/accumulate.ts"],"names":[],"mappings":"AAAA,OAAO,EAGL,KAAK,SAAS,EAIf,MAAM,kBAAkB,CAAC;AAM1B,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAEnD,OAAO,EAAE,KAAK,OAAO,EAA8B,MAAM,iBAAiB,CAAC;AAiB3E,OAAO,EAAyC,MAAM,EAAa,MAAM,kBAAkB,CAAC;AAM5F,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,gBAAgB,EACrB,KAAK,eAAe,EAGrB,MAAM,uBAAuB,CAAC;AAO/B,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AAEtD,eAAO,MAAM,kBAAkB,8BAA8B,CAAC;AAC9D,MAAM,MAAM,kBAAkB,GAAG,OAAO,kBAAkB,CAAC;AA2B3D,qBAAa,UAAU;aAEH,SAAS,EAAE,SAAS;aACpB,OAAO,EAAE,OAAO;aAChB,KAAK,EAAE,eAAe;aACtB,OAAO,EAAE,iBAAiB;gBAH1B,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,OAAO,EAChB,KAAK,EAAE,eAAe,EACtB,OAAO,EAAE,iBAAiB;IAO5C;;;;OAIG;IACH,OAAO,CAAC,qBAAqB;IAkB7B;;;;OAIG;YACW,uBAAuB;IAmGrC;;;;OAIG;YACW,uBAAuB;IAiDrC;;;;;;;OAOG;YACW,sBAAsB;IAuEpC,OAAO,CAAC,gBAAgB;IAuCxB,OAAO,CAAC,kBAAkB;IAsB1B;;;;;;;;;OASG;YACW,oBAAoB;IA6ClC;;OAEG;IACH,OAAO,CAAC,0BAA0B;IAkDlC;;;;;;OAMG;IACH,OAAO,CAAC,WAAW;IAcnB;;;;;;OAMG;IACI,6BAA6B,CAAC,UAAU,EAAE,SAAS,EAAE,GAAG,OAAO;IAKhE,UAAU,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,eAAe,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;CA0FrH"}
package/packages/jam/transition/accumulate/accumulate.js CHANGED
@@ -157,40 +157,6 @@ export class Accumulate {
     logger.log `Accumulation successful for ${serviceId}. Consumed: ${result.ok.consumedGas}`;
     return result.ok;
   }
-  /**
-   * The outer accumulation function ∆+ which transforms a gas-limit, a sequence of work-reports,
-   * an initial partial-state and a dictionary of services enjoying free accumulation,
-   * into a tuple of the number of work-results accumulated, a posterior state-context,
-   * the resultant deferred-transfers and accumulation-output pairing.
-   *
-   * https://graypaper.fluffylabs.dev/#/7e6ff6a/179d00179d00?v=0.6.7
-   */
-  async accumulateSequentiallyLegacy(gasLimit, reports, slot, entropy, statistics, stateUpdate, autoAccumulateServices, yieldedRoots, transfers) {
-    const i = this.findReportCutoffIndex(gasLimit, reports);
-    if (i === 0) {
-      return {
-        accumulatedReports: tryAsU32(0),
-        gasCost: tryAsServiceGas(0),
-        state: stateUpdate,
-      };
-    }
-    const reportsToAccumulateInParallel = reports.subview(0, i);
-    const accumulateData = new AccumulateData(reportsToAccumulateInParallel, [], autoAccumulateServices);
-    const reportsToAccumulateSequentially = reports.subview(i);
-    const results = await this.accumulateInParallel(accumulateData, slot, entropy, stateUpdate);
-    this.updateStatistics(results, statistics, accumulateData);
-    this.updateYieldedRoots(results, yieldedRoots);
-    const { state: stateAfterParallelAcc, totalGasCost, transfers: newTransfers, } = mergePerallelAccumulationResults(this.chainSpec, this.state, stateUpdate, results);
-    transfers.push(...newTransfers);
-    // NOTE [ToDr] recursive invocation
-    const { accumulatedReports, gasCost: seqGasCost, state, ...seqRest } = await this.accumulateSequentiallyLegacy(tryAsServiceGas(gasLimit - totalGasCost), reportsToAccumulateSequentially, slot, entropy, statistics, stateAfterParallelAcc, new Map(), yieldedRoots, transfers);
-    assertEmpty(seqRest);
-    return {
-      accumulatedReports: tryAsU32(i + accumulatedReports),
-      gasCost: tryAsServiceGas(totalGasCost + seqGasCost),
-      state,
-    };
-  }
   /**
    * The outer accumulation function ∆+ which transforms a gas-limit, a sequence of work-reports,
    * an initial partial-state and a dictionary of services enjoying free accumulation,
@@ -201,7 +167,8 @@ export class Accumulate {
    */
   async accumulateSequentially(gasLimit, reports, transfers, slot, entropy, statistics, stateUpdate, autoAccumulateServices, yieldedRoots) {
     const i = this.findReportCutoffIndex(gasLimit, reports);
-
+    /** https://graypaper.fluffylabs.dev/#/ab2cdbd/17e50117e501?v=0.7.2 */
+    const n = transfers.length + i + autoAccumulateServices.size;
     if (n === 0) {
       return {
         accumulatedReports: tryAsU32(0),
@@ -217,11 +184,11 @@ export class Accumulate {
     this.updateYieldedRoots(results, yieldedRoots);
     const { state: stateAfterParallelAcc, totalGasCost, transfers: newTransfers, } = mergePerallelAccumulationResults(this.chainSpec, this.state, stateUpdate, results);
     /**
-     * Gas limit from transfers is added to the next round of accumulation
+     * Gas limit from transfers (from `t`, not `t*`) is added to the next round of accumulation
      *
      * https://graypaper.fluffylabs.dev/#/ab2cdbd/172b02172b02?v=0.7.2
      */
-    const transfersGas =
+    const transfersGas = transfers.map((t) => t.gas);
     const { value: newGasLimit, overflow } = sumU64(tryAsServiceGas(gasLimit - totalGasCost), ...transfersGas);
     // NOTE [ToDr] recursive invocation
     const { accumulatedReports, gasCost: seqGasCost, state, ...seqRest } = await this.accumulateSequentially(tryAsServiceGas(overflow ? MAX_VALUE_U64 : newGasLimit), reportsToAccumulateSequentially, newTransfers, slot, entropy, statistics, stateAfterParallelAcc, new Map(), yieldedRoots);
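The gas budget handed to the next sequential round is the remaining limit plus the gas attached to the incoming transfers (the `t` of the comment above), saturating at the u64 maximum when `sumU64` reports overflow. A standalone sketch of that arithmetic with plain bigints, not the typeberry `ServiceGas`/`sumU64` helpers:

```ts
const MAX_U64 = (1n << 64n) - 1n;

// gasLimit and totalGasCost are this round's budget and spend; transferGas lists the gas
// carried by the transfers consumed in this round.
function nextRoundGasLimit(gasLimit: bigint, totalGasCost: bigint, transferGas: bigint[]): bigint {
  const sum = transferGas.reduce((acc, g) => acc + g, gasLimit - totalGasCost);
  return sum > MAX_U64 ? MAX_U64 : sum; // saturate instead of overflowing
}
```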
package/packages/workers/block-authorship/main.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"main.d.ts","sourceRoot":"","sources":["../../../../../packages/workers/block-authorship/main.ts"],"names":[],"mappings":"
+
{"version":3,"file":"main.d.ts","sourceRoot":"","sources":["../../../../../packages/workers/block-authorship/main.ts"],"names":[],"mappings":"AA2BA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,wBAAwB,CAAC;AAE3D,OAAO,KAAK,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAK9E,KAAK,MAAM,GAAG,YAAY,CAAC,qBAAqB,CAAC,CAAC;AAkBlD,wBAAsB,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,iBAAiB,iBAyMlE"}
package/packages/workers/block-authorship/main.js CHANGED
@@ -1,6 +1,7 @@
 import { setTimeout } from "node:timers/promises";
-import { tryAsTimeSlot, tryAsValidatorIndex, } from "#@typeberry/block";
+import { tryAsEpoch, tryAsTimeSlot, tryAsValidatorIndex, } from "#@typeberry/block";
 import { BytesBlob } from "#@typeberry/bytes";
+import { HashSet } from "#@typeberry/collections/hash-set.js";
 import { initWasm } from "#@typeberry/crypto";
 import { deriveBandersnatchPublicKey, deriveEd25519PublicKey, } from "#@typeberry/crypto/key-derivation.js";
 import { Blake2b, keccak } from "#@typeberry/hash";
@@ -12,6 +13,7 @@ import { JAM_FALLBACK_SEAL, JAM_TICKET_SEAL } from "#@typeberry/safrole/constant
 import { SafroleSealingKeysKind } from "#@typeberry/state";
 import { asOpaqueType, assertNever, Result } from "#@typeberry/utils";
 import { Generator } from "./generator.js";
+import { generateTickets } from "./ticket-generator.js";
 const logger = Logger.new(import.meta.filename, "author");
 export async function main(config, comms) {
   await initWasm();
@@ -88,6 +90,7 @@ export async function main(config, comms) {
   }
   const isFastForward = config.workerParams.isFastForward;
   let lastGeneratedSlot = startTimeSlot;
+  let ticketsGeneratedForEpoch = -1;
   while (!isFinished) {
     const hash = blocks.getBestHeaderHash();
     const state = states.getState(hash);
@@ -110,6 +113,33 @@ export async function main(config, comms) {
       continue;
     }
     const isNewEpoch = isEpochChanged(lastTimeSlot, timeSlot);
+    // Generate tickets if within contest period and not yet generated for this epoch
+    const epoch = tryAsEpoch(Math.floor(timeSlot / chainSpec.epochLength));
+    const slotInEpoch = timeSlot % chainSpec.epochLength;
+    const shouldGenerateTickets = slotInEpoch < chainSpec.contestLength && ticketsGeneratedForEpoch !== epoch;
+    if (shouldGenerateTickets) {
+      const designatedValidatorData = state.designatedValidatorData;
+      const ringKeys = designatedValidatorData.map((data) => data.bandersnatch);
+      const designatedKeySet = HashSet.from(ringKeys);
+      const validatorKeys = keys
+        .filter((k) => designatedKeySet.has(k.bandersnatchPublic))
+        .map((k) => ({ secret: k.bandersnatchSecret, public: k.bandersnatchPublic }));
+      if (validatorKeys.length > 0) {
+        // If state is from the previous epoch, entropy hasn't been shifted yet (index 1).
+        // After epoch change, it has been shifted to index 2.
+        const ticketEntropy = isNewEpoch ? state.entropy[1] : state.entropy[2];
+        logger.info `Epoch ${epoch}, slot ${slotInEpoch}/${chainSpec.contestLength}. Generating tickets for ${validatorKeys.length} validators...`;
+        const ticketsResult = await generateTickets(bandersnatch, ringKeys, validatorKeys, ticketEntropy, chainSpec.ticketsPerValidator);
+        if (ticketsResult.isError) {
+          logger.warn `Failed to generate tickets for epoch ${epoch}: ${ticketsResult.error}`;
+        }
+        else {
+          logger.log `Generated ${ticketsResult.ok.length} tickets for epoch ${epoch}.`;
+          // TODO [MaSi]: Sending out tickets
+        }
+      }
+      ticketsGeneratedForEpoch = epoch;
+    }
     const selingKeySeriesResult = await getSealingKeySeries(isNewEpoch, timeSlot, state);
     if (selingKeySeriesResult.isError) {
       continue;
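Ticket generation in the authorship loop is gated so it runs at most once per epoch and only during the ticket-contest window at the start of an epoch. A standalone sketch of that gate (plain numbers; `epochLength` and `contestLength` correspond to the chain-spec fields used above, and the "last generated epoch" plays the role of `ticketsGeneratedForEpoch`):

```ts
// Returns the epoch to generate tickets for, or null if nothing should be generated now.
function ticketGenerationEpoch(
  timeSlot: number,
  epochLength: number,
  contestLength: number,
  lastGeneratedEpoch: number,
): number | null {
  const epoch = Math.floor(timeSlot / epochLength);
  const slotInEpoch = timeSlot % epochLength;
  const inContestWindow = slotInEpoch < contestLength;
  return inContestWindow && epoch !== lastGeneratedEpoch ? epoch : null;
}
```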
package/packages/workers/block-authorship/ticket-generator.d.ts ADDED
@@ -0,0 +1,21 @@
+import type { EntropyHash } from "#@typeberry/block";
+import { type SignedTicket } from "#@typeberry/block/tickets.js";
+import type { BandersnatchKey, BandersnatchSecretSeed } from "#@typeberry/crypto";
+import type { BandernsatchWasm } from "#@typeberry/safrole/bandersnatch-wasm.js";
+import { Result } from "#@typeberry/utils";
+export declare enum TicketGeneratorError {
+    TicketGenerationFailed = "TicketGenerationFailed",
+    ValidatorNotInRing = "ValidatorNotInRing"
+}
+export type ValidatorKey = {
+    secret: BandersnatchSecretSeed;
+    public: BandersnatchKey;
+};
+/**
+ * Generates tickets for all validator keys.
+ *
+ * Each validator key produces `ticketsPerValidator` tickets using ring VRF proofs.
+ * The ring keys define the anonymous set - only members can produce valid proofs.
+ */
+export declare function generateTickets(bandersnatch: BandernsatchWasm, ringKeys: BandersnatchKey[], validatorKeys: ValidatorKey[], entropy: EntropyHash, ticketsPerValidator: number): Promise<Result<SignedTicket[], TicketGeneratorError>>;
+//# sourceMappingURL=ticket-generator.d.ts.map
package/packages/workers/block-authorship/ticket-generator.d.ts.map ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"ticket-generator.d.ts","sourceRoot":"","sources":["../../../../../packages/workers/block-authorship/ticket-generator.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AACpD,OAAO,EAAE,KAAK,YAAY,EAAsB,MAAM,6BAA6B,CAAC;AACpF,OAAO,KAAK,EAAE,eAAe,EAAE,sBAAsB,EAAE,MAAM,mBAAmB,CAAC;AAGjF,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,yCAAyC,CAAC;AAChF,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAI1C,oBAAY,oBAAoB;IAC9B,sBAAsB,2BAA2B;IACjD,kBAAkB,uBAAuB;CAC1C;AAED,MAAM,MAAM,YAAY,GAAG;IACzB,MAAM,EAAE,sBAAsB,CAAC;IAC/B,MAAM,EAAE,eAAe,CAAC;CACzB,CAAC;AAEF;;;;;GAKG;AACH,wBAAsB,eAAe,CACnC,YAAY,EAAE,gBAAgB,EAC9B,QAAQ,EAAE,eAAe,EAAE,EAC3B,aAAa,EAAE,YAAY,EAAE,EAC7B,OAAO,EAAE,WAAW,EACpB,mBAAmB,EAAE,MAAM,GAC1B,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,EAAE,oBAAoB,CAAC,CAAC,CAkCvD"}
package/packages/workers/block-authorship/ticket-generator.js ADDED
@@ -0,0 +1,37 @@
+import { tryAsTicketAttempt } from "#@typeberry/block/tickets.js";
+import { Logger } from "#@typeberry/logger";
+import bandersnatchVrf from "#@typeberry/safrole/bandersnatch-vrf.js";
+import { Result } from "#@typeberry/utils";
+const logger = Logger.new(import.meta.filename, "tickets-generator");
+export var TicketGeneratorError;
+(function (TicketGeneratorError) {
+  TicketGeneratorError["TicketGenerationFailed"] = "TicketGenerationFailed";
+  TicketGeneratorError["ValidatorNotInRing"] = "ValidatorNotInRing";
+})(TicketGeneratorError || (TicketGeneratorError = {}));
+/**
+ * Generates tickets for all validator keys.
+ *
+ * Each validator key produces `ticketsPerValidator` tickets using ring VRF proofs.
+ * The ring keys define the anonymous set - only members can produce valid proofs.
+ */
+export async function generateTickets(bandersnatch, ringKeys, validatorKeys, entropy, ticketsPerValidator) {
+  const allTickets = [];
+  for (const validatorKey of validatorKeys) {
+    const proverIndex = ringKeys.findIndex((k) => k.isEqualTo(validatorKey.public));
+    if (proverIndex < 0) {
+      logger.warn `Validator public key not found in the ring, skipping ticket generation for this key`;
+      continue;
+    }
+    const ticketResult = await bandersnatchVrf.generateTickets(bandersnatch, ringKeys, proverIndex, validatorKey.secret, entropy, tryAsTicketAttempt(ticketsPerValidator));
+    if (ticketResult.isOk) {
+      allTickets.push(...ticketResult.ok);
+    }
+    else {
+      logger.warn `Failed to generate tickets for validator, skipping`;
+    }
+  }
+  if (validatorKeys.length > 0 && allTickets.length === 0) {
+    return Result.error(TicketGeneratorError.TicketGenerationFailed, () => "Failed to generate tickets for all validators");
+  }
+  return Result.ok(allTickets);
+}
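A hedged usage sketch of the worker-level wrapper above, using only the signature and Result handling visible in this diff. It assumes the package-internal `#@typeberry/...` import aliases and a module located next to `ticket-generator.js`; the public entry point through `@typeberry/lib` may differ. Keys that are not in the ring are skipped with a warning, so the error case only fires when no validator produced tickets:

```ts
import type { EntropyHash } from "#@typeberry/block";
import type { SignedTicket } from "#@typeberry/block/tickets.js";
import type { BandersnatchKey } from "#@typeberry/crypto";
import type { BandernsatchWasm } from "#@typeberry/safrole/bandersnatch-wasm.js";
import { generateTickets, type ValidatorKey } from "./ticket-generator.js";

// Hypothetical call site mirroring main.js above; all inputs come from the worker's state.
async function makeEpochTickets(
  wasm: BandernsatchWasm,
  ringKeys: BandersnatchKey[],
  validatorKeys: ValidatorKey[],
  entropy: EntropyHash,
  ticketsPerValidator: number,
): Promise<SignedTicket[]> {
  const result = await generateTickets(wasm, ringKeys, validatorKeys, entropy, ticketsPerValidator);
  if (result.isError) {
    // TicketGenerationFailed is the only error the current implementation returns.
    return [];
  }
  return result.ok; // one ticket per in-ring validator per attempt
}
```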
package/packages/workers/block-authorship/ticket-generator.test.d.ts.map ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"ticket-generator.test.d.ts","sourceRoot":"","sources":["../../../../../packages/workers/block-authorship/ticket-generator.test.ts"],"names":[],"mappings":""}
package/packages/workers/block-authorship/ticket-generator.test.js ADDED
@@ -0,0 +1,100 @@
+import assert from "node:assert";
+import { afterEach, beforeEach, describe, it, mock } from "node:test";
+import { tryAsTicketAttempt } from "#@typeberry/block/tickets.js";
+import { Bytes } from "#@typeberry/bytes";
+import { BANDERSNATCH_KEY_BYTES, initWasm, SEED_SIZE } from "#@typeberry/crypto";
+import { HASH_SIZE } from "#@typeberry/hash";
+import bandersnatchVrf from "#@typeberry/safrole/bandersnatch-vrf.js";
+import { Result } from "#@typeberry/utils";
+import { generateTickets, TicketGeneratorError } from "./ticket-generator.js";
+const MOCK_ENTROPY = Bytes.zero(HASH_SIZE).asOpaque();
+const MOCK_BANDERSNATCH = {};
+function createMockRingKeys(count) {
+  return Array.from({ length: count }, (_, i) => Bytes.fill(BANDERSNATCH_KEY_BYTES, i).asOpaque());
+}
+function createMockValidatorKeys(count) {
+  return Array.from({ length: count }, (_, i) => ({
+    secret: Bytes.fill(SEED_SIZE, i).asOpaque(),
+    public: Bytes.fill(BANDERSNATCH_KEY_BYTES, i).asOpaque(),
+  }));
+}
+describe("Ticket Generator", () => {
+  beforeEach(async () => {
+    await initWasm();
+    mock.method(bandersnatchVrf, "generateTickets", async (_bandersnatch, _ringKeys, _proverIndex, _key, _entropy, ticketsPerValidator) => {
+      const tickets = [];
+      for (let attempt = 0; attempt < ticketsPerValidator; attempt++) {
+        tickets.push({
+          attempt: tryAsTicketAttempt(attempt),
+          signature: Bytes.zero(784).asOpaque(),
+        });
+      }
+      return Result.ok(tickets);
+    });
+  });
+  afterEach(() => {
+    mock.restoreAll();
+  });
+  describe("generateTickets", () => {
+    it("should generate correct total number of tickets", async () => {
+      const ringKeys = createMockRingKeys(3);
+      const validatorKeys = createMockValidatorKeys(3);
+      const ticketsPerValidator = 2;
+      const result = await generateTickets(MOCK_BANDERSNATCH, ringKeys, validatorKeys, MOCK_ENTROPY, ticketsPerValidator);
+      assert.ok(result.isOk);
+      assert.strictEqual(result.ok.length, 6);
+    });
+    it("should generate tickets with correct attempt values", async () => {
+      const ringKeys = createMockRingKeys(2);
+      const validatorKeys = createMockValidatorKeys(2);
+      const ticketsPerValidator = 2;
+      const result = await generateTickets(MOCK_BANDERSNATCH, ringKeys, validatorKeys, MOCK_ENTROPY, ticketsPerValidator);
+      assert.ok(result.isOk);
+      const tickets = result.ok;
+      assert.strictEqual(tickets[0].attempt, tryAsTicketAttempt(0));
+      assert.strictEqual(tickets[1].attempt, tryAsTicketAttempt(1));
+      assert.strictEqual(tickets[2].attempt, tryAsTicketAttempt(0));
+      assert.strictEqual(tickets[3].attempt, tryAsTicketAttempt(1));
+    });
+    it("should return empty array for no validator keys", async () => {
+      const ringKeys = createMockRingKeys(3);
+      const ticketsPerValidator = 2;
+      const result = await generateTickets(MOCK_BANDERSNATCH, ringKeys, [], MOCK_ENTROPY, ticketsPerValidator);
+      assert.ok(result.isOk);
+      assert.strictEqual(result.ok.length, 0);
+    });
+    it("should skip validators not in the ring and return tickets for valid ones", async () => {
+      const ticketsPerValidator = 2;
+      const ringKeys = createMockRingKeys(3);
+      const correctValidatorKeys = createMockValidatorKeys(2);
+      const incorrectValidatorKeys = [
+        {
+          secret: Bytes.fill(SEED_SIZE, 99).asOpaque(),
+          public: Bytes.fill(BANDERSNATCH_KEY_BYTES, 99).asOpaque(),
+        },
+      ];
+      const validatorKeys = [...correctValidatorKeys, ...incorrectValidatorKeys];
+      const result = await generateTickets(MOCK_BANDERSNATCH, ringKeys, validatorKeys, MOCK_ENTROPY, ticketsPerValidator);
+      assert.ok(result.isOk);
+      // Only the 2 valid validators should produce tickets
+      assert.strictEqual(result.ok.length, 4);
+    });
+    it("should error when all validators fail", async () => {
+      const ticketsPerValidator = 2;
+      const ringKeys = createMockRingKeys(3);
+      const invalidValidatorKeys = [
+        {
+          secret: Bytes.fill(SEED_SIZE, 99).asOpaque(),
+          public: Bytes.fill(BANDERSNATCH_KEY_BYTES, 99).asOpaque(),
+        },
+        {
+          secret: Bytes.fill(SEED_SIZE, 98).asOpaque(),
+          public: Bytes.fill(BANDERSNATCH_KEY_BYTES, 98).asOpaque(),
+        },
+      ];
+      const result = await generateTickets(MOCK_BANDERSNATCH, ringKeys, invalidValidatorKeys, MOCK_ENTROPY, ticketsPerValidator);
+      assert.ok(result.isError);
+      assert.strictEqual(result.error, TicketGeneratorError.TicketGenerationFailed);
+    });
+  });
+});