@noble/curves 2.0.1 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +214 -122
- package/abstract/bls.d.ts +299 -16
- package/abstract/bls.d.ts.map +1 -1
- package/abstract/bls.js +82 -22
- package/abstract/bls.js.map +1 -1
- package/abstract/curve.d.ts +274 -27
- package/abstract/curve.d.ts.map +1 -1
- package/abstract/curve.js +177 -23
- package/abstract/curve.js.map +1 -1
- package/abstract/edwards.d.ts +166 -30
- package/abstract/edwards.d.ts.map +1 -1
- package/abstract/edwards.js +221 -86
- package/abstract/edwards.js.map +1 -1
- package/abstract/fft.d.ts +322 -10
- package/abstract/fft.d.ts.map +1 -1
- package/abstract/fft.js +154 -12
- package/abstract/fft.js.map +1 -1
- package/abstract/frost.d.ts +293 -0
- package/abstract/frost.d.ts.map +1 -0
- package/abstract/frost.js +704 -0
- package/abstract/frost.js.map +1 -0
- package/abstract/hash-to-curve.d.ts +173 -24
- package/abstract/hash-to-curve.d.ts.map +1 -1
- package/abstract/hash-to-curve.js +170 -31
- package/abstract/hash-to-curve.js.map +1 -1
- package/abstract/modular.d.ts +429 -37
- package/abstract/modular.d.ts.map +1 -1
- package/abstract/modular.js +414 -119
- package/abstract/modular.js.map +1 -1
- package/abstract/montgomery.d.ts +83 -12
- package/abstract/montgomery.d.ts.map +1 -1
- package/abstract/montgomery.js +32 -7
- package/abstract/montgomery.js.map +1 -1
- package/abstract/oprf.d.ts +164 -91
- package/abstract/oprf.d.ts.map +1 -1
- package/abstract/oprf.js +88 -29
- package/abstract/oprf.js.map +1 -1
- package/abstract/poseidon.d.ts +138 -7
- package/abstract/poseidon.d.ts.map +1 -1
- package/abstract/poseidon.js +178 -15
- package/abstract/poseidon.js.map +1 -1
- package/abstract/tower.d.ts +122 -3
- package/abstract/tower.d.ts.map +1 -1
- package/abstract/tower.js +323 -139
- package/abstract/tower.js.map +1 -1
- package/abstract/weierstrass.d.ts +339 -76
- package/abstract/weierstrass.d.ts.map +1 -1
- package/abstract/weierstrass.js +395 -205
- package/abstract/weierstrass.js.map +1 -1
- package/bls12-381.d.ts +16 -2
- package/bls12-381.d.ts.map +1 -1
- package/bls12-381.js +199 -209
- package/bls12-381.js.map +1 -1
- package/bn254.d.ts +11 -2
- package/bn254.d.ts.map +1 -1
- package/bn254.js +93 -38
- package/bn254.js.map +1 -1
- package/ed25519.d.ts +125 -14
- package/ed25519.d.ts.map +1 -1
- package/ed25519.js +202 -40
- package/ed25519.js.map +1 -1
- package/ed448.d.ts +108 -14
- package/ed448.d.ts.map +1 -1
- package/ed448.js +194 -42
- package/ed448.js.map +1 -1
- package/index.js +7 -1
- package/index.js.map +1 -1
- package/misc.d.ts +106 -7
- package/misc.d.ts.map +1 -1
- package/misc.js +141 -32
- package/misc.js.map +1 -1
- package/nist.d.ts +112 -11
- package/nist.d.ts.map +1 -1
- package/nist.js +139 -17
- package/nist.js.map +1 -1
- package/package.json +11 -6
- package/secp256k1.d.ts +92 -15
- package/secp256k1.d.ts.map +1 -1
- package/secp256k1.js +211 -28
- package/secp256k1.js.map +1 -1
- package/src/abstract/bls.ts +350 -67
- package/src/abstract/curve.ts +327 -44
- package/src/abstract/edwards.ts +367 -143
- package/src/abstract/fft.ts +369 -36
- package/src/abstract/frost.ts +1092 -0
- package/src/abstract/hash-to-curve.ts +255 -56
- package/src/abstract/modular.ts +591 -144
- package/src/abstract/montgomery.ts +114 -30
- package/src/abstract/oprf.ts +383 -194
- package/src/abstract/poseidon.ts +235 -35
- package/src/abstract/tower.ts +428 -159
- package/src/abstract/weierstrass.ts +710 -312
- package/src/bls12-381.ts +239 -236
- package/src/bn254.ts +107 -46
- package/src/ed25519.ts +227 -55
- package/src/ed448.ts +227 -57
- package/src/index.ts +7 -1
- package/src/misc.ts +154 -35
- package/src/nist.ts +143 -20
- package/src/secp256k1.ts +284 -41
- package/src/utils.ts +583 -81
- package/src/webcrypto.ts +302 -73
- package/utils.d.ts +457 -24
- package/utils.d.ts.map +1 -1
- package/utils.js +410 -53
- package/utils.js.map +1 -1
- package/webcrypto.d.ts +167 -25
- package/webcrypto.d.ts.map +1 -1
- package/webcrypto.js +165 -58
- package/webcrypto.js.map +1 -1
|
@@ -0,0 +1,704 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* FROST: Flexible Round-Optimized Schnorr Threshold Protocol for Two-Round Schnorr Signatures.
|
|
3
|
+
*
|
|
4
|
+
* See [RFC 9591](https://datatracker.ietf.org/doc/rfc9591/) and [frost.zfnd.org](https://frost.zfnd.org).
|
|
5
|
+
* @module
|
|
6
|
+
*/
|
|
7
|
+
import { utf8ToBytes } from '@noble/hashes/utils.js';
|
|
8
|
+
import { bytesToHex, bytesToNumberBE, bytesToNumberLE, concatBytes, hexToBytes, randomBytes, validateObject, } from "../utils.js";
|
|
9
|
+
import { pippenger, validatePointCons } from "./curve.js";
|
|
10
|
+
import { poly } from "./fft.js";
|
|
11
|
+
import {} from "./hash-to-curve.js";
|
|
12
|
+
import { getMinHashLength, mapHashToField } from "./modular.js";
|
|
13
|
+
// PubKey = commitments, verifyingShares
|
|
14
|
+
// PrivKey = id, signingShare, commitment
|
|
15
|
+
// Sanity-checks threshold metadata: both bounds must be safe integers,
// both must be >= 2, and min must not exceed max.
const validateSigners = (signers) => {
    const { min, max } = signers;
    const fail = () => {
        throw new Error(`Wrong signers info: min=${min} max=${max}`);
    };
    if (!Number.isSafeInteger(min) || !Number.isSafeInteger(max))
        fail();
    // Compatibility with frost-rs intentionally narrows RFC 9591's positive-nonzero threshold rule
    // to `min >= 2`, even though the RFC text itself allows `MIN_PARTICIPANTS = 1`.
    // This API is for actual threshold signing across participants; 1-of-n degenerates to ordinary
    // single-signer mode, which does not need FROST's network/coordination machinery at all.
    if (min < 2 || max < 2 || min > max)
        fail();
};
|
|
25
|
+
// Rejects a commitment list whose size falls outside the signer bounds.
// RFC 9591 Sections 5.2/5.3 require MIN_PARTICIPANTS <= NUM_PARTICIPANTS <= MAX_PARTICIPANTS.
const validateCommitmentsNum = (signers, len) => {
    const tooFew = len < signers.min;
    const tooMany = len > signers.max;
    if (tooFew || tooMany)
        throw new Error(`Wrong number of commitments=${len}`);
};
|
|
30
|
+
/**
 * Error raised when FROST signature-share aggregation fails.
 * `cheaters` lists the identifiers of signers whose shares failed
 * verification; it is empty when aggregation failed before per-share
 * verification could attribute a misbehaving signer.
 */
class AggErr extends Error {
    // Empty means aggregation failed before per-share verification could attribute a signer.
    cheaters;
    constructor(msg, cheaters) {
        super(msg);
        // Subclassed Errors should report their own name instead of the
        // inherited 'Error', so logs and stack traces identify the failure.
        this.name = 'AggErr';
        this.cheaters = cheaters;
    }
}
|
|
38
|
+
export function createFROST(opts) {
|
|
39
|
+
validateObject(opts, {
|
|
40
|
+
name: 'string',
|
|
41
|
+
hash: 'function',
|
|
42
|
+
}, {
|
|
43
|
+
hashToScalar: 'function',
|
|
44
|
+
validatePoint: 'function',
|
|
45
|
+
parsePublicKey: 'function',
|
|
46
|
+
adjustScalar: 'function',
|
|
47
|
+
adjustPoint: 'function',
|
|
48
|
+
challenge: 'function',
|
|
49
|
+
adjustNonces: 'function',
|
|
50
|
+
adjustSecret: 'function',
|
|
51
|
+
adjustPublic: 'function',
|
|
52
|
+
adjustGroupCommitmentShare: 'function',
|
|
53
|
+
adjustDKG: 'function',
|
|
54
|
+
});
|
|
55
|
+
// Cheap constructor-surface sanity check only: this verifies the generic static hooks/fields that
|
|
56
|
+
// FROST consumes, but it does not certify point semantics like BASE/ZERO correctness.
|
|
57
|
+
validatePointCons(opts.Point);
|
|
58
|
+
const { Point } = opts;
|
|
59
|
+
const Fn = opts.Fn === undefined ? Point.Fn : opts.Fn;
|
|
60
|
+
// Hashes
|
|
61
|
+
const hashBytes = opts.hash;
|
|
62
|
+
const hashToScalar = opts.hashToScalar === undefined
|
|
63
|
+
? (msg, opts = { DST: new Uint8Array() }) => {
|
|
64
|
+
const t = hashBytes(concatBytes(opts.DST, msg));
|
|
65
|
+
return Fn.create(Fn.isLE ? bytesToNumberLE(t) : bytesToNumberBE(t));
|
|
66
|
+
}
|
|
67
|
+
: opts.hashToScalar;
|
|
68
|
+
const H1Prefix = utf8ToBytes(opts.H1 !== undefined ? opts.H1 : opts.name + 'rho');
|
|
69
|
+
const H2Prefix = utf8ToBytes(opts.H2 !== undefined ? opts.H2 : opts.name + 'chal');
|
|
70
|
+
const H3Prefix = utf8ToBytes(opts.H3 !== undefined ? opts.H3 : opts.name + 'nonce');
|
|
71
|
+
const H4Prefix = utf8ToBytes(opts.H4 !== undefined ? opts.H4 : opts.name + 'msg');
|
|
72
|
+
const H5Prefix = utf8ToBytes(opts.H5 !== undefined ? opts.H5 : opts.name + 'com');
|
|
73
|
+
const HDKGPrefix = utf8ToBytes(opts.HDKG !== undefined ? opts.HDKG : opts.name + 'dkg');
|
|
74
|
+
const HIDPrefix = utf8ToBytes(opts.HID !== undefined ? opts.HID : opts.name + 'id');
|
|
75
|
+
const H1 = (msg) => hashToScalar(msg, { DST: H1Prefix });
|
|
76
|
+
// Empty H2 still passes `{ DST: new Uint8Array() }` into custom hashToScalar hooks.
|
|
77
|
+
// The built-in fallback hashes that identically to omitted DST, which is how
|
|
78
|
+
// the Ed25519 suite models RFC 9591's undecorated H2 challenge hash.
|
|
79
|
+
const H2 = (msg) => hashToScalar(msg, { DST: H2Prefix });
|
|
80
|
+
const H3 = (msg) => hashToScalar(msg, { DST: H3Prefix });
|
|
81
|
+
const H4 = (msg) => hashBytes(concatBytes(H4Prefix, msg));
|
|
82
|
+
const H5 = (msg) => hashBytes(concatBytes(H5Prefix, msg));
|
|
83
|
+
const HDKG = (msg) => hashToScalar(msg, { DST: HDKGPrefix });
|
|
84
|
+
const HID = (msg) => hashToScalar(msg, { DST: HIDPrefix });
|
|
85
|
+
// /Hashes
|
|
86
|
+
const randomScalar = (rng = randomBytes) => {
|
|
87
|
+
// Intentional divergence from RFC 9591 §4.1 / §5.1: the RFC nonce_generate helper outputs a
|
|
88
|
+
// Scalar in [0, p-1], but round-one commit publishes ScalarBaseMult(nonce) values and §3.1
|
|
89
|
+
// requires SerializeElement / DeserializeElement to reject the identity element. Keep noble's
|
|
90
|
+
// mapHashToField generation here so round-one public nonce commitments stay in 1..n-1.
|
|
91
|
+
const t = mapHashToField(rng(getMinHashLength(Fn.ORDER)), Fn.ORDER, Fn.isLE);
|
|
92
|
+
// We cannot use Fn.fromBytes here because the field can have a different
|
|
93
|
+
// byte width, like ed448.
|
|
94
|
+
return Fn.isLE ? bytesToNumberLE(t) : bytesToNumberBE(t);
|
|
95
|
+
};
|
|
96
|
+
const serializePoint = (p) => p.toBytes();
|
|
97
|
+
const parsePoint = (bytes) => {
|
|
98
|
+
// RFC 9591 Section 3.1 requires DeserializeElement validation. Suite-specific validatePoint
|
|
99
|
+
// hooks tighten this further for ciphersuites in Section 6. Bare createFROST(...) only gets
|
|
100
|
+
// canonical point decoding unless the caller installs those extra subgroup / identity checks.
|
|
101
|
+
const p = Point.fromBytes(bytes);
|
|
102
|
+
if (opts.validatePoint)
|
|
103
|
+
opts.validatePoint(p);
|
|
104
|
+
return p;
|
|
105
|
+
};
|
|
106
|
+
// RFC 9591 Sections 4.1/5.1 model each participant's round-one output as two public commitments.
|
|
107
|
+
const nonceCommitments = (identifier, nonces) => ({
|
|
108
|
+
identifier,
|
|
109
|
+
hiding: serializePoint(Point.BASE.multiply(Fn.fromBytes(nonces.hiding))),
|
|
110
|
+
binding: serializePoint(Point.BASE.multiply(Fn.fromBytes(nonces.binding))),
|
|
111
|
+
});
|
|
112
|
+
const adjustPoint = opts.adjustPoint === undefined ? (n) => n : opts.adjustPoint;
|
|
113
|
+
// We use hex to make it easier to use inside objects
|
|
114
|
+
const validateIdentifier = (n) => {
|
|
115
|
+
// Identifiers are canonical non-zero scalars. Custom / derived identifiers are allowed, so this
|
|
116
|
+
// is intentionally not bounded by the current signers.max slot count.
|
|
117
|
+
if (!Fn.isValid(n) || Fn.is0(n))
|
|
118
|
+
throw new Error('Invalid identifier ' + n);
|
|
119
|
+
return n;
|
|
120
|
+
};
|
|
121
|
+
const serializeIdentifier = (id) => bytesToHex(Fn.toBytes(validateIdentifier(id)));
|
|
122
|
+
const parseIdentifier = (id) => {
|
|
123
|
+
const n = validateIdentifier(Fn.fromBytes(hexToBytes(id)));
|
|
124
|
+
// Keep string-keyed maps stable by accepting only the canonical serialized form.
|
|
125
|
+
if (serializeIdentifier(n) !== id)
|
|
126
|
+
throw new Error('expected canonical identifier hex');
|
|
127
|
+
return n;
|
|
128
|
+
};
|
|
129
|
+
const Signature = {
|
|
130
|
+
// RFC 9591 Appendix A encodes signatures canonically as
|
|
131
|
+
// SerializeElement(R) || SerializeScalar(z).
|
|
132
|
+
encode: (R, z) => {
|
|
133
|
+
let res = concatBytes(serializePoint(R), Fn.toBytes(z));
|
|
134
|
+
if (opts.adjustTx)
|
|
135
|
+
res = opts.adjustTx.encode(res);
|
|
136
|
+
return res;
|
|
137
|
+
},
|
|
138
|
+
decode: (sig) => {
|
|
139
|
+
if (opts.adjustTx)
|
|
140
|
+
sig = opts.adjustTx.decode(sig);
|
|
141
|
+
// We don't know size of point, but we know size of scalar
|
|
142
|
+
const R = parsePoint(sig.subarray(0, -Fn.BYTES));
|
|
143
|
+
const z = Fn.fromBytes(sig.subarray(-Fn.BYTES));
|
|
144
|
+
return { R, z };
|
|
145
|
+
},
|
|
146
|
+
};
|
|
147
|
+
// Generates pair of (scalar, point)
|
|
148
|
+
const genPointScalarPair = (rng = randomBytes) => {
|
|
149
|
+
let n = randomScalar(rng);
|
|
150
|
+
if (opts.adjustScalar)
|
|
151
|
+
n = opts.adjustScalar(n);
|
|
152
|
+
let p = Point.BASE.multiply(n);
|
|
153
|
+
return { scalar: n, point: p };
|
|
154
|
+
};
|
|
155
|
+
// No roots here: root-based methods will throw.
|
|
156
|
+
// `poly` expects a structured roots-of-unity domain, but FROST uses an
|
|
157
|
+
// arbitrary domain and only needs the non-root operations below.
|
|
158
|
+
const nrErr = 'roots are unavailable in FROST polynomial mode';
|
|
159
|
+
const noRoots = {
|
|
160
|
+
info: { G: Fn.ZERO, oddFactor: Fn.ZERO, powerOfTwo: 0 },
|
|
161
|
+
roots() {
|
|
162
|
+
throw new Error(nrErr);
|
|
163
|
+
},
|
|
164
|
+
brp() {
|
|
165
|
+
throw new Error(nrErr);
|
|
166
|
+
},
|
|
167
|
+
inverse() {
|
|
168
|
+
throw new Error(nrErr);
|
|
169
|
+
},
|
|
170
|
+
omega() {
|
|
171
|
+
throw new Error(nrErr);
|
|
172
|
+
},
|
|
173
|
+
clear() { },
|
|
174
|
+
};
|
|
175
|
+
const Poly = poly(Fn, noRoots);
|
|
176
|
+
const msm = (points, scalars) => pippenger(Point, points, scalars);
|
|
177
|
+
// Internal stuff uses bigints & Points, external Uint8Arrays
|
|
178
|
+
const polynomialEvaluate = (x, coeffs) => {
|
|
179
|
+
if (!coeffs.length)
|
|
180
|
+
throw new Error('empty coefficients');
|
|
181
|
+
return Poly.monomial.eval(coeffs, x);
|
|
182
|
+
};
|
|
183
|
+
const deriveInterpolatingValue = (L, xi) => {
|
|
184
|
+
const err = 'invalid parameters';
|
|
185
|
+
// Generates lagrange coefficient
|
|
186
|
+
if (!L.some((x) => Fn.eql(x, xi)))
|
|
187
|
+
throw new Error(err);
|
|
188
|
+
// Throws error if any x-coordinate is represented more than once in L.
|
|
189
|
+
const Lset = new Set(L);
|
|
190
|
+
if (Lset.size !== L.length)
|
|
191
|
+
throw new Error(err);
|
|
192
|
+
// Or if xi is missing
|
|
193
|
+
if (!Lset.has(xi))
|
|
194
|
+
throw new Error(err);
|
|
195
|
+
let num = Fn.ONE;
|
|
196
|
+
let den = Fn.ONE;
|
|
197
|
+
for (const x of L) {
|
|
198
|
+
if (Fn.eql(x, xi))
|
|
199
|
+
continue;
|
|
200
|
+
num = Fn.mul(num, x); // num *= x
|
|
201
|
+
den = Fn.mul(den, Fn.sub(x, xi)); // RFC 9591 §4.2: denominator *= x_j - x_i
|
|
202
|
+
}
|
|
203
|
+
return Fn.div(num, den);
|
|
204
|
+
};
|
|
205
|
+
const evalutateVSS = (identifier, commitment) => {
|
|
206
|
+
// RFC 9591 Appendix C.2: S_i' = Σ_j ScalarMult(vss_commitment[j], i^j).
|
|
207
|
+
const monomial = Poly.monomial.basis(identifier, commitment.length);
|
|
208
|
+
return msm(commitment, monomial);
|
|
209
|
+
};
|
|
210
|
+
// High-level internal stuff
|
|
211
|
+
const generateSecretPolynomial = (signers, secret, coeffs, rng = randomBytes) => {
|
|
212
|
+
validateSigners(signers);
|
|
213
|
+
// Dealer/DKG polynomial sampling reuses the same hardened scalar derivation as round-one
|
|
214
|
+
// nonces: overriding `rng` only swaps the entropy source, not the non-zero `1..n-1` policy.
|
|
215
|
+
const secretScalar = secret === undefined ? randomScalar(rng) : Fn.fromBytes(secret);
|
|
216
|
+
if (!coeffs) {
|
|
217
|
+
coeffs = [];
|
|
218
|
+
for (let i = 0; i < signers.min - 1; i++)
|
|
219
|
+
coeffs.push(randomScalar(rng));
|
|
220
|
+
}
|
|
221
|
+
if (coeffs.length !== signers.min - 1)
|
|
222
|
+
throw new Error('wrong coefficients length');
|
|
223
|
+
const coefficients = [secretScalar, ...coeffs];
|
|
224
|
+
// RFC 9591 Appendix C.2 commits to every polynomial coefficient with ScalarBaseMult.
|
|
225
|
+
const commitment = coefficients.map((i) => Point.BASE.multiply(i));
|
|
226
|
+
return { coefficients, commitment, secret: secretScalar };
|
|
227
|
+
};
|
|
228
|
+
// Pretty much sign+verify, same as basic
|
|
229
|
+
const ProofOfKnowledge = {
|
|
230
|
+
challenge: (id, verKey, R) => HDKG(concatBytes(Fn.toBytes(id), serializePoint(verKey), serializePoint(R))),
|
|
231
|
+
compute(id, coefficents, commitments, rng = randomBytes) {
|
|
232
|
+
if (coefficents.length < 1)
|
|
233
|
+
throw new Error('coefficients should have at least one element');
|
|
234
|
+
const { point: R, scalar: k } = genPointScalarPair(rng);
|
|
235
|
+
const verKey = commitments[0]; // verify key is first one
|
|
236
|
+
const c = this.challenge(id, verKey, R);
|
|
237
|
+
const mu = Fn.add(k, Fn.mul(coefficents[0], c)); // mu = k + coeff[0] * c
|
|
238
|
+
return Signature.encode(R, mu);
|
|
239
|
+
},
|
|
240
|
+
validate(id, commitment, proof) {
|
|
241
|
+
if (commitment.length < 1)
|
|
242
|
+
throw new Error('commitment should have at least one element');
|
|
243
|
+
const { R, z } = Signature.decode(proof);
|
|
244
|
+
const phi = parsePoint(commitment[0]);
|
|
245
|
+
const c = this.challenge(id, phi, R);
|
|
246
|
+
// R === z*G - phi*c
|
|
247
|
+
if (!R.equals(Point.BASE.multiply(z).subtract(phi.multiply(c))))
|
|
248
|
+
throw new Error('invalid proof of knowledge');
|
|
249
|
+
},
|
|
250
|
+
};
|
|
251
|
+
const Basic = {
|
|
252
|
+
challenge: (R, PK, msg) => {
|
|
253
|
+
if (opts.challenge)
|
|
254
|
+
return opts.challenge(R, PK, msg);
|
|
255
|
+
return H2(concatBytes(serializePoint(R), serializePoint(PK), msg));
|
|
256
|
+
},
|
|
257
|
+
sign(msg, sk, rng = randomBytes) {
|
|
258
|
+
const { point: R, scalar: r } = genPointScalarPair(rng);
|
|
259
|
+
const PK = Point.BASE.multiply(sk); // sk*G
|
|
260
|
+
const c = this.challenge(R, PK, msg);
|
|
261
|
+
const z = Fn.add(r, Fn.mul(c, sk)); // r + c * sk
|
|
262
|
+
return [R, z];
|
|
263
|
+
},
|
|
264
|
+
verify(msg, R, z, PK) {
|
|
265
|
+
if (opts.adjustPoint)
|
|
266
|
+
PK = opts.adjustPoint(PK);
|
|
267
|
+
if (opts.adjustPoint)
|
|
268
|
+
R = opts.adjustPoint(R);
|
|
269
|
+
const c = this.challenge(R, PK, msg);
|
|
270
|
+
const zB = Point.BASE.multiply(z); // z*G
|
|
271
|
+
const cA = PK.multiply(c); // c*PK
|
|
272
|
+
let check = zB.subtract(cA).subtract(R); // zB - cA - R
|
|
273
|
+
// No clearCoffactor on ristretto
|
|
274
|
+
if (check.clearCofactor)
|
|
275
|
+
check = check.clearCofactor();
|
|
276
|
+
return Point.ZERO.equals(check);
|
|
277
|
+
},
|
|
278
|
+
};
|
|
279
|
+
// === vssVerify
|
|
280
|
+
const validateSecretShare = (identifier, commitment, signingShare) => {
|
|
281
|
+
// RFC 9591 Appendix C.2 `vss_verify(share_i, vss_commitment)` is purely algebraic.
|
|
282
|
+
// Public FROST packages still go through Section 3.1 element encoding,
|
|
283
|
+
// which rejects identity points, so a zero share or commitment does not
|
|
284
|
+
// become valid wire data just because VSS matches.
|
|
285
|
+
if (!Point.BASE.multiply(signingShare).equals(evalutateVSS(identifier, commitment)))
|
|
286
|
+
throw new Error('invalid secret share');
|
|
287
|
+
};
|
|
288
|
+
const Identifier = {
|
|
289
|
+
fromNumber(n) {
|
|
290
|
+
if (!Number.isSafeInteger(n))
|
|
291
|
+
throw new Error('expected safe interger');
|
|
292
|
+
return serializeIdentifier(BigInt(n));
|
|
293
|
+
},
|
|
294
|
+
// Not in spec, but in FROST implementation,
|
|
295
|
+
// seems useful and nice, no need to sync identifiers (would require more interactions)
|
|
296
|
+
derive(s) {
|
|
297
|
+
if (typeof s !== 'string')
|
|
298
|
+
throw new Error('wrong identifier string: ' + s);
|
|
299
|
+
// Derived identifiers may land anywhere in the scalar field; they are not restricted to
|
|
300
|
+
// sequential `1..max_signers` values.
|
|
301
|
+
return serializeIdentifier(HID(utf8ToBytes(s)));
|
|
302
|
+
},
|
|
303
|
+
};
|
|
304
|
+
// RFC 9591 §4.1: nonce_generate() hashes 32 fresh RNG bytes with SerializeScalar(secret).
|
|
305
|
+
const generateNonce = (secret, rng = randomBytes) => H3(concatBytes(rng(32), Fn.toBytes(secret)));
|
|
306
|
+
const getGroupCommitment = (GPK, commitmentList, msg) => {
|
|
307
|
+
const CL = commitmentList.map((i) => [
|
|
308
|
+
i.identifier,
|
|
309
|
+
parseIdentifier(i.identifier),
|
|
310
|
+
parsePoint(i.hiding),
|
|
311
|
+
parsePoint(i.binding),
|
|
312
|
+
]);
|
|
313
|
+
// RFC 9591 Sections 4.3/4.4/4.5 and 5.2/5.3 treat commitment_list as sorted by identifier.
|
|
314
|
+
CL.sort((a, b) => (a[1] < b[1] ? -1 : a[1] > b[1] ? 1 : 0));
|
|
315
|
+
// Encode commitment list
|
|
316
|
+
const Cbytes = [];
|
|
317
|
+
for (const [_, id, hC, bC] of CL)
|
|
318
|
+
Cbytes.push(Fn.toBytes(id), serializePoint(hC), serializePoint(bC));
|
|
319
|
+
const encodedCommitmentHash = H5(concatBytes(...Cbytes));
|
|
320
|
+
const rhoPrefix = concatBytes(serializePoint(GPK), H4(msg), encodedCommitmentHash);
|
|
321
|
+
// Compute binding factors
|
|
322
|
+
const bindingFactors = {};
|
|
323
|
+
for (const [i, id] of CL) {
|
|
324
|
+
bindingFactors[i] = H1(concatBytes(rhoPrefix, Fn.toBytes(id)));
|
|
325
|
+
}
|
|
326
|
+
const points = [];
|
|
327
|
+
const scalars = [];
|
|
328
|
+
for (const [i, _, hC, bC] of CL) {
|
|
329
|
+
if (Point.ZERO.equals(hC) || Point.ZERO.equals(bC))
|
|
330
|
+
throw new Error('infinity commitment');
|
|
331
|
+
points.push(hC, bC);
|
|
332
|
+
scalars.push(Fn.ONE, bindingFactors[i]);
|
|
333
|
+
}
|
|
334
|
+
const groupCommitment = msm(points, scalars); // GC += hC + bC*bindingFactor
|
|
335
|
+
const identifiers = CL.map((i) => i[1]);
|
|
336
|
+
return { identifiers, groupCommitment, bindingFactors };
|
|
337
|
+
};
|
|
338
|
+
const prepareShare = (PK, commitmentList, msg, identifier) => {
|
|
339
|
+
// RFC 9591 Sections 4.4/4.5/4.6 feed directly into the Section 5.2 signer computation.
|
|
340
|
+
const GPK = adjustPoint(parsePoint(PK));
|
|
341
|
+
const id = parseIdentifier(identifier);
|
|
342
|
+
const { identifiers, groupCommitment, bindingFactors } = getGroupCommitment(GPK, commitmentList, msg);
|
|
343
|
+
const bindingFactor = bindingFactors[identifier];
|
|
344
|
+
const lambda = deriveInterpolatingValue(identifiers, id);
|
|
345
|
+
const challenge = Basic.challenge(groupCommitment, GPK, msg);
|
|
346
|
+
return { lambda, challenge, bindingFactor, groupCommitment };
|
|
347
|
+
};
|
|
348
|
+
Object.freeze(Identifier);
|
|
349
|
+
const frost = {
|
|
350
|
+
Identifier,
|
|
351
|
+
// DKG is Distributed Key Generation, not Trusted Dealer Key Generation.
|
|
352
|
+
DKG: Object.freeze({
|
|
353
|
+
// NOTE: we allow to pass secret scalar from user side,
|
|
354
|
+
// this way it can be derived, instead of random generation
|
|
355
|
+
round1: (id, signers, secret, rng = randomBytes) => {
|
|
356
|
+
validateSigners(signers);
|
|
357
|
+
const idNum = parseIdentifier(id);
|
|
358
|
+
const { coefficients, commitment } = generateSecretPolynomial(signers, secret, undefined, rng);
|
|
359
|
+
const proofOfKnowledge = ProofOfKnowledge.compute(idNum, coefficients, commitment, rng);
|
|
360
|
+
const commitmentBytes = commitment.map(serializePoint);
|
|
361
|
+
const round1Public = {
|
|
362
|
+
identifier: serializeIdentifier(idNum),
|
|
363
|
+
commitment: commitmentBytes,
|
|
364
|
+
proofOfKnowledge,
|
|
365
|
+
};
|
|
366
|
+
// store secret information for signing
|
|
367
|
+
const round1Secret = {
|
|
368
|
+
identifier: idNum,
|
|
369
|
+
coefficients,
|
|
370
|
+
commitment: commitment.map(serializePoint),
|
|
371
|
+
// Copy threshold metadata instead of retaining the caller-owned object by reference.
|
|
372
|
+
signers: { min: signers.min, max: signers.max },
|
|
373
|
+
step: 1,
|
|
374
|
+
};
|
|
375
|
+
return { public: round1Public, secret: round1Secret };
|
|
376
|
+
},
|
|
377
|
+
round2: (secret, others) => {
|
|
378
|
+
if (others.length !== secret.signers.max - 1)
|
|
379
|
+
throw new Error('wrong number of round1 packages');
|
|
380
|
+
if (!secret.coefficients || secret.step === 3)
|
|
381
|
+
throw new Error('round3 package used in round2');
|
|
382
|
+
const res = {};
|
|
383
|
+
for (const p of others) {
|
|
384
|
+
if (p.commitment.length !== secret.signers.min)
|
|
385
|
+
throw new Error('wrong number of commitments');
|
|
386
|
+
const id = parseIdentifier(p.identifier);
|
|
387
|
+
if (id === secret.identifier)
|
|
388
|
+
throw new Error('duplicate id=' + serializeIdentifier(id));
|
|
389
|
+
ProofOfKnowledge.validate(id, p.commitment, p.proofOfKnowledge);
|
|
390
|
+
for (const c of p.commitment)
|
|
391
|
+
parsePoint(c);
|
|
392
|
+
if (res[p.identifier])
|
|
393
|
+
throw new Error('Duplicate id=' + id);
|
|
394
|
+
const signingShare = Fn.toBytes(polynomialEvaluate(id, secret.coefficients));
|
|
395
|
+
res[p.identifier] = {
|
|
396
|
+
identifier: serializeIdentifier(secret.identifier),
|
|
397
|
+
signingShare: signingShare,
|
|
398
|
+
};
|
|
399
|
+
}
|
|
400
|
+
secret.step = 2;
|
|
401
|
+
return res;
|
|
402
|
+
},
|
|
403
|
+
round3: (secret, round1, round2) => {
|
|
404
|
+
// DKG is outside RFC 9591's signing flow; callers are expected to reuse the same
|
|
405
|
+
// remote round1 packages already accepted in round2, like frost-rs documents.
|
|
406
|
+
if (round1.length !== secret.signers.max - 1)
|
|
407
|
+
throw new Error('wrong length of round1 packages');
|
|
408
|
+
if (!secret.coefficients || secret.step !== 2)
|
|
409
|
+
throw new Error('round2 package used in round3');
|
|
410
|
+
if (round2.length !== round1.length)
|
|
411
|
+
throw new Error('wrong length of round2 packages');
|
|
412
|
+
const merged = {};
|
|
413
|
+
for (const r1 of round1) {
|
|
414
|
+
if (!r1.identifier || !r1.commitment)
|
|
415
|
+
throw new Error('wrong round1 share');
|
|
416
|
+
merged[r1.identifier] = { ...r1 };
|
|
417
|
+
}
|
|
418
|
+
for (const r2 of round2) {
|
|
419
|
+
if (!r2.identifier || !r2.signingShare)
|
|
420
|
+
throw new Error('wrong round2 share');
|
|
421
|
+
if (!merged[r2.identifier])
|
|
422
|
+
throw new Error('round1 share for ' + r2.identifier + ' is missing');
|
|
423
|
+
merged[r2.identifier].signingShare = r2.signingShare;
|
|
424
|
+
}
|
|
425
|
+
if (Object.keys(merged).length !== round1.length)
|
|
426
|
+
throw new Error('mismatch identifiers between rounds');
|
|
427
|
+
let signingShare = Fn.ZERO;
|
|
428
|
+
if (secret.commitment.length !== secret.signers.min)
|
|
429
|
+
throw new Error('wrong commitments length');
|
|
430
|
+
const localCommitment = secret.commitment.map(parsePoint);
|
|
431
|
+
const localShare = polynomialEvaluate(secret.identifier, secret.coefficients);
|
|
432
|
+
validateSecretShare(secret.identifier, localCommitment, localShare);
|
|
433
|
+
const localCommitmentBytes = localCommitment.map(serializePoint);
|
|
434
|
+
const commitments = {
|
|
435
|
+
[serializeIdentifier(secret.identifier)]: localCommitmentBytes,
|
|
436
|
+
};
|
|
437
|
+
for (const k in merged) {
|
|
438
|
+
const v = merged[k];
|
|
439
|
+
if (!v.signingShare || !v.commitment)
|
|
440
|
+
throw new Error('mismatch identifiers');
|
|
441
|
+
const id = parseIdentifier(k); // from
|
|
442
|
+
const signingSharePart = Fn.fromBytes(v.signingShare);
|
|
443
|
+
const commitment = v.commitment.map(parsePoint);
|
|
444
|
+
validateSecretShare(secret.identifier, commitment, signingSharePart);
|
|
445
|
+
signingShare = Fn.add(signingShare, signingSharePart);
|
|
446
|
+
const idSer = serializeIdentifier(id);
|
|
447
|
+
if (commitments[idSer])
|
|
448
|
+
throw new Error('duplicated id=' + idSer);
|
|
449
|
+
commitments[idSer] = v.commitment;
|
|
450
|
+
}
|
|
451
|
+
signingShare = Fn.add(signingShare, localShare);
|
|
452
|
+
const mergedCommitment = new Array(secret.signers.min).fill(Point.ZERO);
|
|
453
|
+
for (const k in commitments) {
|
|
454
|
+
const v = commitments[k];
|
|
455
|
+
if (v.length !== secret.signers.min)
|
|
456
|
+
throw new Error('wrong commitments length');
|
|
457
|
+
for (let i = 0; i < v.length; i++)
|
|
458
|
+
mergedCommitment[i] = mergedCommitment[i].add(parsePoint(v[i]));
|
|
459
|
+
}
|
|
460
|
+
const mergedCommitmentBytes = mergedCommitment.map(serializePoint);
|
|
461
|
+
const verifyingShares = {};
|
|
462
|
+
for (const k in commitments)
|
|
463
|
+
verifyingShares[k] = serializePoint(evalutateVSS(parseIdentifier(k), mergedCommitment));
|
|
464
|
+
// This is enough to sign stuff
|
|
465
|
+
let res = {
|
|
466
|
+
public: {
|
|
467
|
+
signers: { min: secret.signers.min, max: secret.signers.max },
|
|
468
|
+
commitments: mergedCommitmentBytes,
|
|
469
|
+
verifyingShares: Object.fromEntries(Object.entries(verifyingShares).map(([k, v]) => [k, v.slice()])),
|
|
470
|
+
},
|
|
471
|
+
secret: {
|
|
472
|
+
identifier: serializeIdentifier(secret.identifier),
|
|
473
|
+
signingShare: Fn.toBytes(signingShare),
|
|
474
|
+
},
|
|
475
|
+
};
|
|
476
|
+
if (opts.adjustDKG)
|
|
477
|
+
res = opts.adjustDKG(res);
|
|
478
|
+
for (let i = 0; i < secret.coefficients.length; i++)
|
|
479
|
+
secret.coefficients[i] -= secret.coefficients[i];
|
|
480
|
+
delete secret.coefficients;
|
|
481
|
+
secret.step = 3;
|
|
482
|
+
return res;
|
|
483
|
+
},
|
|
484
|
+
clean(secret) {
|
|
485
|
+
// Instead of replacing secret bigint with another (zero?), we subtract it from itself
|
|
486
|
+
// in the hope that JIT will modify it inplace, instead of creating new value.
|
|
487
|
+
// This is unverified and may not work, but it is best we can do in regard of bigints.
|
|
488
|
+
secret.identifier -= secret.identifier;
|
|
489
|
+
if (secret.coefficients) {
|
|
490
|
+
for (let i = 0; i < secret.coefficients.length; i++)
|
|
491
|
+
secret.coefficients[i] -= secret.coefficients[i];
|
|
492
|
+
}
|
|
493
|
+
// for (const c of secret.commitment) c.fill(0);
|
|
494
|
+
secret.step = 3;
|
|
495
|
+
},
|
|
496
|
+
}),
|
|
497
|
+
// Trusted dealer setup
|
|
498
|
+
// Generates keys for all participants
|
|
499
|
+
trustedDealer(signers, identifiers, secret, rng = randomBytes) {
|
|
500
|
+
// if no identifiers provided, we generated default identifiers
|
|
501
|
+
validateSigners(signers);
|
|
502
|
+
if (identifiers === undefined) {
|
|
503
|
+
identifiers = [];
|
|
504
|
+
for (let i = 1; i <= signers.max; i++)
|
|
505
|
+
identifiers.push(Identifier.fromNumber(i));
|
|
506
|
+
}
|
|
507
|
+
else {
|
|
508
|
+
if (!Array.isArray(identifiers) || identifiers.length !== signers.max)
|
|
509
|
+
throw new Error('identifiers should be array of ' + signers.max);
|
|
510
|
+
}
|
|
511
|
+
const identifierNums = {};
|
|
512
|
+
for (const id of identifiers) {
|
|
513
|
+
const idNum = parseIdentifier(id);
|
|
514
|
+
if (id in identifierNums)
|
|
515
|
+
throw new Error('duplicated id=' + id);
|
|
516
|
+
identifierNums[id] = idNum;
|
|
517
|
+
}
|
|
518
|
+
const sp = generateSecretPolynomial(signers, secret, undefined, rng);
|
|
519
|
+
const commitmentBytes = sp.commitment.map(serializePoint);
|
|
520
|
+
const secretShares = {};
|
|
521
|
+
const verifyingShares = {};
|
|
522
|
+
for (const id of identifiers) {
|
|
523
|
+
const signingShare = polynomialEvaluate(identifierNums[id], sp.coefficients);
|
|
524
|
+
verifyingShares[id] = serializePoint(Point.BASE.multiply(signingShare));
|
|
525
|
+
secretShares[id] = {
|
|
526
|
+
identifier: id,
|
|
527
|
+
signingShare: Fn.toBytes(signingShare),
|
|
528
|
+
};
|
|
529
|
+
}
|
|
530
|
+
return {
|
|
531
|
+
public: {
|
|
532
|
+
signers: { min: signers.min, max: signers.max },
|
|
533
|
+
commitments: commitmentBytes,
|
|
534
|
+
verifyingShares,
|
|
535
|
+
},
|
|
536
|
+
secretShares,
|
|
537
|
+
};
|
|
538
|
+
},
|
|
539
|
+
// Validate secret (from trusted dealer or DKG)
|
|
540
|
+
validateSecret(secret, pub) {
|
|
541
|
+
const id = parseIdentifier(secret.identifier);
|
|
542
|
+
const commitment = pub.commitments.map(parsePoint);
|
|
543
|
+
const signingShare = Fn.fromBytes(secret.signingShare);
|
|
544
|
+
validateSecretShare(id, commitment, signingShare);
|
|
545
|
+
},
|
|
546
|
+
// Actual signing
|
|
547
|
+
// Round 1: each participant commit to nonces
|
|
548
|
+
// Nonces kept private, commitments sent to coordinator (or every other participant)
|
|
549
|
+
// NOTE: we don't need the message at this point, which lets a coordinator
|
|
550
|
+
// keep multiple nonce commitments per participant in advance and skip
|
|
551
|
+
// round1 for signing.
|
|
552
|
+
// But then each participant needs to remember generated shares
|
|
553
|
+
commit(secret, rng = randomBytes) {
|
|
554
|
+
const secretScalar = Fn.fromBytes(secret.signingShare);
|
|
555
|
+
const hiding = generateNonce(secretScalar, rng);
|
|
556
|
+
const binding = generateNonce(secretScalar, rng);
|
|
557
|
+
const nonces = { hiding: Fn.toBytes(hiding), binding: Fn.toBytes(binding) };
|
|
558
|
+
return { nonces, commitments: nonceCommitments(secret.identifier, nonces) };
|
|
559
|
+
},
|
|
560
|
+
// Round2: sign. Each participant creates a signature share from the secret
// and the selected nonce commitments.
// - secret: participant's { identifier, signingShare }
// - pub: group public info from trustedDealer/DKG
// - nonces: the participant's own round-1 nonces (consumed on success)
// - commitmentList: round-1 commitments selected by the coordinator
// - msg: message being signed
// Returns the participant's signature-share bytes. Throws if the nonces were
// already used or the coordinator-assigned commitment doesn't match.
signShare(secret, pub, nonces, commitmentList, msg) {
    validateCommitmentsNum(pub.signers, commitmentList.length);
    const hidingNonce0 = Fn.fromBytes(nonces.hiding);
    const bindingNonce0 = Fn.fromBytes(nonces.binding);
    // All-zero nonces are the "already consumed" marker set at the end of this
    // function — fail closed on reuse.
    if (Fn.is0(hidingNonce0) || Fn.is0(bindingNonce0))
        throw new Error('signing nonces already used');
    // Reject a coordinator-assigned commitment pair that does not match the signer's own nonce
    // pair. This must happen before suite-specific nonce adjustment; secp256k1-tr may negate the
    // actual signing nonces later, but the coordinator still assigns the original commitments.
    const expectedCommitment = {
        identifier: secret.identifier,
        hiding: serializePoint(Point.BASE.multiply(hidingNonce0)),
        binding: serializePoint(Point.BASE.multiply(bindingNonce0)),
    };
    const commitment = commitmentList.find((i) => i.identifier === secret.identifier);
    if (!commitment)
        throw new Error('missing signer commitment');
    if (bytesToHex(commitment.hiding) !== bytesToHex(expectedCommitment.hiding) ||
        bytesToHex(commitment.binding) !== bytesToHex(expectedCommitment.binding))
        throw new Error('incorrect signer commitment');
    // Suite hooks (e.g. secp256k1-tr / taproot parity normalization).
    if (opts.adjustSecret)
        secret = opts.adjustSecret(secret, pub);
    if (opts.adjustPublic)
        pub = opts.adjustPublic(pub);
    const SK = Fn.fromBytes(secret.signingShare);
    // lambda: Lagrange coefficient for this signer; challenge: Schnorr challenge;
    // bindingFactor: per-signer binding factor; groupCommitment: aggregated R.
    const { lambda, challenge, bindingFactor, groupCommitment } = prepareShare(pub.commitments[0], commitmentList, msg, secret.identifier);
    // Suites may negate nonces depending on the parity of the group commitment.
    const N = opts.adjustNonces ? opts.adjustNonces(groupCommitment, nonces) : nonces;
    const hidingNonce = opts.adjustNonces ? Fn.fromBytes(N.hiding) : hidingNonce0;
    const bindingNonce = opts.adjustNonces ? Fn.fromBytes(N.binding) : bindingNonce0;
    const t = Fn.mul(Fn.mul(lambda, SK), challenge); // challenge * lambda * SK
    const t2 = Fn.mul(bindingNonce, bindingFactor); // bindingNonce * bindingFactor
    const r = Fn.toBytes(Fn.add(Fn.add(hidingNonce, t2), t)); // t + t2 + hidingNonce
    // RFC 9591 round-one commitments are one-time-use, and round two must use the nonce
    // corresponding to the published commitment. This API returns mutable local nonce bytes,
    // so consume them after a successful signShare() call: later all-zero reuse fails closed.
    nonces.hiding.fill(0);
    nonces.binding.fill(0);
    return r;
},
|
|
601
|
+
// Each participant (or coordinator) can verify signatures from other participants
// without learning any secrets: checks sigShare*G against the signer's
// commitment share plus verifyingShare * (challenge * lambda).
// Throws if the identifier has no commitment in commitmentList; returns boolean.
verifyShare(pub, commitmentList, msg, identifier, sigShare) {
    // Same suite hook as signShare, so both sides derive the same challenge.
    if (opts.adjustPublic)
        pub = opts.adjustPublic(pub);
    const comm = commitmentList.find((i) => i.identifier === identifier);
    if (!comm)
        throw new Error('cannot find identifier commitment');
    // Signer's public verifying share (signingShare * G, published at key-gen).
    const PK = parsePoint(pub.verifyingShares[identifier]);
    const hidingNonceCommitment = parsePoint(comm.hiding);
    const bindingNonceCommitment = parsePoint(comm.binding);
    const { lambda, challenge, bindingFactor, groupCommitment } = prepareShare(pub.commitments[0], commitmentList, msg, identifier);
    // hC + bC * bF
    let commShare = hidingNonceCommitment.add(bindingNonceCommitment.multiply(bindingFactor));
    // Suites may flip the commitment share to match adjusted (negated) nonces.
    if (opts.adjustGroupCommitmentShare)
        commShare = opts.adjustGroupCommitmentShare(groupCommitment, commShare);
    const l = Point.BASE.multiply(Fn.fromBytes(sigShare)); // sigShare*G
    // commShare + PK * (challenge * lambda)
    const r = commShare.add(PK.multiply(Fn.mul(challenge, lambda)));
    return l.equals(r);
},
|
|
621
|
+
// Aggregate multiple signature shares into groupSignature
// On failure, throws AggErr carrying the identifiers of misbehaving signers
// (empty list when the failure happens before shares can be attributed).
// Returns the encoded group signature on success.
aggregate(pub, commitmentList, msg, sigShares) {
    if (opts.adjustPublic)
        pub = opts.adjustPublic(pub);
    try {
        validateCommitmentsNum(pub.signers, commitmentList.length);
    }
    catch {
        // Re-wrap as AggErr so callers get a uniform aggregation error type.
        throw new AggErr('aggregation failed', []);
    }
    const ids = commitmentList.map((i) => i.identifier);
    // Shares must correspond 1:1 with the commitment list.
    if (ids.length !== Object.keys(sigShares).length)
        throw new AggErr('aggregation failed', []);
    for (const id of ids) {
        if (!(id in sigShares) || !(id in pub.verifyingShares))
            throw new AggErr('aggregation failed', []);
    }
    const GPK = parsePoint(pub.commitments[0]);
    const { groupCommitment } = getGroupCommitment(GPK, commitmentList, msg);
    let z = Fn.ZERO;
    // RFC 9591 Section 5.3 aggregates by summing the validated signature shares.
    for (const id of ids)
        z = Fn.add(z, Fn.fromBytes(sigShares[id])); // z += zi
    if (!Basic.verify(msg, groupCommitment, z, GPK)) {
        // Cheap whole-signature check failed: do the expensive per-share checks
        // only now, to identify which participants sent bad shares.
        // NOTE(review): verifyShare applies opts.adjustPublic again to the
        // already-adjusted pub; presumably the adjustment is idempotent — confirm.
        const cheaters = [];
        for (const id of ids) {
            if (!this.verifyShare(pub, commitmentList, msg, id, sigShares[id]))
                cheaters.push(id);
        }
        throw new AggErr('aggregation failed', cheaters);
    }
    return Signature.encode(groupCommitment, z);
},
|
|
654
|
+
// Basic sign/verify using single key
|
|
655
|
+
sign(msg, secretKey) {
|
|
656
|
+
let sk = Fn.fromBytes(secretKey);
|
|
657
|
+
// Taproot single-key signing needs the same scalar normalization as threshold keys.
|
|
658
|
+
if (opts.adjustScalar)
|
|
659
|
+
sk = opts.adjustScalar(sk);
|
|
660
|
+
const [R, z] = Basic.sign(msg, sk);
|
|
661
|
+
return Signature.encode(R, z);
|
|
662
|
+
},
|
|
663
|
+
verify(sig, msg, publicKey) {
|
|
664
|
+
const PK = opts.parsePublicKey ? opts.parsePublicKey(publicKey) : parsePoint(publicKey);
|
|
665
|
+
const { R, z } = Signature.decode(sig);
|
|
666
|
+
return Basic.verify(msg, R, z, PK);
|
|
667
|
+
},
|
|
668
|
+
// Combine multiple secret shares to restore secret
|
|
669
|
+
combineSecret(shares, signers) {
|
|
670
|
+
validateSigners(signers);
|
|
671
|
+
if (!Array.isArray(shares) || shares.length < signers.min)
|
|
672
|
+
throw new Error('wrong secret shares array');
|
|
673
|
+
const points = [];
|
|
674
|
+
const seen = {};
|
|
675
|
+
// Interpolate over the full provided share set and reject duplicate identifiers.
|
|
676
|
+
for (const s of shares) {
|
|
677
|
+
const idNum = parseIdentifier(s.identifier);
|
|
678
|
+
const id = serializeIdentifier(idNum);
|
|
679
|
+
if (seen[id])
|
|
680
|
+
throw new Error('duplicated id=' + id);
|
|
681
|
+
seen[id] = true;
|
|
682
|
+
points.push([idNum, Fn.fromBytes(s.signingShare)]);
|
|
683
|
+
}
|
|
684
|
+
const xCoords = points.map(([x]) => x);
|
|
685
|
+
let res = Fn.ZERO;
|
|
686
|
+
for (const [x, y] of points)
|
|
687
|
+
res = Fn.add(res, Fn.mul(y, deriveInterpolatingValue(xCoords, x)));
|
|
688
|
+
return Fn.toBytes(res);
|
|
689
|
+
},
|
|
690
|
+
// Utils
|
|
691
|
+
utils: Object.freeze({
|
|
692
|
+
Fn, // NOTE: we re-export it here because it may be different from Point.Fn (ed448 is fun!)
|
|
693
|
+
// Test RNG overrides still go through noble's non-zero scalar derivation; this is not a raw
|
|
694
|
+
// "bytes become scalar" escape hatch.
|
|
695
|
+
randomScalar: (rng = randomBytes) => Fn.toBytes(genPointScalarPair(rng).scalar),
|
|
696
|
+
generateSecretPolynomial: (signers, secret, coeffs, rng) => {
|
|
697
|
+
const res = generateSecretPolynomial(signers, secret, coeffs, rng);
|
|
698
|
+
return { ...res, commitment: res.commitment.map(serializePoint) };
|
|
699
|
+
},
|
|
700
|
+
}),
|
|
701
|
+
};
|
|
702
|
+
return Object.freeze(frost);
|
|
703
|
+
}
|
|
704
|
+
//# sourceMappingURL=frost.js.map
|