@voter-protocol/noir-prover 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. package/circuits/district_membership_14.json +1 -0
  2. package/circuits/district_membership_18.json +1 -0
  3. package/circuits/district_membership_20.json +1 -0
  4. package/circuits/district_membership_22.json +1 -0
  5. package/circuits/district_membership_24.json +1 -0
  6. package/circuits/two_tree_membership_18.json +1 -0
  7. package/circuits/two_tree_membership_20.json +1 -0
  8. package/circuits/two_tree_membership_22.json +1 -0
  9. package/circuits/two_tree_membership_24.json +1 -0
  10. package/dist/district_membership_18-Breq2tlt.js +128 -0
  11. package/dist/district_membership_18-DSj7IJGE.cjs +139 -0
  12. package/dist/district_membership_20-Bu0PWpWT.js +128 -0
  13. package/dist/district_membership_20-DKRyEvX4.cjs +139 -0
  14. package/dist/district_membership_22-DlrHPQtq.js +128 -0
  15. package/dist/district_membership_22-pal861Dz.cjs +139 -0
  16. package/dist/district_membership_24-BAJ-CEhq.cjs +139 -0
  17. package/dist/district_membership_24-CM66_Yd9.js +128 -0
  18. package/dist/index.cjs +1041 -123
  19. package/dist/index.js +1030 -124
  20. package/dist/noir-prover/src/cross-origin-isolation.d.ts +1 -0
  21. package/dist/noir-prover/src/cross-origin-isolation.d.ts.map +1 -0
  22. package/dist/noir-prover/src/fixtures.d.ts +82 -0
  23. package/dist/noir-prover/src/fixtures.d.ts.map +1 -0
  24. package/dist/noir-prover/src/hash.worker.d.ts +2 -0
  25. package/dist/noir-prover/src/hash.worker.d.ts.map +1 -0
  26. package/dist/noir-prover/src/index.d.ts +10 -2
  27. package/dist/noir-prover/src/index.d.ts.map +1 -0
  28. package/dist/noir-prover/src/profiler.d.ts +74 -0
  29. package/dist/noir-prover/src/profiler.d.ts.map +1 -0
  30. package/dist/noir-prover/src/prover-e2e.test.d.ts +1 -0
  31. package/dist/noir-prover/src/prover-e2e.test.d.ts.map +1 -0
  32. package/dist/noir-prover/src/prover-orchestrator.d.ts +104 -0
  33. package/dist/noir-prover/src/prover-orchestrator.d.ts.map +1 -0
  34. package/dist/noir-prover/src/prover.d.ts +42 -1
  35. package/dist/noir-prover/src/prover.d.ts.map +1 -0
  36. package/dist/noir-prover/src/prover.test.d.ts +1 -0
  37. package/dist/noir-prover/src/prover.test.d.ts.map +1 -0
  38. package/dist/noir-prover/src/two-tree-prover.d.ts +106 -0
  39. package/dist/noir-prover/src/two-tree-prover.d.ts.map +1 -0
  40. package/dist/noir-prover/src/two-tree-prover.test.d.ts +18 -0
  41. package/dist/noir-prover/src/two-tree-prover.test.d.ts.map +1 -0
  42. package/dist/noir-prover/src/types.d.ts +283 -20
  43. package/dist/noir-prover/src/types.d.ts.map +1 -0
  44. package/dist/noir-prover/src/worker-protocol.d.ts +75 -0
  45. package/dist/noir-prover/src/worker-protocol.d.ts.map +1 -0
  46. package/dist/two_tree_membership_18-Dfr1mYE-.cjs +195 -0
  47. package/dist/two_tree_membership_18-DufFLCM8.js +184 -0
  48. package/dist/two_tree_membership_20-DhrOeOFx.js +184 -0
  49. package/dist/two_tree_membership_20-jDMJJKIC.cjs +195 -0
  50. package/dist/two_tree_membership_22-CjwYhC_e.cjs +195 -0
  51. package/dist/two_tree_membership_22-iMLJVJEK.js +184 -0
  52. package/dist/two_tree_membership_24-Br8I-xLQ.cjs +195 -0
  53. package/dist/two_tree_membership_24-Df3SNDL0.js +184 -0
  54. package/package.json +12 -8
package/dist/index.cjs CHANGED
@@ -2352,116 +2352,316 @@ ieee754.write = function (buffer, value, offset, isLE, mLen, nBytes) {
2352
2352
  }
2353
2353
  } (buffer));
2354
2354
 
2355
- const noir_version = "1.0.0-beta.16+2d46fca7203545cbbfb31a0d0328de6c10a8db95";
2356
- const hash = "7475465969754700296";
2357
- const abi = {
2355
+ const DEFAULT_CIRCUIT_DEPTH = 20;
2356
+ const PUBLIC_INPUT_COUNT = 5;
2357
+ function validateAuthorityLevel(level) {
2358
+ if (level < 1 || level > 5 || !Number.isInteger(level)) {
2359
+ throw new Error(`Invalid authority level: ${level}. Must be integer 1-5.`);
2360
+ }
2361
+ return level;
2362
+ }
2363
+ const DISTRICT_SLOT_COUNT = 24;
2364
+ const TWO_TREE_PUBLIC_INPUT_COUNT = 29;
2365
+
2366
+ const circuitLoaders = {
2367
+ 18: async () => {
2368
+ const module = await Promise.resolve().then(() => require('./district_membership_18-DSj7IJGE.cjs'));
2369
+ return module.default;
2370
+ },
2371
+ 20: async () => {
2372
+ const module = await Promise.resolve().then(() => require('./district_membership_20-DKRyEvX4.cjs'));
2373
+ return module.default;
2374
+ },
2375
+ 22: async () => {
2376
+ const module = await Promise.resolve().then(() => require('./district_membership_22-pal861Dz.cjs'));
2377
+ return module.default;
2378
+ },
2379
+ 24: async () => {
2380
+ const module = await Promise.resolve().then(() => require('./district_membership_24-BAJ-CEhq.cjs'));
2381
+ return module.default;
2382
+ }
2383
+ };
2384
+ function detectThreads$1() {
2385
+ const hasSharedArrayBuffer = typeof SharedArrayBuffer !== "undefined";
2386
+ if (!hasSharedArrayBuffer) {
2387
+ console.log("[NoirProver] SharedArrayBuffer unavailable - using single-threaded mode");
2388
+ return 1;
2389
+ }
2390
+ const cores = typeof navigator !== "undefined" ? navigator.hardwareConcurrency || 4 : 4;
2391
+ return Math.min(cores, 8);
2392
+ }
2393
+ class NoirProver {
2394
+ backend = null;
2395
+ noir = null;
2396
+ config;
2397
+ threads;
2398
+ depth;
2399
+ constructor(config = {}) {
2400
+ this.config = {
2401
+ circuitName: "district_membership",
2402
+ ...config
2403
+ };
2404
+ this.threads = config.threads ?? detectThreads$1();
2405
+ this.depth = config.depth ?? DEFAULT_CIRCUIT_DEPTH;
2406
+ }
2407
+ /**
2408
+ * Initialize the prover (must be called before generating proofs)
2409
+ * Lazily loads the circuit for the configured depth
2410
+ */
2411
+ async init() {
2412
+ if (this.backend && this.noir) return;
2413
+ console.log(`[NoirProver] Initializing depth=${this.depth} with ${this.threads} thread(s)...`);
2414
+ const start = Date.now();
2415
+ const loader = circuitLoaders[this.depth];
2416
+ if (!loader) {
2417
+ throw new Error(`Unsupported circuit depth: ${this.depth}. Must be 18, 20, 22, or 24.`);
2418
+ }
2419
+ const circuit = await loader();
2420
+ this.noir = new noir_js.Noir(circuit);
2421
+ this.backend = new bb_js.UltraHonkBackend(circuit.bytecode, { threads: this.threads });
2422
+ console.log(`[NoirProver] Initialized depth=${this.depth} in ${Date.now() - start}ms (${this.threads} threads)`);
2423
+ }
2424
+ /**
2425
+ * Get the circuit depth for this prover instance
2426
+ */
2427
+ getDepth() {
2428
+ return this.depth;
2429
+ }
2430
+ /**
2431
+ * Pre-warm the prover by initializing backend
2432
+ * Call this on app load to hide latency from user
2433
+ */
2434
+ async warmup() {
2435
+ await this.init();
2436
+ console.log("[NoirProver] Warmup complete (backend initialized)");
2437
+ }
2438
+ /** Maximum allowed Merkle depth (prevents DoS via oversized arrays) */
2439
+ static MAX_MERKLE_DEPTH = 24;
2440
+ /**
2441
+ * Generate a ZK proof for district membership
2442
+ *
2443
+ * The new secure circuit computes leaf and nullifier internally:
2444
+ * - leaf = hash(userSecret, districtId, authorityLevel, registrationSalt)
2445
+ * - nullifier = hash(userSecret, actionDomain)
2446
+ *
2447
+ * This prevents attackers from submitting arbitrary leaves or nullifiers.
2448
+ */
2449
+ async prove(inputs) {
2450
+ await this.init();
2451
+ if (!Array.isArray(inputs.merklePath)) {
2452
+ throw new Error("merklePath must be an array");
2453
+ }
2454
+ if (inputs.merklePath.length > NoirProver.MAX_MERKLE_DEPTH) {
2455
+ throw new Error(
2456
+ `merklePath exceeds maximum allowed depth: ${inputs.merklePath.length} > ${NoirProver.MAX_MERKLE_DEPTH}`
2457
+ );
2458
+ }
2459
+ if (inputs.merklePath.length !== this.depth) {
2460
+ throw new Error(
2461
+ `merklePath length mismatch: expected ${this.depth}, got ${inputs.merklePath.length}. Did you initialize the prover with the wrong depth?`
2462
+ );
2463
+ }
2464
+ validateAuthorityLevel(inputs.authorityLevel);
2465
+ console.log("[NoirProver] Generating witness...");
2466
+ const witnessStart = Date.now();
2467
+ const noirInputs = {
2468
+ // Public inputs (contract-controlled)
2469
+ merkle_root: inputs.merkleRoot,
2470
+ action_domain: inputs.actionDomain,
2471
+ // Private inputs (user secrets - never revealed)
2472
+ user_secret: inputs.userSecret,
2473
+ district_id: inputs.districtId,
2474
+ authority_level: inputs.authorityLevel.toString(),
2475
+ registration_salt: inputs.registrationSalt,
2476
+ // Merkle proof data
2477
+ merkle_path: inputs.merklePath,
2478
+ leaf_index: inputs.leafIndex
2479
+ };
2480
+ const { witness } = await this.noir.execute(noirInputs);
2481
+ console.log(`[NoirProver] Witness generated in ${Date.now() - witnessStart}ms`);
2482
+ console.log("[NoirProver] Generating proof...");
2483
+ const proofStart = Date.now();
2484
+ const { proof, publicInputs } = await this.backend.generateProof(witness);
2485
+ console.log(`[NoirProver] Proof generated in ${Date.now() - proofStart}ms`);
2486
+ if (publicInputs.length !== PUBLIC_INPUT_COUNT) {
2487
+ throw new Error(
2488
+ `Expected ${PUBLIC_INPUT_COUNT} public inputs from circuit, got ${publicInputs.length}`
2489
+ );
2490
+ }
2491
+ const rawAuthorityLevel = parseInt(publicInputs[2], 16) || parseInt(publicInputs[2], 10);
2492
+ return {
2493
+ proof,
2494
+ publicInputs: {
2495
+ merkleRoot: publicInputs[0],
2496
+ nullifier: publicInputs[1],
2497
+ authorityLevel: validateAuthorityLevel(rawAuthorityLevel),
2498
+ actionDomain: publicInputs[3],
2499
+ districtId: publicInputs[4]
2500
+ }
2501
+ };
2502
+ }
2503
+ /**
2504
+ * Verify a proof
2505
+ */
2506
+ async verify(proof, publicInputs) {
2507
+ await this.init();
2508
+ return this.backend.verifyProof({ proof, publicInputs });
2509
+ }
2510
+ /**
2511
+ * Clean up resources
2512
+ */
2513
+ async destroy() {
2514
+ if (this.backend) {
2515
+ await this.backend.destroy();
2516
+ this.backend = null;
2517
+ this.noir = null;
2518
+ }
2519
+ }
2520
+ }
2521
+ const proverInstances = /* @__PURE__ */ new Map();
2522
+ const initializationPromises = /* @__PURE__ */ new Map();
2523
+ async function getProverForDepth(depth = DEFAULT_CIRCUIT_DEPTH, config) {
2524
+ const existingInstance = proverInstances.get(depth);
2525
+ if (existingInstance) {
2526
+ return existingInstance;
2527
+ }
2528
+ const existingPromise = initializationPromises.get(depth);
2529
+ if (existingPromise) {
2530
+ return existingPromise;
2531
+ }
2532
+ let resolveInit;
2533
+ let rejectInit;
2534
+ const initPromise = new Promise((resolve, reject) => {
2535
+ resolveInit = resolve;
2536
+ rejectInit = reject;
2537
+ });
2538
+ initializationPromises.set(depth, initPromise);
2539
+ (async () => {
2540
+ try {
2541
+ const prover = new NoirProver({ ...config, depth });
2542
+ await prover.init();
2543
+ proverInstances.set(depth, prover);
2544
+ initializationPromises.delete(depth);
2545
+ resolveInit(prover);
2546
+ } catch (err) {
2547
+ initializationPromises.delete(depth);
2548
+ rejectInit(err instanceof Error ? err : new Error(String(err)));
2549
+ }
2550
+ })();
2551
+ return initPromise;
2552
+ }
2553
+ async function getProver(config) {
2554
+ const depth = config?.depth ?? DEFAULT_CIRCUIT_DEPTH;
2555
+ return getProverForDepth(depth, config);
2556
+ }
2557
+ async function resetProverSingleton() {
2558
+ const destroyPromises = Array.from(proverInstances.values()).map(
2559
+ (prover) => prover.destroy()
2560
+ );
2561
+ await Promise.all(destroyPromises);
2562
+ proverInstances.clear();
2563
+ initializationPromises.clear();
2564
+ }
2565
+ async function resetProverForDepth(depth) {
2566
+ const prover = proverInstances.get(depth);
2567
+ if (prover) {
2568
+ await prover.destroy();
2569
+ proverInstances.delete(depth);
2570
+ }
2571
+ initializationPromises.delete(depth);
2572
+ }
2573
+
2574
+ const noir_version$1 = "1.0.0-beta.16+2d46fca7203545cbbfb31a0d0328de6c10a8db95";
2575
+ const hash$1 = "571519090691839533";
2576
+ const abi$1 = {
2358
2577
  parameters: [
2359
2578
  {
2360
- name: "merkle_root",
2361
- type: {
2362
- kind: "field"
2363
- },
2364
- visibility: "private"
2365
- },
2366
- {
2367
- name: "nullifier",
2368
- type: {
2369
- kind: "field"
2370
- },
2371
- visibility: "private"
2372
- },
2373
- {
2374
- name: "authority_hash",
2375
- type: {
2376
- kind: "field"
2377
- },
2378
- visibility: "private"
2379
- },
2380
- {
2381
- name: "epoch_id",
2382
- type: {
2383
- kind: "field"
2384
- },
2385
- visibility: "private"
2386
- },
2387
- {
2388
- name: "campaign_id",
2389
- type: {
2390
- kind: "field"
2391
- },
2392
- visibility: "private"
2393
- },
2394
- {
2395
- name: "leaf",
2396
- type: {
2397
- kind: "field"
2398
- },
2399
- visibility: "private"
2400
- },
2401
- {
2402
- name: "merkle_path",
2579
+ name: "inputs",
2403
2580
  type: {
2404
2581
  kind: "array",
2405
- length: 14,
2582
+ length: 4,
2406
2583
  type: {
2407
2584
  kind: "field"
2408
2585
  }
2409
2586
  },
2410
2587
  visibility: "private"
2588
+ }
2589
+ ],
2590
+ return_type: {
2591
+ abi_type: {
2592
+ kind: "field"
2411
2593
  },
2594
+ visibility: "public"
2595
+ },
2596
+ error_types: {
2597
+ }
2598
+ };
2599
+ const bytecode$1 = "H4sIAAAAAAAA/42PPQ5AQBCFd/0XDqGkI44gEpUoNQqJRkFEp3QEcQGnEI6jU2r0JrErY6t9yZeX3XmZH0peacybsm4tcIW9KWCjOkXOUZirKKcDBmACFvmL93aIlKiGsn6V9keweFsWr+OYF254JsPeTdFxzxebK9n325WgO1ThT8xxPV5UJ+c4AQAA";
2600
+ const debug_symbols$1 = "jZDRCoQgEEX/ZZ590C1h61eWJcymEETFNFiif1+LbOsh2Kdx5nruDHeGDts4NMr0doT6NUPrldZqaLSVIihr0nReCOS2CR4xjeCkJ8oJjyZAbaLWBCah4/ZpdMJsNQifVEoATZdqMuyVxvW1kB9N71FG2Q6zojhwzi48u+fLJ+O7Qfnkj8OBVX9fUFT5gpJfLninTkjlL5kBhXpdMwmvRKtxz7GPRp5iDR+XlRy881ZiFz2udpuWFnwB";
2601
+ const file_map$1 = {
2602
+ "19": {
2603
+ source: "// Exposed only for usage in `std::meta`\npub(crate) mod poseidon2;\n\nuse crate::default::Default;\nuse crate::embedded_curve_ops::{\n EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_array_return,\n};\nuse crate::meta::derive_via;\n\n#[foreign(sha256_compression)]\n// docs:start:sha256_compression\npub fn sha256_compression(input: [u32; 16], state: [u32; 8]) -> [u32; 8] {}\n// docs:end:sha256_compression\n\n#[foreign(keccakf1600)]\n// docs:start:keccakf1600\npub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {}\n// docs:end:keccakf1600\n\npub mod keccak {\n #[deprecated(\"This function has been moved to std::hash::keccakf1600\")]\n pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {\n super::keccakf1600(input)\n }\n}\n\n#[foreign(blake2s)]\n// docs:start:blake2s\npub fn blake2s<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake2s\n{}\n\n// docs:start:blake3\npub fn blake3<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake3\n{\n if crate::runtime::is_unconstrained() {\n // Temporary measure while Barretenberg is main proving system.\n // Please open an issue if you're working on another proving system and running into problems due to this.\n crate::static_assert(\n N <= 1024,\n \"Barretenberg cannot prove blake3 hashes with inputs larger than 1024 bytes\",\n );\n }\n __blake3(input)\n}\n\n#[foreign(blake3)]\nfn __blake3<let N: u32>(input: [u8; N]) -> [u8; 32] {}\n\n// docs:start:pedersen_commitment\npub fn pedersen_commitment<let N: u32>(input: [Field; N]) -> EmbeddedCurvePoint {\n // docs:end:pedersen_commitment\n pedersen_commitment_with_separator(input, 0)\n}\n\n#[inline_always]\npub fn pedersen_commitment_with_separator<let N: u32>(\n input: [Field; N],\n separator: u32,\n) -> EmbeddedCurvePoint {\n let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N];\n for i in 0..N {\n // we use the unsafe version because the multi_scalar_mul will constrain the scalars.\n points[i] = from_field_unsafe(input[i]);\n 
}\n let generators = derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n multi_scalar_mul(generators, points)\n}\n\n// docs:start:pedersen_hash\npub fn pedersen_hash<let N: u32>(input: [Field; N]) -> Field\n// docs:end:pedersen_hash\n{\n pedersen_hash_with_separator(input, 0)\n}\n\n#[no_predicates]\npub fn pedersen_hash_with_separator<let N: u32>(input: [Field; N], separator: u32) -> Field {\n let mut scalars: [EmbeddedCurveScalar; N + 1] = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N + 1];\n let mut generators: [EmbeddedCurvePoint; N + 1] =\n [EmbeddedCurvePoint::point_at_infinity(); N + 1];\n let domain_generators: [EmbeddedCurvePoint; N] =\n derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n\n for i in 0..N {\n scalars[i] = from_field_unsafe(input[i]);\n generators[i] = domain_generators[i];\n }\n scalars[N] = EmbeddedCurveScalar { lo: N as Field, hi: 0 as Field };\n\n let length_generator: [EmbeddedCurvePoint; 1] =\n derive_generators(\"pedersen_hash_length\".as_bytes(), 0);\n generators[N] = length_generator[0];\n multi_scalar_mul_array_return(generators, scalars, true)[0].x\n}\n\n#[field(bn254)]\n#[inline_always]\npub fn derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {\n crate::assert_constant(domain_separator_bytes);\n // TODO(https://github.com/noir-lang/noir/issues/5672): Add back assert_constant on starting_index\n __derive_generators(domain_separator_bytes, starting_index)\n}\n\n#[builtin(derive_pedersen_generators)]\n#[field(bn254)]\nfn __derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {}\n\n#[field(bn254)]\n// Decompose the input 'bn254 scalar' into two 128 bits limbs.\n// It is called 'unsafe' because it does not assert the limbs are 128 bits\n// Assuming the limbs are 128 bits:\n// Assert the decomposition does not overflow the field size.\nfn 
from_field_unsafe(scalar: Field) -> EmbeddedCurveScalar {\n // Safety: xlo and xhi decomposition is checked below\n let (xlo, xhi) = unsafe { crate::field::bn254::decompose_hint(scalar) };\n // Check that the decomposition is correct\n assert_eq(scalar, xlo + crate::field::bn254::TWO_POW_128 * xhi);\n // Check that the decomposition does not overflow the field size\n let (a, b) = if xhi == crate::field::bn254::PHI {\n (xlo, crate::field::bn254::PLO)\n } else {\n (xhi, crate::field::bn254::PHI)\n };\n crate::field::bn254::assert_lt(a, b);\n\n EmbeddedCurveScalar { lo: xlo, hi: xhi }\n}\n\npub fn poseidon2_permutation<let N: u32>(input: [Field; N], state_len: u32) -> [Field; N] {\n assert_eq(input.len(), state_len);\n poseidon2_permutation_internal(input)\n}\n\n#[foreign(poseidon2_permutation)]\nfn poseidon2_permutation_internal<let N: u32>(input: [Field; N]) -> [Field; N] {}\n\n// Generic hashing support.\n// Partially ported and impacted by rust.\n\n// Hash trait shall be implemented per type.\n#[derive_via(derive_hash)]\npub trait Hash {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher;\n}\n\n// docs:start:derive_hash\ncomptime fn derive_hash(s: TypeDefinition) -> Quoted {\n let name = quote { $crate::hash::Hash };\n let signature = quote { fn hash<H>(_self: Self, _state: &mut H) where H: $crate::hash::Hasher };\n let for_each_field = |name| quote { _self.$name.hash(_state); };\n crate::meta::make_trait_impl(\n s,\n name,\n signature,\n for_each_field,\n quote {},\n |fields| fields,\n )\n}\n// docs:end:derive_hash\n\n// Hasher trait shall be implemented by algorithms to provide hash-agnostic means.\n// TODO: consider making the types generic here ([u8], [Field], etc.)\npub trait Hasher {\n fn finish(self) -> Field;\n\n fn write(&mut self, input: Field);\n}\n\n// BuildHasher is a factory trait, responsible for production of specific Hasher.\npub trait BuildHasher {\n type H: Hasher;\n\n fn build_hasher(self) -> H;\n}\n\npub struct 
BuildHasherDefault<H>;\n\nimpl<H> BuildHasher for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n type H = H;\n\n fn build_hasher(_self: Self) -> H {\n H::default()\n }\n}\n\nimpl<H> Default for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n fn default() -> Self {\n BuildHasherDefault {}\n }\n}\n\nimpl Hash for Field {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self);\n }\n}\n\nimpl Hash for u1 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u128 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for i8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u8 as Field);\n }\n}\n\nimpl Hash for i16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u16 as Field);\n }\n}\n\nimpl Hash for i32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u32 as Field);\n }\n}\n\nimpl Hash for i64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u64 as Field);\n }\n}\n\nimpl Hash for bool {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for () {\n fn hash<H>(_self: Self, _state: &mut H)\n where\n H: Hasher,\n {}\n}\n\nimpl<T, let N: u32> Hash for [T; N]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n 
where\n H: Hasher,\n {\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<T> Hash for [T]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.len().hash(state);\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<A, B> Hash for (A, B)\nwhere\n A: Hash,\n B: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n }\n}\n\nimpl<A, B, C> Hash for (A, B, C)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n }\n}\n\nimpl<A, B, C, D> Hash for (A, B, C, D)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n }\n}\n\nimpl<A, B, C, D, E> Hash for (A, B, C, D, E)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n E: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n self.4.hash(state);\n }\n}\n\n// Some test vectors for Pedersen hash and Pedersen Commitment.\n// They have been generated using the same functions so the tests are for now useless\n// but they will be useful when we switch to Noir implementation.\n#[test]\nfn assert_pedersen() {\n assert_eq(\n pedersen_hash_with_separator([1], 1),\n 0x1b3f4b1a83092a13d8d1a59f7acb62aba15e7002f4440f2275edb99ebbc2305f,\n );\n assert_eq(\n pedersen_commitment_with_separator([1], 1),\n EmbeddedCurvePoint {\n x: 0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402,\n y: 0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126,\n is_infinite: false,\n },\n );\n\n assert_eq(\n pedersen_hash_with_separator([1, 2], 2),\n 0x26691c129448e9ace0c66d11f0a16d9014a9e8498ee78f4d69f0083168188255,\n );\n assert_eq(\n 
pedersen_commitment_with_separator([1, 2], 2),\n EmbeddedCurvePoint {\n x: 0x2e2b3b191e49541fe468ec6877721d445dcaffe41728df0a0eafeb15e87b0753,\n y: 0x2ff4482400ad3a6228be17a2af33e2bcdf41be04795f9782bd96efe7e24f8778,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3], 3),\n 0x0bc694b7a1f8d10d2d8987d07433f26bd616a2d351bc79a3c540d85b6206dbe4,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3], 3),\n EmbeddedCurvePoint {\n x: 0x1fee4e8cf8d2f527caa2684236b07c4b1bad7342c01b0f75e9a877a71827dc85,\n y: 0x2f9fedb9a090697ab69bf04c8bc15f7385b3e4b68c849c1536e5ae15ff138fd1,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4], 4),\n 0xdae10fb32a8408521803905981a2b300d6a35e40e798743e9322b223a5eddc,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4], 4),\n EmbeddedCurvePoint {\n x: 0x07ae3e202811e1fca39c2d81eabe6f79183978e6f12be0d3b8eda095b79bdbc9,\n y: 0x0afc6f892593db6fbba60f2da558517e279e0ae04f95758587760ba193145014,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5], 5),\n 0xfc375b062c4f4f0150f7100dfb8d9b72a6d28582dd9512390b0497cdad9c22,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5], 5),\n EmbeddedCurvePoint {\n x: 0x1754b12bd475a6984a1094b5109eeca9838f4f81ac89c5f0a41dbce53189bb29,\n y: 0x2da030e3cfcdc7ddad80eaf2599df6692cae0717d4e9f7bfbee8d073d5d278f7,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6], 6),\n 0x1696ed13dc2730062a98ac9d8f9de0661bb98829c7582f699d0273b18c86a572,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6], 6),\n EmbeddedCurvePoint {\n x: 0x190f6c0e97ad83e1e28da22a98aae156da083c5a4100e929b77e750d3106a697,\n y: 0x1f4b60f34ef91221a0b49756fa0705da93311a61af73d37a0c458877706616fb,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n 
0x128c0ff144fc66b6cb60eeac8a38e23da52992fc427b92397a7dffd71c45ede3,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n EmbeddedCurvePoint {\n x: 0x015441e9d29491b06563fac16fc76abf7a9534c715421d0de85d20dbe2965939,\n y: 0x1d2575b0276f4e9087e6e07c2cb75aa1baafad127af4be5918ef8a2ef2fea8fc,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n 0x2f960e117482044dfc99d12fece2ef6862fba9242be4846c7c9a3e854325a55c,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n EmbeddedCurvePoint {\n x: 0x1657737676968887fceb6dd516382ea13b3a2c557f509811cd86d5d1199bc443,\n y: 0x1f39f0cb569040105fa1e2f156521e8b8e08261e635a2b210bdc94e8d6d65f77,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n 0x0c96db0790602dcb166cc4699e2d306c479a76926b81c2cb2aaa92d249ec7be7,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n EmbeddedCurvePoint {\n x: 0x0a3ceae42d14914a432aa60ec7fded4af7dad7dd4acdbf2908452675ec67e06d,\n y: 0xfc19761eaaf621ad4aec9a8b2e84a4eceffdba78f60f8b9391b0bd9345a2f2,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n 0x2cd37505871bc460a62ea1e63c7fe51149df5d0801302cf1cbc48beb8dff7e94,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n EmbeddedCurvePoint {\n x: 0x2fb3f8b3d41ddde007c8c3c62550f9a9380ee546fcc639ffbb3fd30c8d8de30c,\n y: 0x300783be23c446b11a4c0fabf6c91af148937cea15fcf5fb054abf7f752ee245,\n is_infinite: false,\n },\n );\n}\n",
2604
+ path: "std/hash/mod.nr"
2605
+ },
2606
+ "51": {
2607
+ source: "use dep::std::hash::poseidon2_permutation;\n\nfn main(inputs: [Field; 4]) -> pub Field {\n let out = poseidon2_permutation(inputs, 4);\n out[0]\n}\n",
2608
+ path: "/Users/noot/Documents/voter-protocol/packages/crypto/noir/fixtures/src/main.nr"
2609
+ }
2610
+ };
2611
+ const expression_width$1 = {
2612
+ Bounded: {
2613
+ width: 4
2614
+ }
2615
+ };
2616
+ const fixturesCircuit = {
2617
+ noir_version: noir_version$1,
2618
+ hash: hash$1,
2619
+ abi: abi$1,
2620
+ bytecode: bytecode$1,
2621
+ debug_symbols: debug_symbols$1,
2622
+ file_map: file_map$1,
2623
+ expression_width: expression_width$1
2624
+ };
2625
+
2626
+ const noir_version = "1.0.0-beta.16+2d46fca7203545cbbfb31a0d0328de6c10a8db95";
2627
+ const hash = "17841920136822657451";
2628
+ const abi = {
2629
+ parameters: [
2412
2630
  {
2413
- name: "leaf_index",
2414
- type: {
2415
- kind: "integer",
2416
- sign: "unsigned",
2417
- width: 32
2418
- },
2419
- visibility: "private"
2420
- },
2421
- {
2422
- name: "user_secret",
2631
+ name: "inputs",
2423
2632
  type: {
2424
- kind: "field"
2633
+ kind: "array",
2634
+ length: 4,
2635
+ type: {
2636
+ kind: "field"
2637
+ }
2425
2638
  },
2426
2639
  visibility: "private"
2427
2640
  }
2428
2641
  ],
2429
2642
  return_type: {
2430
2643
  abi_type: {
2431
- kind: "tuple",
2432
- fields: [
2433
- {
2434
- kind: "field"
2435
- },
2436
- {
2437
- kind: "field"
2438
- },
2439
- {
2440
- kind: "field"
2441
- },
2442
- {
2443
- kind: "field"
2444
- },
2445
- {
2446
- kind: "field"
2447
- }
2448
- ]
2644
+ kind: "array",
2645
+ length: 4,
2646
+ type: {
2647
+ kind: "field"
2648
+ }
2449
2649
  },
2450
2650
  visibility: "public"
2451
2651
  },
2452
2652
  error_types: {
2453
2653
  }
2454
2654
  };
2455
- const bytecode = "H4sIAAAAAAAA/9Vcf5RVVRX+3jCjCFhkgz/4MTytSEEEMioUgRILRNCiAhEkkQFHfijTgPywgkIRrEhHEazIUAQrKhQBi2oQwYoKRbCiQkaopDLpj9aqtdRx78O53vMO9zp733vfc72z1mavuefc7/vO++4738C8IYfjo9L2GdfVzTxG/bf26xxVG9urqfLO2gqcOHK25yEauWrh2paWpsjrebTOwX+4WrvY3jWYDTbYNQKwjZzIwLkb6jt5VH1zvwfO23rVZZsXLRo74dwLXxo274mb77q0+b9304uc6yJb23gsRlvU8F8o35DWeFxNrY2340GIG3nZsgpXazfba4wChA8CT3T3rtUg3FQwivRwNL7S0tLSDXLTaqAzLYcT9wLoTatRaHT5uqch5JsrvPtaI++u4MgrcJPuIQ/9OzMv5ygkVPKcDTkPY3dAeBLw1yfZXmX72U4P1p1D9R6q91K9LwanysOLwulB9X6qc6nOQ/mfSD1t72UUIDx9eMI/kXqhtCdST8gfol5yDZma1hOlN+1823sbBQgN4gnftN5Ib5rUCDbtfMhN6w2daVnFSG+FRpfvgjSEfLM2Ri5QcPRR4CbdQx/oj/c+cg4z/NeoNV287hyFHl7XIwFPDwVHXzluZBSdbHtfxEdRP6oPUF1I9cEYnL4eXhROf6oPUX2Y6iMoy0irdLUOsP0iowDhScgTNd41XuRvtIiR9toAyB+ii+QaMjVtAEpiWkGkXWz7QKMAoUE84UfaQKSPNKkRHGkXQ27aQOhMyyrSBio0unyXpCHkm7WRdomCY5ACN+keBkEfaYPkHGZoo4Yjs59CD6/vn4Cnv4JjsBw3Mora2j4Y8VE0hOqjVB+jujQGZ7CHF4UzlOoyqo9TfQJlGWltXa3DbB9uFCA8CXmim3dtOMLTMBhFjLT/D4P8IRou15CpacNQEtMKIu1y20cYBQgN4gk/0kYgfaRJjeBIuxxy00ZAZ1pWkTZCodHluyINId+sjbQrFBwjFbhJ9zAS+kgbKecwQxs1HJlDFHp4/dAEPEMVHKPkuJFRdIrtoxAfRVdSXUX1SapPxeCM8vCicEZTfZrqM1SfRVlGWkdX6xjbxxoFCE9CnujqXeNFlR5aESPtP2Mgf4jGyjVkatoYlMS0gki72vZxRgFCg3jCj7RxSB9pUiM40q6G3LRx0JmWVaSNU2h0+a5JQ8g3ayPtGgXHeAVu0j2Mhz7Sxss5zNBGDUfmlQo9vH50Ap7RCo4JctzIKGpn+wTER9G1VBOpPkd1XQzOBA8vCmcS1fVUk6lqUZaRlne1TrF9qlGA8CTkiS7eNV5U5aEVMdKap0D+EE2Va8jUtCkoiWkFkXaD7XVGAUKDeMKPtDqkjzSpERxpN0BuWh10pmUVaXUKjS7fjWkI+WZtpN2o4JimwE26h2nQR9o0OYcZ2qjhyLxWoYfXT0rAM0nBMV2OGxlF7W2fjvgomkE1k+omqptjcKZ7eFE4s6jqqT5P1YCyjLQhrtbZts8xChCehDzR2bvGi07y0IoYadtnQ/4QzZFryNS02SiJaQWRdovtc40ChAbxhB9pc5E+0qRGcKTdArlpc6EzLatIm6vQ6PLNS0PIN2sjbZ6CY74CN+ke5kMfafPlHGZoo4Yjc4ZCD6+flYBnloJjgRw3Moo62L4A8VF0K9UXqL5I9aUYnAUeXhTOQqpFVF+m+grKMtIWuloX236bUYDwJOSJs7xrvOhkD62IkbZoMeQP0W1yDZmathglMa0g0m63fYlRgNAgnvAjbQnSR5rUCI602yE3bQl0pmUVaUsUGl2+O9IQ8s3aSLtDwbFUgZt0D0uhj7Slcg4ztFHDkXmrQg+vX5iAZ6GCY5kcNzKKTrV9GeKj6E6qr1J9jerrMTjLPLwonOVU36C6i+pulGOk5Qq0Ntp+z/Gp8CTkiTO9a7yorQdXxEjLNUL+EN0j15CpaY0ohWmFkXav7SuMAoQG
8YQfaSuQPtKkRnCk3Qu5aSugMy2rSFuh0Ojy3ZeGkG/WRtp9Co6VCtyke1gJfaStlHOYoY0ajsw7FXp4/fIEPMsVHKvkuJFR9A7bVyE+iu6n+ibVt6i+HYOzysOLwllN9R2qB6i+i3KMtIoCrWtsf9AoQHgS8sQZ3jVedIoHV7xIez23BvKH6EG5hkxNW4NSmFYYaQ/ZvtYoQGgQT/iRthbpI01qBEfaQ5CbthY607KKtLUKjS7fw2kI+WZtpD2s4FinwE26h3XQR9o6OYcZ2qjhyLxfoYfXr07As1rBsV6OGxlF77R9PeKj6BGq71F9n+oHMTjrPbwonA1UP6T6EdWPUY6RVlmgdaPtjxoFCE9Cnjjdu8aL2nlwxYu0V3MbIX+IHpVryNS0jSiFaYWR9pjtm4wChAbxhB9pm5A+0qRGcKQ9Brlpm6AzLatI26TQ6PI9noaQb9ZG2uMKjs0K3KR72Ax9pG2Wc5ihjRqOzEcUenj9hgQ8GxQcW+S4kVHU0fYtiI+irVRPUP2E6qcxOFs8vCicbVQ/o/o51S9QjpHWtkBrk+3bjQKEJyFPdPKu8aL2HlzxIu1/uSbIH6Ltcg2ZmtaEUphWGGlP2r7DKEBoEE/4kbYD6SOtCfJIexJy03ZAZ1pWkbZDodHleyoNId+sjbSnFBw7FbhJ97AT+kjbKecwQxs1HJlbFXp4/bYEPNsUHLvkuJFR9C7bdyE+ip6m+iXVr6h+HYOzy8OLwtlN9Rsc//8bf4dyjLSOBVr32P6MUYDwJOSJau8aL+rgwRUv0o7l9kD+ED0j15CpaXtQCtMKI+1Z2/caBQgN4gk/0vYifaRJjeBIexZy0/ZCZ1pWkbZXodHley4NId+sjbTnFBz7FLhJ97AP+kjbJ+cwQxs1HJlPK/Tw+t0JeHYrOPbLcSOj6DTb9yM+ip6n+j3VH6j+GIOz38OLwjlA9SeqP1P9BeUYafkCrQdtf8EoQHgS8sS7vWu86FQPrniRdih3EPKH6AW5hkxNO4hSmFYYaYdsbzYKEBrEE36kNSN9pEmN4Eg7BLlpzdCZllWkNSs0unwvpiHkm7WR9qKC47ACN+keDkMfaYflHGZoo4Yj83mFHl5/IAHPAQXHETluZBQFJ+8RxEfRX6n+RvV3qpdicI54eFE4R6n+QfVPqn/Za9rX5rDVE6yXPBNHoX8/uGtb03QU6TwI/o0yOHgqEB6swbqXqf5N9QrVMQ9Tu7cc5Ht7Wb42lSZ3bWuaTkdpNGlepzNQGk0VkGs6E6XR1AZyTWehNJoqIdfUWb7W/Ij8zRutruB9yv+TAv/qKf+uDn+4mT8Nxj8+55838Huev6Plj8/x5w34BzT8L1r8VwA+M/m71U4OdpXDx88We8mvXWdnTfCaBNcm19XXXt9QN6d2Yt3MhtqptfUTZ82+qaGudmZD8FHroLdzdqBwryK4v32y+0/4xVxXi4v7JqHtlRH35WK+rvD6W631r7vXOkTMBZin2e7qDfbR0XbXjTm19Q3BZwOD17CTw6d5BwX3Vye7P9ID98kLcAMe93XMQzeCb/+rIuZ8b9t4vEHP6flzcTqi/A68rHauBa/HG1KywaYoaAAA";
2456
- const debug_symbols = "tVfLbqNAEPwXzj5Q8+jp2V9ZrSLHIZEly7YcO9Iqyr8vTFc7yQEU2doLNYZ0QRVVEN67p+Hx8vKw3T8fXrtfv9+7x9N2t9u+POwOm/V5e9iPe98/Vp3/fDifhmHc1X05Pk4d16dhf+5+7S+73ap7W+8u7Y9ej+t9w/P6NB7tV92wfxpxJHze7oZp9bH6nO7nR5Fr4TQklStBxjcGzDOUUElQ8i3zKn75qumW+ezn1xpvmK9wA2rELfP5Oq+z+mV+PmrkfJzXvzCfenA+xTA3rwvziuwEmj8ZUH98BVGdIN1yB+tnBPswHyHc6eESwY9MRLzbxcVruNNGFdegRebmlx4Fcm0iRPUWhhL8ElBqP6thIY2AuI8IC7ei/leKXPxWSF/nCALuj8PCNciVQQJuuRnaeymg8Xug/oy/1pvt6dvbqMOoadWFto1tm9o2j7VaddK2pW21besobtWhN4DBOD6KQjQYCTDtzVOmRhRiISqxNsLQG8AgGMRGHJJBNpDGH4qB2mlCNbrYE0EMxNgYYzIwXdGExdKIoxqYttQ3/gSDYKdJ0ehSIlJdorpkJiVzKZm2bNqy2ZSDgWnLqfHnbCB2mkyTMk3KVCdUJ2aTmE1i2sS0idkkYmDaRBu/1Aalt9MUmlRoUqG6QnXFbCq8/6atmLZiNqlFQE2bWgTUIqCMgNIkpUlKdUp1ajZVs6matmraqtlULQLVtFWLQLUIVEag0iT0vS/gi+ALswp9ImaiEAuDrUQGHZYGAETmAaBlQPKFB/6aeJhxgBJNL5h5BO9PIEaiZQMhE5kOBO9Q8BJ5/OH5RzQbEQORytkARLMSUYhUHpUtrqwxs4LkZiY308sAbwMS7Uy0k3UA+4BEOzMfHKwEMh8dmc+OfH14uJnZzfRqwLuBTDuFdrIcYDsgtFMSkcrFUgQpROYI4mYWN9OLAm8KCu0stJNVAbuCQjsLg8S6QBkkZZDUg6RuprqZXht4b6C0U2kniwM2B5V2VgaJ5UFlkCqDVD1IU4Om/4ExdSiEaTFShumN8bY+bdePu4EfK8+X/ebLt8v579GP+NfN8XTYDE+X0zC9Wdqx8V3zDw==";
2655
+ const bytecode = "H4sIAAAAAAAA/72SPQqDQBCFZ/2J0c0hUiZdQo4QAqlCyjQpAmlSJAQ7S48gXsBTiB7HztLG3lkdZQWLXQQHHo9lhjcfu8ugK4v8+/r8OLpNZ4baSH0meS+D3JTm1igX5aE4jMsg34JSMUuaPbxvfnFM9tn9kobh47k7ldcg/0fnoo4r2quYO4vJ1mByYRmmlQaTB8swORpMXD13+GftEuIyJ/rCxVuJu3EmchrXOnNwAAMAAA==";
2656
+ const debug_symbols = "dZDRDoMgDEX/pc88iNHM+SvLYhCrISFAEJYshn9fNTL1wafSXs5tehcYsI9Tp8xoZ2hfC/Reaa2mTlspgrKGpktikNsueEQawUknygmPJkBrotYMPkLH7dPshNlqEJ7UggGagSoZjkrj+krsoIt7tKwyXD7KP17zC8/v+arh9W5QNfXhwJ8pvakTUvnLzVDQqrQaeiV6jXsOYzTyFEv4uqzk4Jy3EofocbXbNFrwAw==";
2457
2657
  const file_map = {
2458
2658
  "19": {
2459
2659
  source: "// Exposed only for usage in `std::meta`\npub(crate) mod poseidon2;\n\nuse crate::default::Default;\nuse crate::embedded_curve_ops::{\n EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_array_return,\n};\nuse crate::meta::derive_via;\n\n#[foreign(sha256_compression)]\n// docs:start:sha256_compression\npub fn sha256_compression(input: [u32; 16], state: [u32; 8]) -> [u32; 8] {}\n// docs:end:sha256_compression\n\n#[foreign(keccakf1600)]\n// docs:start:keccakf1600\npub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {}\n// docs:end:keccakf1600\n\npub mod keccak {\n #[deprecated(\"This function has been moved to std::hash::keccakf1600\")]\n pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {\n super::keccakf1600(input)\n }\n}\n\n#[foreign(blake2s)]\n// docs:start:blake2s\npub fn blake2s<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake2s\n{}\n\n// docs:start:blake3\npub fn blake3<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake3\n{\n if crate::runtime::is_unconstrained() {\n // Temporary measure while Barretenberg is main proving system.\n // Please open an issue if you're working on another proving system and running into problems due to this.\n crate::static_assert(\n N <= 1024,\n \"Barretenberg cannot prove blake3 hashes with inputs larger than 1024 bytes\",\n );\n }\n __blake3(input)\n}\n\n#[foreign(blake3)]\nfn __blake3<let N: u32>(input: [u8; N]) -> [u8; 32] {}\n\n// docs:start:pedersen_commitment\npub fn pedersen_commitment<let N: u32>(input: [Field; N]) -> EmbeddedCurvePoint {\n // docs:end:pedersen_commitment\n pedersen_commitment_with_separator(input, 0)\n}\n\n#[inline_always]\npub fn pedersen_commitment_with_separator<let N: u32>(\n input: [Field; N],\n separator: u32,\n) -> EmbeddedCurvePoint {\n let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N];\n for i in 0..N {\n // we use the unsafe version because the multi_scalar_mul will constrain the scalars.\n points[i] = from_field_unsafe(input[i]);\n 
}\n let generators = derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n multi_scalar_mul(generators, points)\n}\n\n// docs:start:pedersen_hash\npub fn pedersen_hash<let N: u32>(input: [Field; N]) -> Field\n// docs:end:pedersen_hash\n{\n pedersen_hash_with_separator(input, 0)\n}\n\n#[no_predicates]\npub fn pedersen_hash_with_separator<let N: u32>(input: [Field; N], separator: u32) -> Field {\n let mut scalars: [EmbeddedCurveScalar; N + 1] = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N + 1];\n let mut generators: [EmbeddedCurvePoint; N + 1] =\n [EmbeddedCurvePoint::point_at_infinity(); N + 1];\n let domain_generators: [EmbeddedCurvePoint; N] =\n derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n\n for i in 0..N {\n scalars[i] = from_field_unsafe(input[i]);\n generators[i] = domain_generators[i];\n }\n scalars[N] = EmbeddedCurveScalar { lo: N as Field, hi: 0 as Field };\n\n let length_generator: [EmbeddedCurvePoint; 1] =\n derive_generators(\"pedersen_hash_length\".as_bytes(), 0);\n generators[N] = length_generator[0];\n multi_scalar_mul_array_return(generators, scalars, true)[0].x\n}\n\n#[field(bn254)]\n#[inline_always]\npub fn derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {\n crate::assert_constant(domain_separator_bytes);\n // TODO(https://github.com/noir-lang/noir/issues/5672): Add back assert_constant on starting_index\n __derive_generators(domain_separator_bytes, starting_index)\n}\n\n#[builtin(derive_pedersen_generators)]\n#[field(bn254)]\nfn __derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {}\n\n#[field(bn254)]\n// Decompose the input 'bn254 scalar' into two 128 bits limbs.\n// It is called 'unsafe' because it does not assert the limbs are 128 bits\n// Assuming the limbs are 128 bits:\n// Assert the decomposition does not overflow the field size.\nfn 
from_field_unsafe(scalar: Field) -> EmbeddedCurveScalar {\n // Safety: xlo and xhi decomposition is checked below\n let (xlo, xhi) = unsafe { crate::field::bn254::decompose_hint(scalar) };\n // Check that the decomposition is correct\n assert_eq(scalar, xlo + crate::field::bn254::TWO_POW_128 * xhi);\n // Check that the decomposition does not overflow the field size\n let (a, b) = if xhi == crate::field::bn254::PHI {\n (xlo, crate::field::bn254::PLO)\n } else {\n (xhi, crate::field::bn254::PHI)\n };\n crate::field::bn254::assert_lt(a, b);\n\n EmbeddedCurveScalar { lo: xlo, hi: xhi }\n}\n\npub fn poseidon2_permutation<let N: u32>(input: [Field; N], state_len: u32) -> [Field; N] {\n assert_eq(input.len(), state_len);\n poseidon2_permutation_internal(input)\n}\n\n#[foreign(poseidon2_permutation)]\nfn poseidon2_permutation_internal<let N: u32>(input: [Field; N]) -> [Field; N] {}\n\n// Generic hashing support.\n// Partially ported and impacted by rust.\n\n// Hash trait shall be implemented per type.\n#[derive_via(derive_hash)]\npub trait Hash {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher;\n}\n\n// docs:start:derive_hash\ncomptime fn derive_hash(s: TypeDefinition) -> Quoted {\n let name = quote { $crate::hash::Hash };\n let signature = quote { fn hash<H>(_self: Self, _state: &mut H) where H: $crate::hash::Hasher };\n let for_each_field = |name| quote { _self.$name.hash(_state); };\n crate::meta::make_trait_impl(\n s,\n name,\n signature,\n for_each_field,\n quote {},\n |fields| fields,\n )\n}\n// docs:end:derive_hash\n\n// Hasher trait shall be implemented by algorithms to provide hash-agnostic means.\n// TODO: consider making the types generic here ([u8], [Field], etc.)\npub trait Hasher {\n fn finish(self) -> Field;\n\n fn write(&mut self, input: Field);\n}\n\n// BuildHasher is a factory trait, responsible for production of specific Hasher.\npub trait BuildHasher {\n type H: Hasher;\n\n fn build_hasher(self) -> H;\n}\n\npub struct 
BuildHasherDefault<H>;\n\nimpl<H> BuildHasher for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n type H = H;\n\n fn build_hasher(_self: Self) -> H {\n H::default()\n }\n}\n\nimpl<H> Default for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n fn default() -> Self {\n BuildHasherDefault {}\n }\n}\n\nimpl Hash for Field {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self);\n }\n}\n\nimpl Hash for u1 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u128 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for i8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u8 as Field);\n }\n}\n\nimpl Hash for i16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u16 as Field);\n }\n}\n\nimpl Hash for i32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u32 as Field);\n }\n}\n\nimpl Hash for i64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u64 as Field);\n }\n}\n\nimpl Hash for bool {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for () {\n fn hash<H>(_self: Self, _state: &mut H)\n where\n H: Hasher,\n {}\n}\n\nimpl<T, let N: u32> Hash for [T; N]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n 
where\n H: Hasher,\n {\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<T> Hash for [T]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.len().hash(state);\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<A, B> Hash for (A, B)\nwhere\n A: Hash,\n B: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n }\n}\n\nimpl<A, B, C> Hash for (A, B, C)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n }\n}\n\nimpl<A, B, C, D> Hash for (A, B, C, D)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n }\n}\n\nimpl<A, B, C, D, E> Hash for (A, B, C, D, E)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n E: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n self.4.hash(state);\n }\n}\n\n// Some test vectors for Pedersen hash and Pedersen Commitment.\n// They have been generated using the same functions so the tests are for now useless\n// but they will be useful when we switch to Noir implementation.\n#[test]\nfn assert_pedersen() {\n assert_eq(\n pedersen_hash_with_separator([1], 1),\n 0x1b3f4b1a83092a13d8d1a59f7acb62aba15e7002f4440f2275edb99ebbc2305f,\n );\n assert_eq(\n pedersen_commitment_with_separator([1], 1),\n EmbeddedCurvePoint {\n x: 0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402,\n y: 0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126,\n is_infinite: false,\n },\n );\n\n assert_eq(\n pedersen_hash_with_separator([1, 2], 2),\n 0x26691c129448e9ace0c66d11f0a16d9014a9e8498ee78f4d69f0083168188255,\n );\n assert_eq(\n 
pedersen_commitment_with_separator([1, 2], 2),\n EmbeddedCurvePoint {\n x: 0x2e2b3b191e49541fe468ec6877721d445dcaffe41728df0a0eafeb15e87b0753,\n y: 0x2ff4482400ad3a6228be17a2af33e2bcdf41be04795f9782bd96efe7e24f8778,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3], 3),\n 0x0bc694b7a1f8d10d2d8987d07433f26bd616a2d351bc79a3c540d85b6206dbe4,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3], 3),\n EmbeddedCurvePoint {\n x: 0x1fee4e8cf8d2f527caa2684236b07c4b1bad7342c01b0f75e9a877a71827dc85,\n y: 0x2f9fedb9a090697ab69bf04c8bc15f7385b3e4b68c849c1536e5ae15ff138fd1,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4], 4),\n 0xdae10fb32a8408521803905981a2b300d6a35e40e798743e9322b223a5eddc,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4], 4),\n EmbeddedCurvePoint {\n x: 0x07ae3e202811e1fca39c2d81eabe6f79183978e6f12be0d3b8eda095b79bdbc9,\n y: 0x0afc6f892593db6fbba60f2da558517e279e0ae04f95758587760ba193145014,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5], 5),\n 0xfc375b062c4f4f0150f7100dfb8d9b72a6d28582dd9512390b0497cdad9c22,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5], 5),\n EmbeddedCurvePoint {\n x: 0x1754b12bd475a6984a1094b5109eeca9838f4f81ac89c5f0a41dbce53189bb29,\n y: 0x2da030e3cfcdc7ddad80eaf2599df6692cae0717d4e9f7bfbee8d073d5d278f7,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6], 6),\n 0x1696ed13dc2730062a98ac9d8f9de0661bb98829c7582f699d0273b18c86a572,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6], 6),\n EmbeddedCurvePoint {\n x: 0x190f6c0e97ad83e1e28da22a98aae156da083c5a4100e929b77e750d3106a697,\n y: 0x1f4b60f34ef91221a0b49756fa0705da93311a61af73d37a0c458877706616fb,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n 
0x128c0ff144fc66b6cb60eeac8a38e23da52992fc427b92397a7dffd71c45ede3,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n EmbeddedCurvePoint {\n x: 0x015441e9d29491b06563fac16fc76abf7a9534c715421d0de85d20dbe2965939,\n y: 0x1d2575b0276f4e9087e6e07c2cb75aa1baafad127af4be5918ef8a2ef2fea8fc,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n 0x2f960e117482044dfc99d12fece2ef6862fba9242be4846c7c9a3e854325a55c,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n EmbeddedCurvePoint {\n x: 0x1657737676968887fceb6dd516382ea13b3a2c557f509811cd86d5d1199bc443,\n y: 0x1f39f0cb569040105fa1e2f156521e8b8e08261e635a2b210bdc94e8d6d65f77,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n 0x0c96db0790602dcb166cc4699e2d306c479a76926b81c2cb2aaa92d249ec7be7,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n EmbeddedCurvePoint {\n x: 0x0a3ceae42d14914a432aa60ec7fded4af7dad7dd4acdbf2908452675ec67e06d,\n y: 0xfc19761eaaf621ad4aec9a8b2e84a4eceffdba78f60f8b9391b0bd9345a2f2,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n 0x2cd37505871bc460a62ea1e63c7fe51149df5d0801302cf1cbc48beb8dff7e94,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n EmbeddedCurvePoint {\n x: 0x2fb3f8b3d41ddde007c8c3c62550f9a9380ee546fcc639ffbb3fd30c8d8de30c,\n y: 0x300783be23c446b11a4c0fabf6c91af148937cea15fcf5fb054abf7f752ee245,\n is_infinite: false,\n },\n );\n}\n",
2460
2660
  path: "std/hash/mod.nr"
2461
2661
  },
2462
2662
  "51": {
2463
- source: "// District membership circuit (Barretenberg / Noir)\n// Mirrors Halo2 semantics with Poseidon hashing and domain-separated nullifier.\n\nuse dep::std::hash::poseidon2_permutation;\n\n// Default depth; build pipeline rewrites this per-class (14 / 20 / 22).\nglobal DEPTH: u32 = 14;\n\nfn poseidon2_hash2(left: Field, right: Field) -> Field {\n let mut state: [Field; 4] = [left, right, 0, 0];\n let out = poseidon2_permutation(state, 4);\n out[0]\n}\n\nfn poseidon2_hash4(a: Field, b: Field, c: Field, d: Field) -> Field {\n let mut state: [Field; 4] = [a, b, c, d];\n let out = poseidon2_permutation(state, 4);\n out[0]\n}\n\nfn compute_merkle_root(leaf: Field, merkle_path: [Field; DEPTH], leaf_index: u32) -> Field {\n assert(leaf_index < (1u32 << DEPTH)); // range-constrain index\n\n let mut node = leaf;\n for i in 0..DEPTH {\n let bit: bool = ((leaf_index >> i) & 1u32) == 1u32;\n let sibling = merkle_path[i];\n node = if bit { poseidon2_hash2(sibling, node) } else { poseidon2_hash2(node, sibling) };\n }\n node\n}\n\nfn compute_nullifier(user_secret: Field, campaign_id: Field, authority_hash: Field, epoch_id: Field) -> Field {\n poseidon2_hash4(user_secret, campaign_id, authority_hash, epoch_id)\n}\n\n// Public inputs are marked `pub`; private witnesses remain secret.\nfn main(\n merkle_root: Field,\n nullifier: Field,\n authority_hash: Field,\n epoch_id: Field,\n campaign_id: Field,\n leaf: Field,\n merkle_path: [Field; DEPTH],\n leaf_index: u32,\n user_secret: Field,\n) -> pub (Field, Field, Field, Field, Field) {\n let computed_root = compute_merkle_root(leaf, merkle_path, leaf_index);\n assert(computed_root == merkle_root);\n\n let computed_nullifier = compute_nullifier(user_secret, campaign_id, authority_hash, epoch_id);\n assert(computed_nullifier == nullifier);\n\n (merkle_root, nullifier, authority_hash, epoch_id, campaign_id)\n}\n",
2464
- path: "/Users/noot/Documents/voter-protocol/packages/crypto/noir/district_membership/src/main.nr"
2663
+ source: "use dep::std::hash::poseidon2_permutation;\n\n// Returns all 4 elements of the permutation state (not just state[0])\n// This is needed for implementing the sponge construction in TypeScript\nfn main(inputs: [Field; 4]) -> pub [Field; 4] {\n poseidon2_permutation(inputs, 4)\n}\n",
2664
+ path: "/Users/noot/Documents/voter-protocol/packages/crypto/noir/sponge_helper/src/main.nr"
2465
2665
  }
2466
2666
  };
2467
2667
  const expression_width = {
@@ -2469,7 +2669,7 @@ const expression_width = {
2469
2669
  width: 4
2470
2670
  }
2471
2671
  };
2472
- const circuitJson = {
2672
+ const spongeHelperCircuit = {
2473
2673
  noir_version: noir_version,
2474
2674
  hash: hash,
2475
2675
  abi: abi,
@@ -2479,91 +2679,749 @@ const circuitJson = {
2479
2679
  expression_width: expression_width
2480
2680
  };
2481
2681
 
2682
// 32-byte zero field element ("0x" + 64 hex zeros), used to pad unused
// Poseidon2 state slots.
const ZERO_PAD = "0x" + "0".repeat(64);
// Domain-separation tags, encoded as 0x-prefixed 32-byte field elements.
// Tag values are ASCII: 0x48314d = "H1M", 0x48324d = "H2M",
// 0x48334d = "H3M", 0x48344d = "H4M". Each hash arity carries its own tag
// so outputs of the different hash modes can never collide.
const DOMAIN_HASH1 = "0x" + (0x48314d).toString(16).padStart(64, "0");
const DOMAIN_HASH2 = "0x" + (0x48324d).toString(16).padStart(64, "0");
const DOMAIN_HASH3 = "0x" + (0x48334d).toString(16).padStart(64, "0");
const DOMAIN_HASH4 = "0x" + (0x48344d).toString(16).padStart(64, "0");
// Sponge-mode domain tag for the 24-element absorb (poseidon2Sponge).
const DOMAIN_SPONGE_24 = "0x" + 0x534f4e47455f24n.toString(16).padStart(64, "0");
// Default max concurrency for the *Batch helpers.
const DEFAULT_BATCH_SIZE = 64;
2689
/**
 * Poseidon2 hashing backed by Noir circuit execution.
 *
 * Singleton wrapper around two Noir circuits:
 *  - `noir`: returns state[0] of one Poseidon2 permutation (fixed-arity hash)
 *  - `spongeHelperNoir`: returns the full 4-element permutation state, which
 *    lets hash4 and poseidon2Sponge implement sponge chaining in JS
 *
 * All hash variants are domain-separated via the DOMAIN_* tags so that
 * hashSingle/hashPair/hash3/hash4 outputs can never collide with each other.
 */
class Poseidon2Hasher {
  static instance = null;
  static initPromise = null;
  // BN254 scalar field modulus (exported for validation in dependent packages)
  static BN254_MODULUS = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
  noir;
  spongeHelperNoir;
  initialized = false;
  /**
   * Private constructor - use getInstance() instead
   * @param noir - Noir instance for the fixed-arity hash circuit
   * @param spongeHelperNoir - Noir instance for the full-state permutation circuit
   */
  constructor(noir, spongeHelperNoir) {
    this.noir = noir;
    this.spongeHelperNoir = spongeHelperNoir;
  }
  /**
   * Get singleton instance (thread-safe initialization).
   *
   * First call initializes the Noir circuits, subsequent calls return the
   * cached instance. Promise-based locking prevents double initialization;
   * a failed init clears the lock so the next caller can retry.
   */
  static async getInstance() {
    if (Poseidon2Hasher.instance?.initialized) {
      return Poseidon2Hasher.instance;
    }
    if (!Poseidon2Hasher.initPromise) {
      Poseidon2Hasher.initPromise = Poseidon2Hasher.initialize().catch((err) => {
        Poseidon2Hasher.initPromise = null;
        throw err;
      });
    }
    return Poseidon2Hasher.initPromise;
  }
  /**
   * Initialize the Noir circuits (called once per process)
   */
  static async initialize() {
    const noir = new noir_js.Noir(fixturesCircuit);
    const spongeHelperNoir = new noir_js.Noir(spongeHelperCircuit);
    const instance = new Poseidon2Hasher(noir, spongeHelperNoir);
    instance.initialized = true;
    Poseidon2Hasher.instance = instance;
    return instance;
  }
  /**
   * Reset singleton (for testing only)
   */
  static resetInstance() {
    Poseidon2Hasher.instance = null;
    Poseidon2Hasher.initPromise = null;
  }
  /**
   * Extract the single return value of a circuit execution result, tolerating
   * both camelCase (`returnValue`) and snake_case (`return_value`) shapes.
   * @throws {Error} when the circuit produced no value
   */
  #requireReturn(result) {
    const returnValue = result.returnValue ?? result.return_value;
    if (!returnValue) {
      throw new Error("Noir circuit returned no value");
    }
    return returnValue;
  }
  /**
   * Extract and validate the 4-element permutation state returned by the
   * sponge helper circuit (raw values, typically hex strings).
   * @throws {Error} when the result is missing or not a 4-element array
   */
  #requireState(result) {
    const state = result.returnValue ?? result.return_value;
    if (!state || !Array.isArray(state) || state.length !== 4) {
      throw new Error("Sponge helper circuit returned invalid state array");
    }
    return state;
  }
  /** Run the fixed hash circuit on a 4-element input vector; returns state[0] as bigint. */
  async #executeHash(inputs) {
    const result = await this.noir.execute({ inputs });
    return BigInt(this.#requireReturn(result));
  }
  /**
   * Map `worker` over `items` with controlled concurrency: batches of up to
   * `batchSize` run in parallel (Promise.all), batches themselves run
   * sequentially to limit memory pressure. Output order matches input order.
   */
  async #mapBatched(items, worker, batchSize) {
    const results = new Array(items.length);
    for (let i = 0; i < items.length; i += batchSize) {
      const batch = items.slice(i, Math.min(i + batchSize, items.length));
      const batchResults = await Promise.all(batch.map((item) => worker(item)));
      for (let j = 0; j < batchResults.length; j++) {
        results[i + j] = batchResults[j];
      }
    }
    return results;
  }
  /**
   * Hash two field elements: Poseidon2(left, right, DOMAIN_HASH2, 0)
   *
   * BA-003: slot 2 carries the "H2M" domain tag to distinguish pair-hashing
   * from the single/triple/quad modes.
   *
   * @param left - Left input (bigint or hex string)
   * @param right - Right input (bigint or hex string)
   * @returns Poseidon2 hash as bigint
   */
  async hashPair(left, right) {
    return this.#executeHash([this.toHex(left), this.toHex(right), DOMAIN_HASH2, ZERO_PAD]);
  }
  /**
   * Hash a single field element: Poseidon2(value, DOMAIN_HASH1, 0, 0)
   *
   * SA-007 FIX: DOMAIN_HASH1 in slot 1 prevents collision with
   * hash4(value, 0, 0, 0).
   *
   * @param value - Input value (bigint or hex string)
   * @returns Poseidon2 hash as bigint
   */
  async hashSingle(value) {
    return this.#executeHash([this.toHex(value), DOMAIN_HASH1, ZERO_PAD, ZERO_PAD]);
  }
  /**
   * Hash three field elements: Poseidon2(a, b, c, DOMAIN_HASH3)
   *
   * Domain separation: DOMAIN_HASH3 in slot 3 prevents collision with
   * hash4(a, b, c, 0).
   *
   * @returns Poseidon2 hash as bigint
   */
  async hash3(a, b, c) {
    return this.#executeHash([this.toHex(a), this.toHex(b), this.toHex(c), DOMAIN_HASH3]);
  }
  /**
   * Hash four field elements using a 2-round Poseidon2 sponge (BR5-001).
   *
   * Matches the Noir circuit poseidon2_hash4:
   *   Round 1: state = permute([DOMAIN_HASH4, a, b, c])
   *   Round 2: state[1] += d (mod p), state = permute(state), return state[0]
   *
   * @returns Poseidon2 hash as bigint
   */
  async hash4(a, b, c, d) {
    const r1 = await this.spongeHelperNoir.execute({
      inputs: [DOMAIN_HASH4, this.toHex(a), this.toHex(b), this.toHex(c)]
    });
    const r1State = this.#requireState(r1);
    // Absorb d into the first rate element before the second permutation.
    const s1PlusD = (BigInt(r1State[1]) + BigInt(this.toHex(d))) % Poseidon2Hasher.BN254_MODULUS;
    return this.#executeHash([r1State[0], this.toHex(s1PlusD), r1State[2], r1State[3]]);
  }
  /**
   * Batch hash multiple pairs with controlled concurrency.
   * @param pairs - Array of [left, right] pairs to hash
   * @param batchSize - Max concurrent operations (default: 64)
   * @returns Array of hashes in same order as input pairs
   */
  async hashPairsBatch(pairs, batchSize = DEFAULT_BATCH_SIZE) {
    return this.#mapBatched(pairs, ([left, right]) => this.hashPair(left, right), batchSize);
  }
  /**
   * Batch hash multiple single values with controlled concurrency.
   * @param values - Array of values to hash
   * @param batchSize - Max concurrent operations (default: 64)
   * @returns Array of hashes in same order as input values
   */
  async hashSinglesBatch(values, batchSize = DEFAULT_BATCH_SIZE) {
    return this.#mapBatched(values, (value) => this.hashSingle(value), batchSize);
  }
  /**
   * Poseidon2 sponge absorbing exactly 24 field elements.
   * Rate = 3, capacity = 1, state width = 4.
   *
   * CRITICAL: inputs are ADDED to the rate elements, never overwritten —
   * overwriting would discard the chained state and create collision
   * vulnerabilities (see TWO-TREE-AGENT-REVIEW-SUMMARY.md BLOCKER-3).
   *
   * ALGORITHM:
   *   1. state = [domainTag mod p, 0, 0, 0]  (tag sits in the capacity slot)
   *   2. For each of the 8 chunks of 3 inputs: add them to state[1..3],
   *      then apply a full permutation via the sponge helper circuit
   *   3. Squeeze: return state[0]
   *
   * @param inputs - Array of exactly 24 field elements (district IDs)
   * @param domainTag - Domain separation tag (default: DOMAIN_SPONGE_24)
   * @returns Poseidon2 sponge output as bigint (district commitment)
   */
  async poseidon2Sponge(inputs, domainTag = BigInt(DOMAIN_SPONGE_24)) {
    if (inputs.length !== 24) {
      throw new Error(`poseidon2Sponge expects 24 inputs, got ${inputs.length}`);
    }
    for (let i = 0; i < inputs.length; i++) {
      if (inputs[i] < 0n) {
        throw new Error(`Input ${i} is negative: ${inputs[i]}`);
      }
      if (inputs[i] >= Poseidon2Hasher.BN254_MODULUS) {
        throw new Error(`Input ${i} exceeds BN254 field modulus: ${inputs[i]}`);
      }
    }
    let state = [domainTag % Poseidon2Hasher.BN254_MODULUS, 0n, 0n, 0n];
    for (let i = 0; i < 8; i++) {
      state[1] = (state[1] + inputs[i * 3]) % Poseidon2Hasher.BN254_MODULUS;
      state[2] = (state[2] + inputs[i * 3 + 1]) % Poseidon2Hasher.BN254_MODULUS;
      state[3] = (state[3] + inputs[i * 3 + 2]) % Poseidon2Hasher.BN254_MODULUS;
      const result = await this.spongeHelperNoir.execute({
        inputs: state.map((x) => this.toHex(x))
      });
      state = this.#requireState(result).map((x) => BigInt(x));
    }
    return state[0];
  }
  /**
   * Hash a string to a BN254 field element.
   *
   * UTF-8 encodes, splits into 31-byte chunks (safe for the 254-bit field),
   * then folds: hash = hashSingle(byteLength); hash = hashPair(hash, chunk)…
   *
   * LENGTH PREFIX (BA-022 fix): without committing to the byte length first,
   * "" and "\x00" would both reduce to hashSingle(0n), and strings whose
   * chunk representations share trailing zero bytes could collide. Hashing
   * the length makes each distinct length a separate domain.
   *
   * @param str - String to hash
   * @returns Poseidon2 hash as bigint
   */
  async hashString(str) {
    const bytes = Buffer.from(str, "utf-8");
    const chunks = [];
    for (let i = 0; i < bytes.length; i += 31) {
      const chunk = bytes.subarray(i, Math.min(i + 31, bytes.length));
      chunks.push(BigInt("0x" + chunk.toString("hex")));
    }
    let hash = await this.hashSingle(BigInt(bytes.length));
    for (const chunk of chunks) {
      hash = await this.hashPair(hash, chunk);
    }
    return hash;
  }
  /**
   * Batch hash multiple strings with controlled concurrency.
   * @param strings - Array of strings to hash
   * @param batchSize - Max concurrent operations (default: 64)
   * @returns Array of hashes in same order as input strings
   */
  async hashStringsBatch(strings, batchSize = DEFAULT_BATCH_SIZE) {
    return this.#mapBatched(strings, (str) => this.hashString(str), batchSize);
  }
  /**
   * Convert value to 0x-prefixed 64-char hex string.
   * BA-016: Validates hex characters, rejects negative bigints,
   * and enforces BN254 field modulus bound.
   * @param value - bigint or (optionally 0x-prefixed) hex string
   * @throws {Error} on negative bigints, non-hex characters, or values >= p
   */
  toHex(value) {
    if (typeof value === "bigint") {
      if (value < 0n) {
        throw new Error(`Negative bigint not allowed: ${value}`);
      }
      if (value >= Poseidon2Hasher.BN254_MODULUS) {
        throw new Error(`Value exceeds BN254 field modulus: ${value}`);
      }
      return "0x" + value.toString(16).padStart(64, "0");
    }
    const hex = value.startsWith("0x") ? value.slice(2) : value;
    if (!/^[0-9a-fA-F]*$/.test(hex)) {
      throw new Error(`Invalid hex string: ${value}`);
    }
    const padded = hex.padStart(64, "0");
    const asBigInt = BigInt("0x" + padded);
    if (asBigInt >= Poseidon2Hasher.BN254_MODULUS) {
      throw new Error(`Value exceeds BN254 field modulus: 0x${padded}`);
    }
    return "0x" + padded;
  }
}
3039
+
3040
/**
 * VOTER Protocol Crypto SDK
 *
 * Cryptography library providing:
 * - Poseidon2 hashing (Noir stdlib compatibility for ZK circuits)
 * - District resolution services
 * - Geocoding services
 *
 * @packageDocumentation
 */
// Export BN254_MODULUS for field validation in dependent packages (BR3-003 fix).
// Module-level alias of the static class constant so consumers can range-check
// field elements without referencing Poseidon2Hasher directly.
const BN254_MODULUS = Poseidon2Hasher.BN254_MODULUS;
3052
+
3053
// Lazy loaders for the two-tree membership circuit at each supported depth.
// Each loader resolves to the circuit JSON (module default export). The
// require() paths must stay as string literals so bundlers can resolve the
// chunk files statically.
const twoTreeCircuitLoaders = {
  18: () => Promise.resolve().then(() => require('./two_tree_membership_18-Dfr1mYE-.cjs')).then((m) => m.default),
  20: () => Promise.resolve().then(() => require('./two_tree_membership_20-jDMJJKIC.cjs')).then((m) => m.default),
  22: () => Promise.resolve().then(() => require('./two_tree_membership_22-CjwYhC_e.cjs')).then((m) => m.default),
  24: () => Promise.resolve().then(() => require('./two_tree_membership_24-Br8I-xLQ.cjs')).then((m) => m.default)
};
2482
3071
/**
 * Pick a worker-thread count for the proving backend.
 * Falls back to single-threaded mode when SharedArrayBuffer is unavailable
 * (i.e. the page is not cross-origin isolated), otherwise uses the reported
 * core count (default 4) capped at 8.
 */
function detectThreads() {
  if (typeof SharedArrayBuffer === "undefined") {
    console.log("[TwoTreeNoirProver] SharedArrayBuffer unavailable - using single-threaded mode");
    return 1;
  }
  // || (not ??) is intentional upstream: a reported 0 also falls back to 4.
  const detected = typeof navigator === "undefined" ? 4 : navigator.hardwareConcurrency || 4;
  return detected > 8 ? 8 : detected;
}
2491
- class NoirProver {
3080
/**
 * Parse one proof public input (0x-prefixed hex string) into a bigint,
 * rejecting malformed strings and values outside the BN254 scalar field
 * (BR5-006 field-aliasing defense).
 */
function parsePublicInput(hex, label) {
  const wellFormed = typeof hex === "string" && /^0x[0-9a-fA-F]+$/.test(hex);
  if (!wellFormed) {
    const shown = typeof hex === "string" ? `"${hex.slice(0, 20)}"` : typeof hex;
    throw new Error(
      `BR5-006: Invalid public input format for ${label}: expected 0x-prefixed hex string, got ${shown}`
    );
  }
  const parsed = BigInt(hex);
  if (parsed >= BN254_MODULUS) {
    throw new Error(
      `BR5-006: Public input ${label} (${parsed}) exceeds BN254 scalar field modulus. Possible field aliasing attack.`
    );
  }
  return parsed;
}
3094
/**
 * Serialize a non-negative BN254 field element to a 0x-prefixed,
 * zero-padded 64-character hex string.
 */
function toHex(value) {
  if (value < 0n) throw new Error("Field element cannot be negative");
  if (value >= BN254_MODULUS) {
    throw new Error(`Field element ${value} exceeds BN254 scalar field modulus`);
  }
  const digits = value.toString(16);
  return `0x${digits.padStart(64, "0")}`;
}
3103
+ class TwoTreeNoirProver {
2492
3104
  backend = null;
2493
3105
  noir = null;
2494
- config;
2495
3106
  threads;
3107
+ depth;
2496
3108
  constructor(config = {}) {
2497
- this.config = {
2498
- circuitName: "district_membership",
2499
- ...config
2500
- };
2501
3109
  this.threads = config.threads ?? detectThreads();
3110
+ this.depth = config.depth ?? DEFAULT_CIRCUIT_DEPTH;
2502
3111
  }
3112
+ // ========================================================================
3113
+ // Initialization
3114
+ // ========================================================================
2503
3115
  /**
2504
- * Initialize the prover (must be called before generating proofs)
3116
+ * Initialize the prover (must be called before generating proofs).
3117
+ * Lazily loads the circuit for the configured depth.
2505
3118
  */
2506
3119
  async init() {
2507
3120
  if (this.backend && this.noir) return;
2508
- console.log(`[NoirProver] Initializing with ${this.threads} thread(s)...`);
3121
+ console.log(`[TwoTreeNoirProver] Initializing depth=${this.depth} with ${this.threads} thread(s)...`);
2509
3122
  const start = Date.now();
2510
- const circuit = circuitJson;
3123
+ const loader = twoTreeCircuitLoaders[this.depth];
3124
+ if (!loader) {
3125
+ throw new Error(`Unsupported circuit depth: ${this.depth}. Must be 18, 20, 22, or 24.`);
3126
+ }
3127
+ const circuit = await loader();
2511
3128
  this.noir = new noir_js.Noir(circuit);
2512
3129
  this.backend = new bb_js.UltraHonkBackend(circuit.bytecode, { threads: this.threads });
2513
- console.log(`[NoirProver] Initialized in ${Date.now() - start}ms (${this.threads} threads)`);
3130
+ console.log(`[TwoTreeNoirProver] Initialized depth=${this.depth} in ${Date.now() - start}ms (${this.threads} threads)`);
2514
3131
  }
2515
3132
  /**
2516
- * Pre-warm the prover by initializing backend
2517
- * Call this on app load to hide latency from user
3133
+ * Get the circuit depth for this prover instance.
3134
+ */
3135
  getDepth() {
    // Depth is fixed at construction; validateInputs() checks all Merkle path
    // lengths against this value.
    return this.depth;
  }
3138
+ /**
3139
+ * Pre-warm the prover by initializing backend.
3140
+ * Call this on app load to hide latency from user.
2518
3141
  */
2519
3142
  async warmup() {
    // Idempotent: init() returns immediately once backend and noir are set.
    await this.init();
    console.log("[TwoTreeNoirProver] Warmup complete (backend initialized)");
  }
3146
+ // ========================================================================
3147
+ // Input Validation
3148
+ // ========================================================================
3149
+ /** Maximum allowed Merkle depth (prevents DoS via oversized arrays) */
3150
+ static MAX_MERKLE_DEPTH = 24;
2523
3151
  /**
2524
- * Generate a ZK proof for district membership
3152
+ * Validate all inputs before circuit execution.
3153
+ * Throws descriptive errors for any invalid input.
2525
3154
  */
2526
- async prove(inputs) {
3155
  validateInputs(inputs) {
    // --- Zero-value checks (each zero has a distinct security consequence) ---
    if (inputs.userSecret === 0n) {
      throw new Error(
        "user_secret cannot be zero (SA-011). A zero secret produces predictable nullifiers."
      );
    }
    if (inputs.cellId === 0n) {
      throw new Error(
        "cell_id cannot be zero. A zero cell ID produces a degenerate cell map leaf."
      );
    }
    if (inputs.actionDomain === 0n) {
      throw new Error(
        "action_domain cannot be zero. A zero action domain produces a universal nullifier that would be consumed across ALL elections, permanently blocking the user."
      );
    }
    if (inputs.registrationSalt === 0n) {
      throw new Error(
        "registration_salt cannot be zero. A zero salt reduces leaf preimage entropy."
      );
    }
    if (inputs.identityCommitment === 0n) {
      throw new Error(
        "identity_commitment cannot be zero. NUL-001 requires a verified identity commitment from self.xyz/didit to prevent Sybil attacks via re-registration."
      );
    }
    // Delegated range check (throws on invalid authority level).
    validateAuthorityLevel(inputs.authorityLevel);
    // --- Districts: fixed slot count, field range, and BR5-017 uniqueness ---
    if (!Array.isArray(inputs.districts) || inputs.districts.length !== DISTRICT_SLOT_COUNT) {
      throw new Error(
        `districts array must have exactly ${DISTRICT_SLOT_COUNT} elements, got ${inputs.districts?.length ?? "non-array"}`
      );
    }
    const nonZeroDistricts = /* @__PURE__ */ new Set();
    for (let i = 0; i < inputs.districts.length; i++) {
      const d = inputs.districts[i];
      if (d < 0n) {
        throw new Error(`districts[${i}] cannot be negative`);
      }
      if (d >= BN254_MODULUS) {
        throw new Error(`districts[${i}] exceeds BN254 scalar field modulus`);
      }
      // Zero means "empty slot" and may repeat; non-zero IDs must be unique.
      if (d !== 0n) {
        if (nonZeroDistricts.has(d)) {
          throw new Error(
            `BR5-017: Duplicate district ID at slot ${i}: 0x${d.toString(16)}. Each non-zero district must appear in exactly one positional slot.`
          );
        }
        nonZeroDistricts.add(d);
      }
    }
    // --- Scalar field-range checks for every bigint input ---
    const fieldChecks = [
      [inputs.userRoot, "userRoot"],
      [inputs.cellMapRoot, "cellMapRoot"],
      [inputs.nullifier, "nullifier"],
      [inputs.actionDomain, "actionDomain"],
      [inputs.userSecret, "userSecret"],
      [inputs.cellId, "cellId"],
      [inputs.registrationSalt, "registrationSalt"],
      [inputs.identityCommitment, "identityCommitment"]
    ];
    for (const [val, name] of fieldChecks) {
      if (val < 0n) {
        throw new Error(`${name} cannot be negative`);
      }
      if (val >= BN254_MODULUS) {
        throw new Error(`${name} exceeds BN254 scalar field modulus`);
      }
    }
    // --- Tree 1 (user) Merkle path: shape before content ---
    if (!Array.isArray(inputs.userPath)) {
      throw new Error("userPath must be an array");
    }
    // DoS guard: cap array length before the depth comparison.
    if (inputs.userPath.length > TwoTreeNoirProver.MAX_MERKLE_DEPTH) {
      throw new Error(
        `userPath exceeds maximum allowed depth: ${inputs.userPath.length} > ${TwoTreeNoirProver.MAX_MERKLE_DEPTH}`
      );
    }
    if (inputs.userPath.length !== this.depth) {
      throw new Error(
        `userPath length mismatch: expected ${this.depth}, got ${inputs.userPath.length}. Did you initialize the prover with the wrong depth?`
      );
    }
    // --- Tree 2 (cell map) SMT path and direction bits ---
    if (!Array.isArray(inputs.cellMapPath)) {
      throw new Error("cellMapPath must be an array");
    }
    if (inputs.cellMapPath.length !== this.depth) {
      throw new Error(
        `cellMapPath length mismatch: expected ${this.depth}, got ${inputs.cellMapPath.length}`
      );
    }
    if (!Array.isArray(inputs.cellMapPathBits)) {
      throw new Error("cellMapPathBits must be an array");
    }
    if (inputs.cellMapPathBits.length !== this.depth) {
      throw new Error(
        `cellMapPathBits length mismatch: expected ${this.depth}, got ${inputs.cellMapPathBits.length}`
      );
    }
    for (let i = 0; i < inputs.cellMapPathBits.length; i++) {
      // Strict 0/1 check (numbers, not bigints).
      if (inputs.cellMapPathBits[i] !== 0 && inputs.cellMapPathBits[i] !== 1) {
        throw new Error(
          `cellMapPathBits[${i}] must be 0 or 1, got ${inputs.cellMapPathBits[i]}`
        );
      }
    }
    // Leaf index must address a leaf within a tree of 2^depth leaves.
    if (inputs.userIndex < 0 || inputs.userIndex >= 2 ** this.depth) {
      throw new Error(
        `userIndex out of range: must be 0 to ${2 ** this.depth - 1}, got ${inputs.userIndex}`
      );
    }
    // --- Field-range checks for every path element in both trees ---
    for (let i = 0; i < inputs.userPath.length; i++) {
      if (inputs.userPath[i] < 0n || inputs.userPath[i] >= BN254_MODULUS) {
        throw new Error(`userPath[${i}] outside BN254 scalar field`);
      }
    }
    for (let i = 0; i < inputs.cellMapPath.length; i++) {
      if (inputs.cellMapPath[i] < 0n || inputs.cellMapPath[i] >= BN254_MODULUS) {
        throw new Error(`cellMapPath[${i}] outside BN254 scalar field`);
      }
    }
  }
3275
+ // ========================================================================
3276
+ // Input Formatting
3277
+ // ========================================================================
3278
+ /**
3279
+ * Format TypeScript inputs into the Noir circuit's expected parameter names
3280
+ * and types (snake_case, hex strings, integer arrays).
3281
+ *
3282
+ * This is exposed as a method for testing purposes.
3283
+ */
3284
+ formatInputs(inputs) {
3285
+ return {
3286
+ // Public inputs
3287
+ user_root: toHex(inputs.userRoot),
3288
+ cell_map_root: toHex(inputs.cellMapRoot),
3289
+ districts: inputs.districts.map(toHex),
3290
+ nullifier: toHex(inputs.nullifier),
3291
+ action_domain: toHex(inputs.actionDomain),
3292
+ authority_level: toHex(BigInt(inputs.authorityLevel)),
3293
+ // Private inputs (witnesses)
3294
+ user_secret: toHex(inputs.userSecret),
3295
+ cell_id: toHex(inputs.cellId),
3296
+ registration_salt: toHex(inputs.registrationSalt),
3297
+ identity_commitment: toHex(inputs.identityCommitment),
3298
+ // Tree 1: Standard Merkle proof
3299
+ user_path: inputs.userPath.map(toHex),
3300
+ user_index: inputs.userIndex,
3301
+ // Tree 2: SMT proof
3302
+ cell_map_path: inputs.cellMapPath.map(toHex),
3303
+ cell_map_path_bits: inputs.cellMapPathBits
3304
+ };
3305
+ }
3306
+ // ========================================================================
3307
+ // Proof Generation
3308
+ // ========================================================================
3309
+ /**
3310
+ * Generate a ZK proof for two-tree membership.
3311
+ *
3312
+ * The circuit internally verifies:
3313
+ * 1. User leaf in Tree 1: hash4(user_secret, cell_id, registration_salt, authority_level)
3314
+ * 2. District commitment: poseidon2_sponge_24(districts)
3315
+ * 3. Cell map leaf in Tree 2: hash2(cell_id, district_commitment)
3316
+ * 4. Nullifier: hash2(identity_commitment, action_domain) (NUL-001)
3317
+ * 5. Authority level in [1, 5]
3318
+ *
3319
+ * @param inputs - All public and private inputs for the circuit
3320
+ * @returns Proof bytes and public inputs as hex strings
3321
+ */
3322
+ async generateProof(inputs) {
3323
+ this.validateInputs(inputs);
2527
3324
  await this.init();
2528
- console.log("[NoirProver] Generating witness...");
3325
+ console.log("[TwoTreeNoirProver] Generating witness...");
2529
3326
  const witnessStart = Date.now();
2530
- const noirInputs = {
2531
- merkle_root: inputs.merkleRoot,
2532
- nullifier: inputs.nullifier,
2533
- authority_hash: inputs.authorityHash,
2534
- epoch_id: inputs.epochId,
2535
- campaign_id: inputs.campaignId,
2536
- leaf: inputs.leaf,
2537
- merkle_path: inputs.merklePath,
2538
- leaf_index: inputs.leafIndex,
2539
- user_secret: inputs.userSecret
2540
- };
3327
+ const noirInputs = this.formatInputs(inputs);
2541
3328
  const { witness } = await this.noir.execute(noirInputs);
2542
- console.log(`[NoirProver] Witness generated in ${Date.now() - witnessStart}ms`);
2543
- console.log("[NoirProver] Generating proof...");
3329
+ console.log(`[TwoTreeNoirProver] Witness generated in ${Date.now() - witnessStart}ms`);
3330
+ console.log("[TwoTreeNoirProver] Generating proof...");
2544
3331
  const proofStart = Date.now();
2545
3332
  const { proof, publicInputs } = await this.backend.generateProof(witness);
2546
- console.log(`[NoirProver] Proof generated in ${Date.now() - proofStart}ms`);
3333
+ console.log(`[TwoTreeNoirProver] Proof generated in ${Date.now() - proofStart}ms`);
3334
+ if (publicInputs.length !== TWO_TREE_PUBLIC_INPUT_COUNT) {
3335
+ throw new Error(
3336
+ `Unexpected public input count: expected ${TWO_TREE_PUBLIC_INPUT_COUNT}, got ${publicInputs.length}`
3337
+ );
3338
+ }
2547
3339
  return {
2548
3340
  proof,
2549
- publicInputs: {
2550
- merkleRoot: publicInputs[0] ?? inputs.merkleRoot,
2551
- nullifier: publicInputs[1] ?? inputs.nullifier,
2552
- authorityHash: publicInputs[2] ?? inputs.authorityHash,
2553
- epochId: publicInputs[3] ?? inputs.epochId,
2554
- campaignId: publicInputs[4] ?? inputs.campaignId
2555
- }
3341
+ publicInputs
2556
3342
  };
2557
3343
  }
3344
+ // ========================================================================
3345
+ // Proof Verification
3346
+ // ========================================================================
2558
3347
  /**
2559
- * Verify a proof
3348
+ * Verify a two-tree membership proof.
3349
+ *
3350
+ * BR5-006: Validates public input count before backend verification.
3351
+ * For full public input binding (matching proof outputs to expected values),
3352
+ * use verifyProofWithExpectedInputs().
3353
+ *
3354
+ * @param proofResult - The proof result from generateProof()
3355
+ * @returns true if the proof is valid
2560
3356
  */
2561
- async verify(proof, publicInputs) {
3357
+ async verifyProof(proofResult) {
3358
+ if (proofResult.publicInputs.length !== TWO_TREE_PUBLIC_INPUT_COUNT) {
3359
+ throw new Error(
3360
+ `BR5-006: Public input count mismatch: expected ${TWO_TREE_PUBLIC_INPUT_COUNT}, got ${proofResult.publicInputs.length}. Possible proof tampering.`
3361
+ );
3362
+ }
2562
3363
  await this.init();
2563
- return this.backend.verifyProof({ proof, publicInputs });
3364
+ return this.backend.verifyProof({
3365
+ proof: proofResult.proof,
3366
+ publicInputs: proofResult.publicInputs
3367
+ });
2564
3368
  }
2565
3369
  /**
2566
- * Clean up resources
3370
+ * Verify a proof AND validate that public inputs match expected values.
3371
+ *
3372
+ * BR5-006 FIX: The base verifyProof() only checks that the proof is
3373
+ * cryptographically valid for its public inputs. It does NOT verify that
3374
+ * those inputs match what the caller intended. An attacker could substitute
3375
+ * a valid proof generated for different inputs (e.g., different districts
3376
+ * or a different authority level).
3377
+ *
3378
+ * This method binds the proof to the caller's expected values by checking:
3379
+ * - user_root matches
3380
+ * - cell_map_root matches
3381
+ * - All 24 districts match
3382
+ * - nullifier matches
3383
+ * - action_domain matches
3384
+ * - authority_level matches
3385
+ *
3386
+ * @param proofResult - The proof from generateProof()
3387
+ * @param expectedInputs - The original inputs used for proof generation
3388
+ * @returns true if proof is valid AND public inputs match expectations
3389
+ */
3390
+ async verifyProofWithExpectedInputs(proofResult, expectedInputs) {
3391
+ const valid = await this.verifyProof(proofResult);
3392
+ if (!valid) return false;
3393
+ const pi = proofResult.publicInputs;
3394
+ const checks = [
3395
+ [0, expectedInputs.userRoot, "user_root"],
3396
+ [1, expectedInputs.cellMapRoot, "cell_map_root"],
3397
+ [26, expectedInputs.nullifier, "nullifier"],
3398
+ [27, expectedInputs.actionDomain, "action_domain"],
3399
+ [28, BigInt(expectedInputs.authorityLevel), "authority_level"]
3400
+ ];
3401
+ for (const [idx, expected, name] of checks) {
3402
+ const actual = parsePublicInput(pi[idx], `${name}[${idx}]`);
3403
+ if (actual !== expected) {
3404
+ throw new Error(
3405
+ `BR5-006: Public input mismatch at index ${idx} (${name}): expected ${expected}, got ${actual}`
3406
+ );
3407
+ }
3408
+ }
3409
+ for (let i = 0; i < DISTRICT_SLOT_COUNT; i++) {
3410
+ const actual = parsePublicInput(pi[2 + i], `district[${i}]`);
3411
+ const expected = expectedInputs.districts[i];
3412
+ if (actual !== expected) {
3413
+ throw new Error(
3414
+ `BR5-006: District mismatch at slot ${i} (public input index ${2 + i}): expected ${expected}, got ${actual}`
3415
+ );
3416
+ }
3417
+ }
3418
+ return true;
3419
+ }
3420
+ // ========================================================================
3421
+ // Lifecycle
3422
+ // ========================================================================
3423
+ /**
3424
+ * Clean up resources (WASM memory, web workers).
2567
3425
  */
2568
3426
  async destroy() {
2569
3427
  if (this.backend) {
@@ -2573,6 +3431,54 @@ class NoirProver {
2573
3431
  }
2574
3432
  }
2575
3433
  }
3434
+ const twoTreeProverInstances = /* @__PURE__ */ new Map();
3435
+ const twoTreeInitPromises = /* @__PURE__ */ new Map();
3436
/**
 * Get (or lazily create) the singleton TwoTreeNoirProver for a given depth.
 *
 * Concurrency-safe memoization: the first caller for a depth starts
 * initialization and stores the in-flight promise, so concurrent callers for
 * the same depth share one initialization instead of constructing duplicate
 * backends. A failed init clears the in-flight entry so it can be retried.
 *
 * @param depth - Circuit depth (defaults to DEFAULT_CIRCUIT_DEPTH)
 * @param config - Optional prover config; its depth is overridden by `depth`
 * @returns The initialized prover for that depth
 */
async function getTwoTreeProverForDepth(depth = DEFAULT_CIRCUIT_DEPTH, config) {
  const cached = twoTreeProverInstances.get(depth);
  if (cached) {
    return cached;
  }
  const inFlight = twoTreeInitPromises.get(depth);
  if (inFlight) {
    return inFlight;
  }
  // Fix: replace the explicit-Promise-construction anti-pattern (manually
  // captured resolveInit/rejectInit plus a detached fire-and-forget IIFE)
  // with a single async IIFE whose promise is memoized directly.
  const initPromise = (async () => {
    try {
      const prover = new TwoTreeNoirProver({ ...config, depth });
      await prover.init();
      twoTreeProverInstances.set(depth, prover);
      return prover;
    } catch (err) {
      // Preserve original behavior: always reject with an Error instance.
      throw err instanceof Error ? err : new Error(String(err));
    } finally {
      // Clear the in-flight marker on success and failure alike.
      twoTreeInitPromises.delete(depth);
    }
  })();
  twoTreeInitPromises.set(depth, initPromise);
  return initPromise;
}
3466
/**
 * Destroy every cached two-tree prover (all depths) and clear all
 * singleton state, including any in-flight init promises.
 */
async function resetTwoTreeProverSingleton() {
  const provers = [...twoTreeProverInstances.values()];
  await Promise.all(provers.map((p) => p.destroy()));
  twoTreeProverInstances.clear();
  twoTreeInitPromises.clear();
}
3474
/**
 * Destroy and forget the cached prover for a single depth, if one exists,
 * and drop any in-flight init promise for that depth.
 */
async function resetTwoTreeProverForDepth(depth) {
  const cached = twoTreeProverInstances.get(depth);
  if (cached) {
    await cached.destroy();
    twoTreeProverInstances.delete(depth);
  }
  twoTreeInitPromises.delete(depth);
}
2576
3482
 
2577
3483
  function checkCrossOriginIsolation() {
2578
3484
  const isolated = typeof crossOriginIsolated !== "undefined" && crossOriginIsolated;
@@ -2628,6 +3534,18 @@ if (typeof globalThis !== "undefined" && !globalThis.Buffer) {
2628
3534
  globalThis.Buffer = buffer.Buffer;
2629
3535
  }
2630
3536
 
3537
// CommonJS export surface. Two-tree additions (TwoTreeNoirProver, depth/slot
// constants, per-depth singleton helpers) sit alongside the existing
// NoirProver API, which is kept for backward compatibility.
exports.DEFAULT_CIRCUIT_DEPTH = DEFAULT_CIRCUIT_DEPTH;
exports.DISTRICT_SLOT_COUNT = DISTRICT_SLOT_COUNT;
exports.NoirProver = NoirProver;
exports.TWO_TREE_PUBLIC_INPUT_COUNT = TWO_TREE_PUBLIC_INPUT_COUNT;
exports.TwoTreeNoirProver = TwoTreeNoirProver;
exports.checkCrossOriginIsolation = checkCrossOriginIsolation;
exports.getProver = getProver;
exports.getProverForDepth = getProverForDepth;
exports.getTwoTreeProverForDepth = getTwoTreeProverForDepth;
exports.requireCrossOriginIsolation = requireCrossOriginIsolation;
exports.resetProverForDepth = resetProverForDepth;
exports.resetProverSingleton = resetProverSingleton;
exports.resetTwoTreeProverForDepth = resetTwoTreeProverForDepth;
exports.resetTwoTreeProverSingleton = resetTwoTreeProverSingleton;
exports.validateAuthorityLevel = validateAuthorityLevel;