@voter-protocol/noir-prover 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. package/circuits/district_membership_14.json +1 -0
  2. package/circuits/district_membership_18.json +1 -0
  3. package/circuits/district_membership_20.json +1 -0
  4. package/circuits/district_membership_22.json +1 -0
  5. package/circuits/district_membership_24.json +1 -0
  6. package/circuits/two_tree_membership_18.json +1 -0
  7. package/circuits/two_tree_membership_20.json +1 -0
  8. package/circuits/two_tree_membership_22.json +1 -0
  9. package/circuits/two_tree_membership_24.json +1 -0
  10. package/dist/district_membership_18-Breq2tlt.js +128 -0
  11. package/dist/district_membership_18-DSj7IJGE.cjs +139 -0
  12. package/dist/district_membership_20-Bu0PWpWT.js +128 -0
  13. package/dist/district_membership_20-DKRyEvX4.cjs +139 -0
  14. package/dist/district_membership_22-DlrHPQtq.js +128 -0
  15. package/dist/district_membership_22-pal861Dz.cjs +139 -0
  16. package/dist/district_membership_24-BAJ-CEhq.cjs +139 -0
  17. package/dist/district_membership_24-CM66_Yd9.js +128 -0
  18. package/dist/index.cjs +1041 -123
  19. package/dist/index.js +1030 -124
  20. package/dist/noir-prover/src/cross-origin-isolation.d.ts +1 -0
  21. package/dist/noir-prover/src/cross-origin-isolation.d.ts.map +1 -0
  22. package/dist/noir-prover/src/fixtures.d.ts +82 -0
  23. package/dist/noir-prover/src/fixtures.d.ts.map +1 -0
  24. package/dist/noir-prover/src/hash.worker.d.ts +2 -0
  25. package/dist/noir-prover/src/hash.worker.d.ts.map +1 -0
  26. package/dist/noir-prover/src/index.d.ts +10 -2
  27. package/dist/noir-prover/src/index.d.ts.map +1 -0
  28. package/dist/noir-prover/src/profiler.d.ts +74 -0
  29. package/dist/noir-prover/src/profiler.d.ts.map +1 -0
  30. package/dist/noir-prover/src/prover-e2e.test.d.ts +1 -0
  31. package/dist/noir-prover/src/prover-e2e.test.d.ts.map +1 -0
  32. package/dist/noir-prover/src/prover-orchestrator.d.ts +104 -0
  33. package/dist/noir-prover/src/prover-orchestrator.d.ts.map +1 -0
  34. package/dist/noir-prover/src/prover.d.ts +42 -1
  35. package/dist/noir-prover/src/prover.d.ts.map +1 -0
  36. package/dist/noir-prover/src/prover.test.d.ts +1 -0
  37. package/dist/noir-prover/src/prover.test.d.ts.map +1 -0
  38. package/dist/noir-prover/src/two-tree-prover.d.ts +106 -0
  39. package/dist/noir-prover/src/two-tree-prover.d.ts.map +1 -0
  40. package/dist/noir-prover/src/two-tree-prover.test.d.ts +18 -0
  41. package/dist/noir-prover/src/two-tree-prover.test.d.ts.map +1 -0
  42. package/dist/noir-prover/src/types.d.ts +283 -20
  43. package/dist/noir-prover/src/types.d.ts.map +1 -0
  44. package/dist/noir-prover/src/worker-protocol.d.ts +75 -0
  45. package/dist/noir-prover/src/worker-protocol.d.ts.map +1 -0
  46. package/dist/two_tree_membership_18-Dfr1mYE-.cjs +195 -0
  47. package/dist/two_tree_membership_18-DufFLCM8.js +184 -0
  48. package/dist/two_tree_membership_20-DhrOeOFx.js +184 -0
  49. package/dist/two_tree_membership_20-jDMJJKIC.cjs +195 -0
  50. package/dist/two_tree_membership_22-CjwYhC_e.cjs +195 -0
  51. package/dist/two_tree_membership_22-iMLJVJEK.js +184 -0
  52. package/dist/two_tree_membership_24-Br8I-xLQ.cjs +195 -0
  53. package/dist/two_tree_membership_24-Df3SNDL0.js +184 -0
  54. package/package.json +12 -8
package/dist/index.js CHANGED
@@ -2348,116 +2348,316 @@ ieee754.write = function (buffer, value, offset, isLE, mLen, nBytes) {
2348
2348
  }
2349
2349
  } (buffer));
2350
2350
 
2351
- const noir_version = "1.0.0-beta.16+2d46fca7203545cbbfb31a0d0328de6c10a8db95";
2352
- const hash = "7475465969754700296";
2353
- const abi = {
2351
// Circuit configuration constants shared by the district-membership prover.
const DEFAULT_CIRCUIT_DEPTH = 20;
const PUBLIC_INPUT_COUNT = 5;
/**
 * Validate that an authority level is an integer in the inclusive range [1, 5].
 *
 * @param {number} level - Candidate authority level.
 * @returns {number} The same level, when valid.
 * @throws {Error} When the value is non-integer or outside 1-5.
 */
function validateAuthorityLevel(level) {
  const isValid = Number.isInteger(level) && level >= 1 && level <= 5;
  if (!isValid) {
    throw new Error(`Invalid authority level: ${level}. Must be integer 1-5.`);
  }
  return level;
}
// Constants for the two-tree membership circuit variant.
const DISTRICT_SLOT_COUNT = 24;
const TWO_TREE_PUBLIC_INPUT_COUNT = 29;
2361
+
2362
/**
 * Lazy loaders for the district-membership circuit artifacts, keyed by
 * supported Merkle depth. Each loader dynamically imports its code-split
 * chunk and yields the compiled circuit (the chunk's default export), so
 * only the depth actually used is ever downloaded/parsed.
 */
const circuitLoaders = {
  18: async () => (await import('./district_membership_18-Breq2tlt.js')).default,
  20: async () => (await import('./district_membership_20-Bu0PWpWT.js')).default,
  22: async () => (await import('./district_membership_22-DlrHPQtq.js')).default,
  24: async () => (await import('./district_membership_24-CM66_Yd9.js')).default
};
2380
/**
 * Decide how many proving threads to use.
 *
 * Multithreaded proving needs SharedArrayBuffer (cross-origin isolation in
 * browsers); when it is unavailable we fall back to one thread. Otherwise
 * use the reported hardware concurrency (4 when unknown), capped at 8.
 *
 * @returns {number} Thread count in the range [1, 8].
 */
function detectThreads$1() {
  if (typeof SharedArrayBuffer === "undefined") {
    console.log("[NoirProver] SharedArrayBuffer unavailable - using single-threaded mode");
    return 1;
  }
  let cores = 4;
  if (typeof navigator !== "undefined" && navigator.hardwareConcurrency) {
    cores = navigator.hardwareConcurrency;
  }
  return Math.min(cores, 8);
}
2389
/**
 * ZK prover for the district-membership circuit.
 *
 * Wraps a Noir program plus an UltraHonk backend. The circuit artifact for
 * the configured Merkle depth (18/20/22/24) is loaded lazily on first use,
 * so constructing a NoirProver is cheap.
 */
class NoirProver {
  // UltraHonkBackend instance; created in init(), null until then.
  backend = null;
  // Noir program wrapper; created in init(), null until then.
  noir = null;
  config;
  threads;
  depth;
  /**
   * @param {object} [config] - Prover options.
   * @param {number} [config.threads] - Proving threads; auto-detected when omitted.
   * @param {number} [config.depth] - Merkle depth (18|20|22|24); defaults to 20.
   * @param {string} [config.circuitName] - Circuit name (informational).
   */
  constructor(config = {}) {
    this.config = {
      circuitName: "district_membership",
      ...config
    };
    this.threads = config.threads ?? detectThreads$1();
    this.depth = config.depth ?? DEFAULT_CIRCUIT_DEPTH;
  }
  /**
   * Initialize the prover (must be called before generating proofs).
   * Lazily loads the circuit for the configured depth; idempotent.
   * @throws {Error} When the configured depth has no circuit loader.
   */
  async init() {
    if (this.backend && this.noir) return;
    console.log(`[NoirProver] Initializing depth=${this.depth} with ${this.threads} thread(s)...`);
    const start = Date.now();
    const loader = circuitLoaders[this.depth];
    if (!loader) {
      throw new Error(`Unsupported circuit depth: ${this.depth}. Must be 18, 20, 22, or 24.`);
    }
    const circuit = await loader();
    this.noir = new Noir(circuit);
    this.backend = new UltraHonkBackend(circuit.bytecode, { threads: this.threads });
    console.log(`[NoirProver] Initialized depth=${this.depth} in ${Date.now() - start}ms (${this.threads} threads)`);
  }
  /**
   * Get the circuit depth for this prover instance.
   * @returns {number} Configured Merkle depth.
   */
  getDepth() {
    return this.depth;
  }
  /**
   * Pre-warm the prover by initializing the backend.
   * Call this on app load to hide latency from the user.
   */
  async warmup() {
    await this.init();
    console.log("[NoirProver] Warmup complete (backend initialized)");
  }
  /** Maximum allowed Merkle depth (prevents DoS via oversized arrays) */
  static MAX_MERKLE_DEPTH = 24;
  /**
   * Generate a ZK proof for district membership.
   *
   * The secure circuit computes leaf and nullifier internally:
   * - leaf = hash(userSecret, districtId, authorityLevel, registrationSalt)
   * - nullifier = hash(userSecret, actionDomain)
   *
   * This prevents attackers from submitting arbitrary leaves or nullifiers.
   *
   * @param {object} inputs - Witness inputs (public + private, see below).
   * @returns {Promise<{proof: Uint8Array, publicInputs: object}>} Proof plus
   *   decoded public inputs.
   * @throws {Error} On malformed merklePath, depth mismatch, invalid
   *   authority level, or unexpected public-input count.
   */
  async prove(inputs) {
    await this.init();
    if (!Array.isArray(inputs.merklePath)) {
      throw new Error("merklePath must be an array");
    }
    // Hard cap first (DoS guard), then exact-depth check for clearer errors.
    if (inputs.merklePath.length > NoirProver.MAX_MERKLE_DEPTH) {
      throw new Error(
        `merklePath exceeds maximum allowed depth: ${inputs.merklePath.length} > ${NoirProver.MAX_MERKLE_DEPTH}`
      );
    }
    if (inputs.merklePath.length !== this.depth) {
      throw new Error(
        `merklePath length mismatch: expected ${this.depth}, got ${inputs.merklePath.length}. Did you initialize the prover with the wrong depth?`
      );
    }
    validateAuthorityLevel(inputs.authorityLevel);
    console.log("[NoirProver] Generating witness...");
    const witnessStart = Date.now();
    const noirInputs = {
      // Public inputs (contract-controlled)
      merkle_root: inputs.merkleRoot,
      action_domain: inputs.actionDomain,
      // Private inputs (user secrets - never revealed)
      user_secret: inputs.userSecret,
      district_id: inputs.districtId,
      authority_level: inputs.authorityLevel.toString(),
      registration_salt: inputs.registrationSalt,
      // Merkle proof data
      merkle_path: inputs.merklePath,
      leaf_index: inputs.leafIndex
    };
    const { witness } = await this.noir.execute(noirInputs);
    console.log(`[NoirProver] Witness generated in ${Date.now() - witnessStart}ms`);
    console.log("[NoirProver] Generating proof...");
    const proofStart = Date.now();
    const { proof, publicInputs } = await this.backend.generateProof(witness);
    console.log(`[NoirProver] Proof generated in ${Date.now() - proofStart}ms`);
    if (publicInputs.length !== PUBLIC_INPUT_COUNT) {
      throw new Error(
        `Expected ${PUBLIC_INPUT_COUNT} public inputs from circuit, got ${publicInputs.length}`
      );
    }
    // Public inputs are typically 0x-prefixed hex field elements; fall back
    // to base-10 for plain decimal strings. (The previous
    // `parseInt(x, 16) || parseInt(x, 10)` misread decimal strings as hex
    // and abused `||` as a NaN/0 fallback.)
    const rawLevelStr = String(publicInputs[2]);
    const rawAuthorityLevel = rawLevelStr.startsWith("0x")
      ? Number.parseInt(rawLevelStr, 16)
      : Number.parseInt(rawLevelStr, 10);
    return {
      proof,
      publicInputs: {
        merkleRoot: publicInputs[0],
        nullifier: publicInputs[1],
        authorityLevel: validateAuthorityLevel(rawAuthorityLevel),
        actionDomain: publicInputs[3],
        districtId: publicInputs[4]
      }
    };
  }
  /**
   * Verify a proof against its public inputs.
   * @returns {Promise<boolean>} True when the proof verifies.
   */
  async verify(proof, publicInputs) {
    await this.init();
    return this.backend.verifyProof({ proof, publicInputs });
  }
  /**
   * Clean up resources (worker threads held by the backend).
   * Safe to call multiple times; the prover may be re-init()ed afterwards.
   */
  async destroy() {
    if (this.backend) {
      await this.backend.destroy();
      this.backend = null;
      this.noir = null;
    }
  }
}
2517
/** Cached prover singletons, keyed by circuit depth. */
const proverInstances = /* @__PURE__ */ new Map();
/** In-flight initialization promises, keyed by depth (dedupes concurrent init). */
const initializationPromises = /* @__PURE__ */ new Map();
/**
 * Get (or lazily create) the shared prover for a given circuit depth.
 * Concurrent callers for the same depth share a single initialization;
 * a failed initialization is evicted so the next call can retry.
 *
 * @param {number} [depth] - Circuit Merkle depth; defaults to 20.
 * @param {object} [config] - Extra NoirProver options (its depth is overridden).
 * @returns {Promise<NoirProver>} The initialized singleton for that depth.
 */
async function getProverForDepth(depth = DEFAULT_CIRCUIT_DEPTH, config) {
  const existingInstance = proverInstances.get(depth);
  if (existingInstance) {
    return existingInstance;
  }
  const existingPromise = initializationPromises.get(depth);
  if (existingPromise) {
    return existingPromise;
  }
  // Start initialization once and publish the promise itself, instead of the
  // previous hand-rolled deferred (manual resolve/reject captured from a
  // `new Promise` executor) — same semantics, no explicit-construction
  // anti-pattern.
  const initPromise = (async () => {
    try {
      const prover = new NoirProver({ ...config, depth });
      await prover.init();
      proverInstances.set(depth, prover);
      return prover;
    } catch (err) {
      // Normalize non-Error throwables so callers always catch an Error.
      throw err instanceof Error ? err : new Error(String(err));
    } finally {
      initializationPromises.delete(depth);
    }
  })();
  initializationPromises.set(depth, initPromise);
  return initPromise;
}
2549
/**
 * Convenience accessor: resolve the depth from config (defaulting to 20)
 * and delegate to the per-depth singleton cache.
 *
 * @param {object} [config] - NoirProver options; config.depth selects the circuit.
 * @returns {Promise<NoirProver>} The initialized shared prover.
 */
async function getProver(config) {
  return getProverForDepth(config?.depth ?? DEFAULT_CIRCUIT_DEPTH, config);
}
2553
/**
 * Destroy every cached prover (in parallel) and clear both caches.
 * Intended for tests and teardown; safe to call when nothing is cached.
 */
async function resetProverSingleton() {
  const pending = [];
  for (const prover of proverInstances.values()) {
    pending.push(prover.destroy());
  }
  await Promise.all(pending);
  proverInstances.clear();
  initializationPromises.clear();
}
2561
/**
 * Destroy and evict the cached prover for a single depth (no-op when none
 * exists), and drop any in-flight initialization promise for that depth.
 *
 * @param {number} depth - Circuit Merkle depth whose singleton is reset.
 */
async function resetProverForDepth(depth) {
  const cached = proverInstances.get(depth);
  if (cached) {
    await cached.destroy();
    proverInstances.delete(depth);
  }
  initializationPromises.delete(depth);
}
2569
+
2570
+ const noir_version$1 = "1.0.0-beta.16+2d46fca7203545cbbfb31a0d0328de6c10a8db95";
2571
+ const hash$1 = "571519090691839533";
2572
+ const abi$1 = {
2354
2573
  parameters: [
2355
2574
  {
2356
- name: "merkle_root",
2357
- type: {
2358
- kind: "field"
2359
- },
2360
- visibility: "private"
2361
- },
2362
- {
2363
- name: "nullifier",
2364
- type: {
2365
- kind: "field"
2366
- },
2367
- visibility: "private"
2368
- },
2369
- {
2370
- name: "authority_hash",
2371
- type: {
2372
- kind: "field"
2373
- },
2374
- visibility: "private"
2375
- },
2376
- {
2377
- name: "epoch_id",
2378
- type: {
2379
- kind: "field"
2380
- },
2381
- visibility: "private"
2382
- },
2383
- {
2384
- name: "campaign_id",
2385
- type: {
2386
- kind: "field"
2387
- },
2388
- visibility: "private"
2389
- },
2390
- {
2391
- name: "leaf",
2392
- type: {
2393
- kind: "field"
2394
- },
2395
- visibility: "private"
2396
- },
2397
- {
2398
- name: "merkle_path",
2575
+ name: "inputs",
2399
2576
  type: {
2400
2577
  kind: "array",
2401
- length: 14,
2578
+ length: 4,
2402
2579
  type: {
2403
2580
  kind: "field"
2404
2581
  }
2405
2582
  },
2406
2583
  visibility: "private"
2584
+ }
2585
+ ],
2586
+ return_type: {
2587
+ abi_type: {
2588
+ kind: "field"
2407
2589
  },
2590
+ visibility: "public"
2591
+ },
2592
+ error_types: {
2593
+ }
2594
+ };
2595
+ const bytecode$1 = "H4sIAAAAAAAA/42PPQ5AQBCFd/0XDqGkI44gEpUoNQqJRkFEp3QEcQGnEI6jU2r0JrErY6t9yZeX3XmZH0peacybsm4tcIW9KWCjOkXOUZirKKcDBmACFvmL93aIlKiGsn6V9keweFsWr+OYF254JsPeTdFxzxebK9n325WgO1ThT8xxPV5UJ+c4AQAA";
2596
+ const debug_symbols$1 = "jZDRCoQgEEX/ZZ590C1h61eWJcymEETFNFiif1+LbOsh2Kdx5nruDHeGDts4NMr0doT6NUPrldZqaLSVIihr0nReCOS2CR4xjeCkJ8oJjyZAbaLWBCah4/ZpdMJsNQifVEoATZdqMuyVxvW1kB9N71FG2Q6zojhwzi48u+fLJ+O7Qfnkj8OBVX9fUFT5gpJfLninTkjlL5kBhXpdMwmvRKtxz7GPRp5iDR+XlRy881ZiFz2udpuWFnwB";
2597
+ const file_map$1 = {
2598
+ "19": {
2599
+ source: "// Exposed only for usage in `std::meta`\npub(crate) mod poseidon2;\n\nuse crate::default::Default;\nuse crate::embedded_curve_ops::{\n EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_array_return,\n};\nuse crate::meta::derive_via;\n\n#[foreign(sha256_compression)]\n// docs:start:sha256_compression\npub fn sha256_compression(input: [u32; 16], state: [u32; 8]) -> [u32; 8] {}\n// docs:end:sha256_compression\n\n#[foreign(keccakf1600)]\n// docs:start:keccakf1600\npub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {}\n// docs:end:keccakf1600\n\npub mod keccak {\n #[deprecated(\"This function has been moved to std::hash::keccakf1600\")]\n pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {\n super::keccakf1600(input)\n }\n}\n\n#[foreign(blake2s)]\n// docs:start:blake2s\npub fn blake2s<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake2s\n{}\n\n// docs:start:blake3\npub fn blake3<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake3\n{\n if crate::runtime::is_unconstrained() {\n // Temporary measure while Barretenberg is main proving system.\n // Please open an issue if you're working on another proving system and running into problems due to this.\n crate::static_assert(\n N <= 1024,\n \"Barretenberg cannot prove blake3 hashes with inputs larger than 1024 bytes\",\n );\n }\n __blake3(input)\n}\n\n#[foreign(blake3)]\nfn __blake3<let N: u32>(input: [u8; N]) -> [u8; 32] {}\n\n// docs:start:pedersen_commitment\npub fn pedersen_commitment<let N: u32>(input: [Field; N]) -> EmbeddedCurvePoint {\n // docs:end:pedersen_commitment\n pedersen_commitment_with_separator(input, 0)\n}\n\n#[inline_always]\npub fn pedersen_commitment_with_separator<let N: u32>(\n input: [Field; N],\n separator: u32,\n) -> EmbeddedCurvePoint {\n let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N];\n for i in 0..N {\n // we use the unsafe version because the multi_scalar_mul will constrain the scalars.\n points[i] = from_field_unsafe(input[i]);\n 
}\n let generators = derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n multi_scalar_mul(generators, points)\n}\n\n// docs:start:pedersen_hash\npub fn pedersen_hash<let N: u32>(input: [Field; N]) -> Field\n// docs:end:pedersen_hash\n{\n pedersen_hash_with_separator(input, 0)\n}\n\n#[no_predicates]\npub fn pedersen_hash_with_separator<let N: u32>(input: [Field; N], separator: u32) -> Field {\n let mut scalars: [EmbeddedCurveScalar; N + 1] = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N + 1];\n let mut generators: [EmbeddedCurvePoint; N + 1] =\n [EmbeddedCurvePoint::point_at_infinity(); N + 1];\n let domain_generators: [EmbeddedCurvePoint; N] =\n derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n\n for i in 0..N {\n scalars[i] = from_field_unsafe(input[i]);\n generators[i] = domain_generators[i];\n }\n scalars[N] = EmbeddedCurveScalar { lo: N as Field, hi: 0 as Field };\n\n let length_generator: [EmbeddedCurvePoint; 1] =\n derive_generators(\"pedersen_hash_length\".as_bytes(), 0);\n generators[N] = length_generator[0];\n multi_scalar_mul_array_return(generators, scalars, true)[0].x\n}\n\n#[field(bn254)]\n#[inline_always]\npub fn derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {\n crate::assert_constant(domain_separator_bytes);\n // TODO(https://github.com/noir-lang/noir/issues/5672): Add back assert_constant on starting_index\n __derive_generators(domain_separator_bytes, starting_index)\n}\n\n#[builtin(derive_pedersen_generators)]\n#[field(bn254)]\nfn __derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {}\n\n#[field(bn254)]\n// Decompose the input 'bn254 scalar' into two 128 bits limbs.\n// It is called 'unsafe' because it does not assert the limbs are 128 bits\n// Assuming the limbs are 128 bits:\n// Assert the decomposition does not overflow the field size.\nfn 
from_field_unsafe(scalar: Field) -> EmbeddedCurveScalar {\n // Safety: xlo and xhi decomposition is checked below\n let (xlo, xhi) = unsafe { crate::field::bn254::decompose_hint(scalar) };\n // Check that the decomposition is correct\n assert_eq(scalar, xlo + crate::field::bn254::TWO_POW_128 * xhi);\n // Check that the decomposition does not overflow the field size\n let (a, b) = if xhi == crate::field::bn254::PHI {\n (xlo, crate::field::bn254::PLO)\n } else {\n (xhi, crate::field::bn254::PHI)\n };\n crate::field::bn254::assert_lt(a, b);\n\n EmbeddedCurveScalar { lo: xlo, hi: xhi }\n}\n\npub fn poseidon2_permutation<let N: u32>(input: [Field; N], state_len: u32) -> [Field; N] {\n assert_eq(input.len(), state_len);\n poseidon2_permutation_internal(input)\n}\n\n#[foreign(poseidon2_permutation)]\nfn poseidon2_permutation_internal<let N: u32>(input: [Field; N]) -> [Field; N] {}\n\n// Generic hashing support.\n// Partially ported and impacted by rust.\n\n// Hash trait shall be implemented per type.\n#[derive_via(derive_hash)]\npub trait Hash {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher;\n}\n\n// docs:start:derive_hash\ncomptime fn derive_hash(s: TypeDefinition) -> Quoted {\n let name = quote { $crate::hash::Hash };\n let signature = quote { fn hash<H>(_self: Self, _state: &mut H) where H: $crate::hash::Hasher };\n let for_each_field = |name| quote { _self.$name.hash(_state); };\n crate::meta::make_trait_impl(\n s,\n name,\n signature,\n for_each_field,\n quote {},\n |fields| fields,\n )\n}\n// docs:end:derive_hash\n\n// Hasher trait shall be implemented by algorithms to provide hash-agnostic means.\n// TODO: consider making the types generic here ([u8], [Field], etc.)\npub trait Hasher {\n fn finish(self) -> Field;\n\n fn write(&mut self, input: Field);\n}\n\n// BuildHasher is a factory trait, responsible for production of specific Hasher.\npub trait BuildHasher {\n type H: Hasher;\n\n fn build_hasher(self) -> H;\n}\n\npub struct 
BuildHasherDefault<H>;\n\nimpl<H> BuildHasher for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n type H = H;\n\n fn build_hasher(_self: Self) -> H {\n H::default()\n }\n}\n\nimpl<H> Default for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n fn default() -> Self {\n BuildHasherDefault {}\n }\n}\n\nimpl Hash for Field {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self);\n }\n}\n\nimpl Hash for u1 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u128 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for i8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u8 as Field);\n }\n}\n\nimpl Hash for i16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u16 as Field);\n }\n}\n\nimpl Hash for i32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u32 as Field);\n }\n}\n\nimpl Hash for i64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u64 as Field);\n }\n}\n\nimpl Hash for bool {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for () {\n fn hash<H>(_self: Self, _state: &mut H)\n where\n H: Hasher,\n {}\n}\n\nimpl<T, let N: u32> Hash for [T; N]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n 
where\n H: Hasher,\n {\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<T> Hash for [T]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.len().hash(state);\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<A, B> Hash for (A, B)\nwhere\n A: Hash,\n B: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n }\n}\n\nimpl<A, B, C> Hash for (A, B, C)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n }\n}\n\nimpl<A, B, C, D> Hash for (A, B, C, D)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n }\n}\n\nimpl<A, B, C, D, E> Hash for (A, B, C, D, E)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n E: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n self.4.hash(state);\n }\n}\n\n// Some test vectors for Pedersen hash and Pedersen Commitment.\n// They have been generated using the same functions so the tests are for now useless\n// but they will be useful when we switch to Noir implementation.\n#[test]\nfn assert_pedersen() {\n assert_eq(\n pedersen_hash_with_separator([1], 1),\n 0x1b3f4b1a83092a13d8d1a59f7acb62aba15e7002f4440f2275edb99ebbc2305f,\n );\n assert_eq(\n pedersen_commitment_with_separator([1], 1),\n EmbeddedCurvePoint {\n x: 0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402,\n y: 0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126,\n is_infinite: false,\n },\n );\n\n assert_eq(\n pedersen_hash_with_separator([1, 2], 2),\n 0x26691c129448e9ace0c66d11f0a16d9014a9e8498ee78f4d69f0083168188255,\n );\n assert_eq(\n 
pedersen_commitment_with_separator([1, 2], 2),\n EmbeddedCurvePoint {\n x: 0x2e2b3b191e49541fe468ec6877721d445dcaffe41728df0a0eafeb15e87b0753,\n y: 0x2ff4482400ad3a6228be17a2af33e2bcdf41be04795f9782bd96efe7e24f8778,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3], 3),\n 0x0bc694b7a1f8d10d2d8987d07433f26bd616a2d351bc79a3c540d85b6206dbe4,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3], 3),\n EmbeddedCurvePoint {\n x: 0x1fee4e8cf8d2f527caa2684236b07c4b1bad7342c01b0f75e9a877a71827dc85,\n y: 0x2f9fedb9a090697ab69bf04c8bc15f7385b3e4b68c849c1536e5ae15ff138fd1,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4], 4),\n 0xdae10fb32a8408521803905981a2b300d6a35e40e798743e9322b223a5eddc,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4], 4),\n EmbeddedCurvePoint {\n x: 0x07ae3e202811e1fca39c2d81eabe6f79183978e6f12be0d3b8eda095b79bdbc9,\n y: 0x0afc6f892593db6fbba60f2da558517e279e0ae04f95758587760ba193145014,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5], 5),\n 0xfc375b062c4f4f0150f7100dfb8d9b72a6d28582dd9512390b0497cdad9c22,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5], 5),\n EmbeddedCurvePoint {\n x: 0x1754b12bd475a6984a1094b5109eeca9838f4f81ac89c5f0a41dbce53189bb29,\n y: 0x2da030e3cfcdc7ddad80eaf2599df6692cae0717d4e9f7bfbee8d073d5d278f7,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6], 6),\n 0x1696ed13dc2730062a98ac9d8f9de0661bb98829c7582f699d0273b18c86a572,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6], 6),\n EmbeddedCurvePoint {\n x: 0x190f6c0e97ad83e1e28da22a98aae156da083c5a4100e929b77e750d3106a697,\n y: 0x1f4b60f34ef91221a0b49756fa0705da93311a61af73d37a0c458877706616fb,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n 
0x128c0ff144fc66b6cb60eeac8a38e23da52992fc427b92397a7dffd71c45ede3,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n EmbeddedCurvePoint {\n x: 0x015441e9d29491b06563fac16fc76abf7a9534c715421d0de85d20dbe2965939,\n y: 0x1d2575b0276f4e9087e6e07c2cb75aa1baafad127af4be5918ef8a2ef2fea8fc,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n 0x2f960e117482044dfc99d12fece2ef6862fba9242be4846c7c9a3e854325a55c,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n EmbeddedCurvePoint {\n x: 0x1657737676968887fceb6dd516382ea13b3a2c557f509811cd86d5d1199bc443,\n y: 0x1f39f0cb569040105fa1e2f156521e8b8e08261e635a2b210bdc94e8d6d65f77,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n 0x0c96db0790602dcb166cc4699e2d306c479a76926b81c2cb2aaa92d249ec7be7,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n EmbeddedCurvePoint {\n x: 0x0a3ceae42d14914a432aa60ec7fded4af7dad7dd4acdbf2908452675ec67e06d,\n y: 0xfc19761eaaf621ad4aec9a8b2e84a4eceffdba78f60f8b9391b0bd9345a2f2,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n 0x2cd37505871bc460a62ea1e63c7fe51149df5d0801302cf1cbc48beb8dff7e94,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n EmbeddedCurvePoint {\n x: 0x2fb3f8b3d41ddde007c8c3c62550f9a9380ee546fcc639ffbb3fd30c8d8de30c,\n y: 0x300783be23c446b11a4c0fabf6c91af148937cea15fcf5fb054abf7f752ee245,\n is_infinite: false,\n },\n );\n}\n",
2600
+ path: "std/hash/mod.nr"
2601
+ },
2602
+ "51": {
2603
+ source: "use dep::std::hash::poseidon2_permutation;\n\nfn main(inputs: [Field; 4]) -> pub Field {\n let out = poseidon2_permutation(inputs, 4);\n out[0]\n}\n",
2604
+ path: "/Users/noot/Documents/voter-protocol/packages/crypto/noir/fixtures/src/main.nr"
2605
+ }
2606
+ };
2607
+ const expression_width$1 = {
2608
+ Bounded: {
2609
+ width: 4
2610
+ }
2611
+ };
2612
+ const fixturesCircuit = {
2613
+ noir_version: noir_version$1,
2614
+ hash: hash$1,
2615
+ abi: abi$1,
2616
+ bytecode: bytecode$1,
2617
+ debug_symbols: debug_symbols$1,
2618
+ file_map: file_map$1,
2619
+ expression_width: expression_width$1
2620
+ };
2621
+
2622
+ const noir_version = "1.0.0-beta.16+2d46fca7203545cbbfb31a0d0328de6c10a8db95";
2623
+ const hash = "17841920136822657451";
2624
+ const abi = {
2625
+ parameters: [
2408
2626
  {
2409
- name: "leaf_index",
2410
- type: {
2411
- kind: "integer",
2412
- sign: "unsigned",
2413
- width: 32
2414
- },
2415
- visibility: "private"
2416
- },
2417
- {
2418
- name: "user_secret",
2627
+ name: "inputs",
2419
2628
  type: {
2420
- kind: "field"
2629
+ kind: "array",
2630
+ length: 4,
2631
+ type: {
2632
+ kind: "field"
2633
+ }
2421
2634
  },
2422
2635
  visibility: "private"
2423
2636
  }
2424
2637
  ],
2425
2638
  return_type: {
2426
2639
  abi_type: {
2427
- kind: "tuple",
2428
- fields: [
2429
- {
2430
- kind: "field"
2431
- },
2432
- {
2433
- kind: "field"
2434
- },
2435
- {
2436
- kind: "field"
2437
- },
2438
- {
2439
- kind: "field"
2440
- },
2441
- {
2442
- kind: "field"
2443
- }
2444
- ]
2640
+ kind: "array",
2641
+ length: 4,
2642
+ type: {
2643
+ kind: "field"
2644
+ }
2445
2645
  },
2446
2646
  visibility: "public"
2447
2647
  },
2448
2648
  error_types: {
2449
2649
  }
2450
2650
  };
2451
- const bytecode = "H4sIAAAAAAAA/9Vcf5RVVRX+3jCjCFhkgz/4MTytSEEEMioUgRILRNCiAhEkkQFHfijTgPywgkIRrEhHEazIUAQrKhQBi2oQwYoKRbCiQkaopDLpj9aqtdRx78O53vMO9zp733vfc72z1mavuefc7/vO++4738C8IYfjo9L2GdfVzTxG/bf26xxVG9urqfLO2gqcOHK25yEauWrh2paWpsjrebTOwX+4WrvY3jWYDTbYNQKwjZzIwLkb6jt5VH1zvwfO23rVZZsXLRo74dwLXxo274mb77q0+b9304uc6yJb23gsRlvU8F8o35DWeFxNrY2340GIG3nZsgpXazfba4wChA8CT3T3rtUg3FQwivRwNL7S0tLSDXLTaqAzLYcT9wLoTatRaHT5uqch5JsrvPtaI++u4MgrcJPuIQ/9OzMv5ygkVPKcDTkPY3dAeBLw1yfZXmX72U4P1p1D9R6q91K9LwanysOLwulB9X6qc6nOQ/mfSD1t72UUIDx9eMI/kXqhtCdST8gfol5yDZma1hOlN+1823sbBQgN4gnftN5Ib5rUCDbtfMhN6w2daVnFSG+FRpfvgjSEfLM2Ri5QcPRR4CbdQx/oj/c+cg4z/NeoNV287hyFHl7XIwFPDwVHXzluZBSdbHtfxEdRP6oPUF1I9cEYnL4eXhROf6oPUX2Y6iMoy0irdLUOsP0iowDhScgTNd41XuRvtIiR9toAyB+ii+QaMjVtAEpiWkGkXWz7QKMAoUE84UfaQKSPNKkRHGkXQ27aQOhMyyrSBio0unyXpCHkm7WRdomCY5ACN+keBkEfaYPkHGZoo4Yjs59CD6/vn4Cnv4JjsBw3Mora2j4Y8VE0hOqjVB+jujQGZ7CHF4UzlOoyqo9TfQJlGWltXa3DbB9uFCA8CXmim3dtOMLTMBhFjLT/D4P8IRou15CpacNQEtMKIu1y20cYBQgN4gk/0kYgfaRJjeBIuxxy00ZAZ1pWkTZCodHluyINId+sjbQrFBwjFbhJ9zAS+kgbKecwQxs1HJlDFHp4/dAEPEMVHKPkuJFRdIrtoxAfRVdSXUX1SapPxeCM8vCicEZTfZrqM1SfRVlGWkdX6xjbxxoFCE9CnujqXeNFlR5aESPtP2Mgf4jGyjVkatoYlMS0gki72vZxRgFCg3jCj7RxSB9pUiM40q6G3LRx0JmWVaSNU2h0+a5JQ8g3ayPtGgXHeAVu0j2Mhz7Sxss5zNBGDUfmlQo9vH50Ap7RCo4JctzIKGpn+wTER9G1VBOpPkd1XQzOBA8vCmcS1fVUk6lqUZaRlne1TrF9qlGA8CTkiS7eNV5U5aEVMdKap0D+EE2Va8jUtCkoiWkFkXaD7XVGAUKDeMKPtDqkjzSpERxpN0BuWh10pmUVaXUKjS7fjWkI+WZtpN2o4JimwE26h2nQR9o0OYcZ2qjhyLxWoYfXT0rAM0nBMV2OGxlF7W2fjvgomkE1k+omqptjcKZ7eFE4s6jqqT5P1YCyjLQhrtbZts8xChCehDzR2bvGi07y0IoYadtnQ/4QzZFryNS02SiJaQWRdovtc40ChAbxhB9pc5E+0qRGcKTdArlpc6EzLatIm6vQ6PLNS0PIN2sjbZ6CY74CN+ke5kMfafPlHGZoo4Yjc4ZCD6+flYBnloJjgRw3Moo62L4A8VF0K9UXqL5I9aUYnAUeXhTOQqpFVF+m+grKMtIWuloX236bUYDwJOSJs7xrvOhkD62IkbZoMeQP0W1yDZmathglMa0g0m63fYlRgNAgnvAjbQnSR5rUCI602yE3bQl0pmUVaUsUGl2+O9IQ8s3aSLtDwbFUgZt0D0uhj7Slcg4ztFHDkXmrQg+vX5iAZ6GCY5kcNzKKTrV9GeKj6E6qr1J9jerrMTjLPLwonOVU36C6i+pulGOk5Qq0Ntp+z/Gp8CTkiTO9a7yorQdXxEjLNUL+EN0j15CpaY0ohWmFkXav7SuMAoQG
8YQfaSuQPtKkRnCk3Qu5aSugMy2rSFuh0Ojy3ZeGkG/WRtp9Co6VCtyke1gJfaStlHOYoY0ajsw7FXp4/fIEPMsVHKvkuJFR9A7bVyE+iu6n+ibVt6i+HYOzysOLwllN9R2qB6i+i3KMtIoCrWtsf9AoQHgS8sQZ3jVedIoHV7xIez23BvKH6EG5hkxNW4NSmFYYaQ/ZvtYoQGgQT/iRthbpI01qBEfaQ5CbthY607KKtLUKjS7fw2kI+WZtpD2s4FinwE26h3XQR9o6OYcZ2qjhyLxfoYfXr07As1rBsV6OGxlF77R9PeKj6BGq71F9n+oHMTjrPbwonA1UP6T6EdWPUY6RVlmgdaPtjxoFCE9Cnjjdu8aL2nlwxYu0V3MbIX+IHpVryNS0jSiFaYWR9pjtm4wChAbxhB9pm5A+0qRGcKQ9Brlpm6AzLatI26TQ6PI9noaQb9ZG2uMKjs0K3KR72Ax9pG2Wc5ihjRqOzEcUenj9hgQ8GxQcW+S4kVHU0fYtiI+irVRPUP2E6qcxOFs8vCicbVQ/o/o51S9QjpHWtkBrk+3bjQKEJyFPdPKu8aL2HlzxIu1/uSbIH6Ltcg2ZmtaEUphWGGlP2r7DKEBoEE/4kbYD6SOtCfJIexJy03ZAZ1pWkbZDodHleyoNId+sjbSnFBw7FbhJ97AT+kjbKecwQxs1HJlbFXp4/bYEPNsUHLvkuJFR9C7bdyE+ip6m+iXVr6h+HYOzy8OLwtlN9Rsc//8bf4dyjLSOBVr32P6MUYDwJOSJau8aL+rgwRUv0o7l9kD+ED0j15CpaXtQCtMKI+1Z2/caBQgN4gk/0vYifaRJjeBIexZy0/ZCZ1pWkbZXodHley4NId+sjbTnFBz7FLhJ97AP+kjbJ+cwQxs1HJlPK/Tw+t0JeHYrOPbLcSOj6DTb9yM+ip6n+j3VH6j+GIOz38OLwjlA9SeqP1P9BeUYafkCrQdtf8EoQHgS8sS7vWu86FQPrniRdih3EPKH6AW5hkxNO4hSmFYYaYdsbzYKEBrEE36kNSN9pEmN4Eg7BLlpzdCZllWkNSs0unwvpiHkm7WR9qKC47ACN+keDkMfaYflHGZoo4Yj83mFHl5/IAHPAQXHETluZBQFJ+8RxEfRX6n+RvV3qpdicI54eFE4R6n+QfVPqn/Za9rX5rDVE6yXPBNHoX8/uGtb03QU6TwI/o0yOHgqEB6swbqXqf5N9QrVMQ9Tu7cc5Ht7Wb42lSZ3bWuaTkdpNGlepzNQGk0VkGs6E6XR1AZyTWehNJoqIdfUWb7W/Ij8zRutruB9yv+TAv/qKf+uDn+4mT8Nxj8+55838Huev6Plj8/x5w34BzT8L1r8VwA+M/m71U4OdpXDx88We8mvXWdnTfCaBNcm19XXXt9QN6d2Yt3MhtqptfUTZ82+qaGudmZD8FHroLdzdqBwryK4v32y+0/4xVxXi4v7JqHtlRH35WK+rvD6W631r7vXOkTMBZin2e7qDfbR0XbXjTm19Q3BZwOD17CTw6d5BwX3Vye7P9ID98kLcAMe93XMQzeCb/+rIuZ8b9t4vEHP6flzcTqi/A68rHauBa/HG1KywaYoaAAA";
2452
- const debug_symbols = "tVfLbqNAEPwXzj5Q8+jp2V9ZrSLHIZEly7YcO9Iqyr8vTFc7yQEU2doLNYZ0QRVVEN67p+Hx8vKw3T8fXrtfv9+7x9N2t9u+POwOm/V5e9iPe98/Vp3/fDifhmHc1X05Pk4d16dhf+5+7S+73ap7W+8u7Y9ej+t9w/P6NB7tV92wfxpxJHze7oZp9bH6nO7nR5Fr4TQklStBxjcGzDOUUElQ8i3zKn75qumW+ezn1xpvmK9wA2rELfP5Oq+z+mV+PmrkfJzXvzCfenA+xTA3rwvziuwEmj8ZUH98BVGdIN1yB+tnBPswHyHc6eESwY9MRLzbxcVruNNGFdegRebmlx4Fcm0iRPUWhhL8ElBqP6thIY2AuI8IC7ei/leKXPxWSF/nCALuj8PCNciVQQJuuRnaeymg8Xug/oy/1pvt6dvbqMOoadWFto1tm9o2j7VaddK2pW21besobtWhN4DBOD6KQjQYCTDtzVOmRhRiISqxNsLQG8AgGMRGHJJBNpDGH4qB2mlCNbrYE0EMxNgYYzIwXdGExdKIoxqYttQ3/gSDYKdJ0ehSIlJdorpkJiVzKZm2bNqy2ZSDgWnLqfHnbCB2mkyTMk3KVCdUJ2aTmE1i2sS0idkkYmDaRBu/1Aalt9MUmlRoUqG6QnXFbCq8/6atmLZiNqlFQE2bWgTUIqCMgNIkpUlKdUp1ajZVs6matmraqtlULQLVtFWLQLUIVEag0iT0vS/gi+ALswp9ImaiEAuDrUQGHZYGAETmAaBlQPKFB/6aeJhxgBJNL5h5BO9PIEaiZQMhE5kOBO9Q8BJ5/OH5RzQbEQORytkARLMSUYhUHpUtrqwxs4LkZiY308sAbwMS7Uy0k3UA+4BEOzMfHKwEMh8dmc+OfH14uJnZzfRqwLuBTDuFdrIcYDsgtFMSkcrFUgQpROYI4mYWN9OLAm8KCu0stJNVAbuCQjsLg8S6QBkkZZDUg6RuprqZXht4b6C0U2kniwM2B5V2VgaJ5UFlkCqDVD1IU4Om/4ExdSiEaTFShumN8bY+bdePu4EfK8+X/ebLt8v579GP+NfN8XTYDE+X0zC9Wdqx8V3zDw==";
2651
+ const bytecode = "H4sIAAAAAAAA/72SPQqDQBCFZ/2J0c0hUiZdQo4QAqlCyjQpAmlSJAQ7S48gXsBTiB7HztLG3lkdZQWLXQQHHo9lhjcfu8ugK4v8+/r8OLpNZ4baSH0meS+D3JTm1igX5aE4jMsg34JSMUuaPbxvfnFM9tn9kobh47k7ldcg/0fnoo4r2quYO4vJ1mByYRmmlQaTB8swORpMXD13+GftEuIyJ/rCxVuJu3EmchrXOnNwAAMAAA==";
2652
+ const debug_symbols = "dZDRDoMgDEX/pc88iNHM+SvLYhCrISFAEJYshn9fNTL1wafSXs5tehcYsI9Tp8xoZ2hfC/Reaa2mTlspgrKGpktikNsueEQawUknygmPJkBrotYMPkLH7dPshNlqEJ7UggGagSoZjkrj+krsoIt7tKwyXD7KP17zC8/v+arh9W5QNfXhwJ8pvakTUvnLzVDQqrQaeiV6jXsOYzTyFEv4uqzk4Jy3EofocbXbNFrwAw==";
2453
2653
  const file_map = {
2454
2654
  "19": {
2455
2655
  source: "// Exposed only for usage in `std::meta`\npub(crate) mod poseidon2;\n\nuse crate::default::Default;\nuse crate::embedded_curve_ops::{\n EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_array_return,\n};\nuse crate::meta::derive_via;\n\n#[foreign(sha256_compression)]\n// docs:start:sha256_compression\npub fn sha256_compression(input: [u32; 16], state: [u32; 8]) -> [u32; 8] {}\n// docs:end:sha256_compression\n\n#[foreign(keccakf1600)]\n// docs:start:keccakf1600\npub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {}\n// docs:end:keccakf1600\n\npub mod keccak {\n #[deprecated(\"This function has been moved to std::hash::keccakf1600\")]\n pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {\n super::keccakf1600(input)\n }\n}\n\n#[foreign(blake2s)]\n// docs:start:blake2s\npub fn blake2s<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake2s\n{}\n\n// docs:start:blake3\npub fn blake3<let N: u32>(input: [u8; N]) -> [u8; 32]\n// docs:end:blake3\n{\n if crate::runtime::is_unconstrained() {\n // Temporary measure while Barretenberg is main proving system.\n // Please open an issue if you're working on another proving system and running into problems due to this.\n crate::static_assert(\n N <= 1024,\n \"Barretenberg cannot prove blake3 hashes with inputs larger than 1024 bytes\",\n );\n }\n __blake3(input)\n}\n\n#[foreign(blake3)]\nfn __blake3<let N: u32>(input: [u8; N]) -> [u8; 32] {}\n\n// docs:start:pedersen_commitment\npub fn pedersen_commitment<let N: u32>(input: [Field; N]) -> EmbeddedCurvePoint {\n // docs:end:pedersen_commitment\n pedersen_commitment_with_separator(input, 0)\n}\n\n#[inline_always]\npub fn pedersen_commitment_with_separator<let N: u32>(\n input: [Field; N],\n separator: u32,\n) -> EmbeddedCurvePoint {\n let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N];\n for i in 0..N {\n // we use the unsafe version because the multi_scalar_mul will constrain the scalars.\n points[i] = from_field_unsafe(input[i]);\n 
}\n let generators = derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n multi_scalar_mul(generators, points)\n}\n\n// docs:start:pedersen_hash\npub fn pedersen_hash<let N: u32>(input: [Field; N]) -> Field\n// docs:end:pedersen_hash\n{\n pedersen_hash_with_separator(input, 0)\n}\n\n#[no_predicates]\npub fn pedersen_hash_with_separator<let N: u32>(input: [Field; N], separator: u32) -> Field {\n let mut scalars: [EmbeddedCurveScalar; N + 1] = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N + 1];\n let mut generators: [EmbeddedCurvePoint; N + 1] =\n [EmbeddedCurvePoint::point_at_infinity(); N + 1];\n let domain_generators: [EmbeddedCurvePoint; N] =\n derive_generators(\"DEFAULT_DOMAIN_SEPARATOR\".as_bytes(), separator);\n\n for i in 0..N {\n scalars[i] = from_field_unsafe(input[i]);\n generators[i] = domain_generators[i];\n }\n scalars[N] = EmbeddedCurveScalar { lo: N as Field, hi: 0 as Field };\n\n let length_generator: [EmbeddedCurvePoint; 1] =\n derive_generators(\"pedersen_hash_length\".as_bytes(), 0);\n generators[N] = length_generator[0];\n multi_scalar_mul_array_return(generators, scalars, true)[0].x\n}\n\n#[field(bn254)]\n#[inline_always]\npub fn derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {\n crate::assert_constant(domain_separator_bytes);\n // TODO(https://github.com/noir-lang/noir/issues/5672): Add back assert_constant on starting_index\n __derive_generators(domain_separator_bytes, starting_index)\n}\n\n#[builtin(derive_pedersen_generators)]\n#[field(bn254)]\nfn __derive_generators<let N: u32, let M: u32>(\n domain_separator_bytes: [u8; M],\n starting_index: u32,\n) -> [EmbeddedCurvePoint; N] {}\n\n#[field(bn254)]\n// Decompose the input 'bn254 scalar' into two 128 bits limbs.\n// It is called 'unsafe' because it does not assert the limbs are 128 bits\n// Assuming the limbs are 128 bits:\n// Assert the decomposition does not overflow the field size.\nfn 
from_field_unsafe(scalar: Field) -> EmbeddedCurveScalar {\n // Safety: xlo and xhi decomposition is checked below\n let (xlo, xhi) = unsafe { crate::field::bn254::decompose_hint(scalar) };\n // Check that the decomposition is correct\n assert_eq(scalar, xlo + crate::field::bn254::TWO_POW_128 * xhi);\n // Check that the decomposition does not overflow the field size\n let (a, b) = if xhi == crate::field::bn254::PHI {\n (xlo, crate::field::bn254::PLO)\n } else {\n (xhi, crate::field::bn254::PHI)\n };\n crate::field::bn254::assert_lt(a, b);\n\n EmbeddedCurveScalar { lo: xlo, hi: xhi }\n}\n\npub fn poseidon2_permutation<let N: u32>(input: [Field; N], state_len: u32) -> [Field; N] {\n assert_eq(input.len(), state_len);\n poseidon2_permutation_internal(input)\n}\n\n#[foreign(poseidon2_permutation)]\nfn poseidon2_permutation_internal<let N: u32>(input: [Field; N]) -> [Field; N] {}\n\n// Generic hashing support.\n// Partially ported and impacted by rust.\n\n// Hash trait shall be implemented per type.\n#[derive_via(derive_hash)]\npub trait Hash {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher;\n}\n\n// docs:start:derive_hash\ncomptime fn derive_hash(s: TypeDefinition) -> Quoted {\n let name = quote { $crate::hash::Hash };\n let signature = quote { fn hash<H>(_self: Self, _state: &mut H) where H: $crate::hash::Hasher };\n let for_each_field = |name| quote { _self.$name.hash(_state); };\n crate::meta::make_trait_impl(\n s,\n name,\n signature,\n for_each_field,\n quote {},\n |fields| fields,\n )\n}\n// docs:end:derive_hash\n\n// Hasher trait shall be implemented by algorithms to provide hash-agnostic means.\n// TODO: consider making the types generic here ([u8], [Field], etc.)\npub trait Hasher {\n fn finish(self) -> Field;\n\n fn write(&mut self, input: Field);\n}\n\n// BuildHasher is a factory trait, responsible for production of specific Hasher.\npub trait BuildHasher {\n type H: Hasher;\n\n fn build_hasher(self) -> H;\n}\n\npub struct 
BuildHasherDefault<H>;\n\nimpl<H> BuildHasher for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n type H = H;\n\n fn build_hasher(_self: Self) -> H {\n H::default()\n }\n}\n\nimpl<H> Default for BuildHasherDefault<H>\nwhere\n H: Hasher + Default,\n{\n fn default() -> Self {\n BuildHasherDefault {}\n }\n}\n\nimpl Hash for Field {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self);\n }\n}\n\nimpl Hash for u1 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for u128 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for i8 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u8 as Field);\n }\n}\n\nimpl Hash for i16 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u16 as Field);\n }\n}\n\nimpl Hash for i32 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u32 as Field);\n }\n}\n\nimpl Hash for i64 {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as u64 as Field);\n }\n}\n\nimpl Hash for bool {\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n H::write(state, self as Field);\n }\n}\n\nimpl Hash for () {\n fn hash<H>(_self: Self, _state: &mut H)\n where\n H: Hasher,\n {}\n}\n\nimpl<T, let N: u32> Hash for [T; N]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n 
where\n H: Hasher,\n {\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<T> Hash for [T]\nwhere\n T: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.len().hash(state);\n for elem in self {\n elem.hash(state);\n }\n }\n}\n\nimpl<A, B> Hash for (A, B)\nwhere\n A: Hash,\n B: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n }\n}\n\nimpl<A, B, C> Hash for (A, B, C)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n }\n}\n\nimpl<A, B, C, D> Hash for (A, B, C, D)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n }\n}\n\nimpl<A, B, C, D, E> Hash for (A, B, C, D, E)\nwhere\n A: Hash,\n B: Hash,\n C: Hash,\n D: Hash,\n E: Hash,\n{\n fn hash<H>(self, state: &mut H)\n where\n H: Hasher,\n {\n self.0.hash(state);\n self.1.hash(state);\n self.2.hash(state);\n self.3.hash(state);\n self.4.hash(state);\n }\n}\n\n// Some test vectors for Pedersen hash and Pedersen Commitment.\n// They have been generated using the same functions so the tests are for now useless\n// but they will be useful when we switch to Noir implementation.\n#[test]\nfn assert_pedersen() {\n assert_eq(\n pedersen_hash_with_separator([1], 1),\n 0x1b3f4b1a83092a13d8d1a59f7acb62aba15e7002f4440f2275edb99ebbc2305f,\n );\n assert_eq(\n pedersen_commitment_with_separator([1], 1),\n EmbeddedCurvePoint {\n x: 0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402,\n y: 0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126,\n is_infinite: false,\n },\n );\n\n assert_eq(\n pedersen_hash_with_separator([1, 2], 2),\n 0x26691c129448e9ace0c66d11f0a16d9014a9e8498ee78f4d69f0083168188255,\n );\n assert_eq(\n 
pedersen_commitment_with_separator([1, 2], 2),\n EmbeddedCurvePoint {\n x: 0x2e2b3b191e49541fe468ec6877721d445dcaffe41728df0a0eafeb15e87b0753,\n y: 0x2ff4482400ad3a6228be17a2af33e2bcdf41be04795f9782bd96efe7e24f8778,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3], 3),\n 0x0bc694b7a1f8d10d2d8987d07433f26bd616a2d351bc79a3c540d85b6206dbe4,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3], 3),\n EmbeddedCurvePoint {\n x: 0x1fee4e8cf8d2f527caa2684236b07c4b1bad7342c01b0f75e9a877a71827dc85,\n y: 0x2f9fedb9a090697ab69bf04c8bc15f7385b3e4b68c849c1536e5ae15ff138fd1,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4], 4),\n 0xdae10fb32a8408521803905981a2b300d6a35e40e798743e9322b223a5eddc,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4], 4),\n EmbeddedCurvePoint {\n x: 0x07ae3e202811e1fca39c2d81eabe6f79183978e6f12be0d3b8eda095b79bdbc9,\n y: 0x0afc6f892593db6fbba60f2da558517e279e0ae04f95758587760ba193145014,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5], 5),\n 0xfc375b062c4f4f0150f7100dfb8d9b72a6d28582dd9512390b0497cdad9c22,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5], 5),\n EmbeddedCurvePoint {\n x: 0x1754b12bd475a6984a1094b5109eeca9838f4f81ac89c5f0a41dbce53189bb29,\n y: 0x2da030e3cfcdc7ddad80eaf2599df6692cae0717d4e9f7bfbee8d073d5d278f7,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6], 6),\n 0x1696ed13dc2730062a98ac9d8f9de0661bb98829c7582f699d0273b18c86a572,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6], 6),\n EmbeddedCurvePoint {\n x: 0x190f6c0e97ad83e1e28da22a98aae156da083c5a4100e929b77e750d3106a697,\n y: 0x1f4b60f34ef91221a0b49756fa0705da93311a61af73d37a0c458877706616fb,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n 
0x128c0ff144fc66b6cb60eeac8a38e23da52992fc427b92397a7dffd71c45ede3,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7], 7),\n EmbeddedCurvePoint {\n x: 0x015441e9d29491b06563fac16fc76abf7a9534c715421d0de85d20dbe2965939,\n y: 0x1d2575b0276f4e9087e6e07c2cb75aa1baafad127af4be5918ef8a2ef2fea8fc,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n 0x2f960e117482044dfc99d12fece2ef6862fba9242be4846c7c9a3e854325a55c,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8),\n EmbeddedCurvePoint {\n x: 0x1657737676968887fceb6dd516382ea13b3a2c557f509811cd86d5d1199bc443,\n y: 0x1f39f0cb569040105fa1e2f156521e8b8e08261e635a2b210bdc94e8d6d65f77,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n 0x0c96db0790602dcb166cc4699e2d306c479a76926b81c2cb2aaa92d249ec7be7,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9),\n EmbeddedCurvePoint {\n x: 0x0a3ceae42d14914a432aa60ec7fded4af7dad7dd4acdbf2908452675ec67e06d,\n y: 0xfc19761eaaf621ad4aec9a8b2e84a4eceffdba78f60f8b9391b0bd9345a2f2,\n is_infinite: false,\n },\n );\n assert_eq(\n pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n 0x2cd37505871bc460a62ea1e63c7fe51149df5d0801302cf1cbc48beb8dff7e94,\n );\n assert_eq(\n pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10),\n EmbeddedCurvePoint {\n x: 0x2fb3f8b3d41ddde007c8c3c62550f9a9380ee546fcc639ffbb3fd30c8d8de30c,\n y: 0x300783be23c446b11a4c0fabf6c91af148937cea15fcf5fb054abf7f752ee245,\n is_infinite: false,\n },\n );\n}\n",
2456
2656
  path: "std/hash/mod.nr"
2457
2657
  },
2458
2658
  "51": {
2459
- source: "// District membership circuit (Barretenberg / Noir)\n// Mirrors Halo2 semantics with Poseidon hashing and domain-separated nullifier.\n\nuse dep::std::hash::poseidon2_permutation;\n\n// Default depth; build pipeline rewrites this per-class (14 / 20 / 22).\nglobal DEPTH: u32 = 14;\n\nfn poseidon2_hash2(left: Field, right: Field) -> Field {\n let mut state: [Field; 4] = [left, right, 0, 0];\n let out = poseidon2_permutation(state, 4);\n out[0]\n}\n\nfn poseidon2_hash4(a: Field, b: Field, c: Field, d: Field) -> Field {\n let mut state: [Field; 4] = [a, b, c, d];\n let out = poseidon2_permutation(state, 4);\n out[0]\n}\n\nfn compute_merkle_root(leaf: Field, merkle_path: [Field; DEPTH], leaf_index: u32) -> Field {\n assert(leaf_index < (1u32 << DEPTH)); // range-constrain index\n\n let mut node = leaf;\n for i in 0..DEPTH {\n let bit: bool = ((leaf_index >> i) & 1u32) == 1u32;\n let sibling = merkle_path[i];\n node = if bit { poseidon2_hash2(sibling, node) } else { poseidon2_hash2(node, sibling) };\n }\n node\n}\n\nfn compute_nullifier(user_secret: Field, campaign_id: Field, authority_hash: Field, epoch_id: Field) -> Field {\n poseidon2_hash4(user_secret, campaign_id, authority_hash, epoch_id)\n}\n\n// Public inputs are marked `pub`; private witnesses remain secret.\nfn main(\n merkle_root: Field,\n nullifier: Field,\n authority_hash: Field,\n epoch_id: Field,\n campaign_id: Field,\n leaf: Field,\n merkle_path: [Field; DEPTH],\n leaf_index: u32,\n user_secret: Field,\n) -> pub (Field, Field, Field, Field, Field) {\n let computed_root = compute_merkle_root(leaf, merkle_path, leaf_index);\n assert(computed_root == merkle_root);\n\n let computed_nullifier = compute_nullifier(user_secret, campaign_id, authority_hash, epoch_id);\n assert(computed_nullifier == nullifier);\n\n (merkle_root, nullifier, authority_hash, epoch_id, campaign_id)\n}\n",
2460
- path: "/Users/noot/Documents/voter-protocol/packages/crypto/noir/district_membership/src/main.nr"
2659
+ source: "use dep::std::hash::poseidon2_permutation;\n\n// Returns all 4 elements of the permutation state (not just state[0])\n// This is needed for implementing the sponge construction in TypeScript\nfn main(inputs: [Field; 4]) -> pub [Field; 4] {\n poseidon2_permutation(inputs, 4)\n}\n",
2660
+ path: "/Users/noot/Documents/voter-protocol/packages/crypto/noir/sponge_helper/src/main.nr"
2461
2661
  }
2462
2662
  };
2463
2663
  const expression_width = {
@@ -2465,7 +2665,7 @@ const expression_width = {
2465
2665
  width: 4
2466
2666
  }
2467
2667
  };
2468
- const circuitJson = {
2668
+ const spongeHelperCircuit = {
2469
2669
  noir_version: noir_version,
2470
2670
  hash: hash,
2471
2671
  abi: abi,
@@ -2475,91 +2675,749 @@ const circuitJson = {
2475
2675
  expression_width: expression_width
2476
2676
  };
2477
2677
 
2678
// 32 zero bytes rendered as a 0x-prefixed, 64-char hex field element.
const ZERO_PAD = `0x${"0".repeat(64)}`;
// Domain-separation tags, each encoded as a 64-char hex field element.
// 0x48314d..0x48344d are the ASCII bytes "H1M".."H4M" (hashN modes).
const DOMAIN_HASH1 = `0x${(0x48314d).toString(16).padStart(64, "0")}`;
const DOMAIN_HASH2 = `0x${(0x48324d).toString(16).padStart(64, "0")}`;
const DOMAIN_HASH3 = `0x${(0x48334d).toString(16).padStart(64, "0")}`;
const DOMAIN_HASH4 = `0x${(0x48344d).toString(16).padStart(64, "0")}`;
// Sponge-24 domain tag (byte string 0x534f4e47455f24). Kept as a BigInt
// literal because the value exceeds Number.MAX_SAFE_INTEGER.
const DOMAIN_SPONGE_24 = `0x${0x534f4e47455f24n.toString(16).padStart(64, "0")}`;
// Default cap on concurrent Noir executions in the *Batch helpers.
const DEFAULT_BATCH_SIZE = 64;
2685
/**
 * Singleton wrapper around two Noir circuits exposing Poseidon2-based hashing
 * primitives: pair/single/triple/quad hashes with domain-separated inputs, a
 * rate-3 additive sponge over 24 elements, string hashing, and batched variants.
 *
 * Obtain instances via `Poseidon2Hasher.getInstance()`; the constructor is
 * internal. NOTE(review): depends on module-level `Noir`, `fixturesCircuit`,
 * `spongeHelperCircuit`, and the DOMAIN_* / ZERO_PAD / DEFAULT_BATCH_SIZE
 * constants defined alongside this class in the bundle.
 */
class Poseidon2Hasher {
  static instance = null;
  static initPromise = null;
  // BN254 scalar field modulus (exported for validation in dependent packages)
  static BN254_MODULUS = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
  noir;
  spongeHelperNoir;
  initialized = false;
  /**
   * Private constructor - use getInstance() instead
   */
  constructor(noir, spongeHelperNoir) {
    this.noir = noir;
    this.spongeHelperNoir = spongeHelperNoir;
  }
  /**
   * Get singleton instance (thread-safe initialization)
   *
   * First call initializes the Noir circuit, subsequent calls return cached instance.
   * Uses promise-based locking to prevent double initialization.
   */
  static async getInstance() {
    if (Poseidon2Hasher.instance?.initialized) {
      return Poseidon2Hasher.instance;
    }
    if (!Poseidon2Hasher.initPromise) {
      // Clear the promise on failure so a later call can retry initialization.
      Poseidon2Hasher.initPromise = Poseidon2Hasher.initialize().catch((err) => {
        Poseidon2Hasher.initPromise = null;
        throw err;
      });
    }
    return Poseidon2Hasher.initPromise;
  }
  /**
   * Initialize the Noir circuits (called once per process)
   */
  static async initialize() {
    const circuit = fixturesCircuit;
    const noir = new Noir(circuit);
    const spongeCircuit = spongeHelperCircuit;
    const spongeHelperNoir = new Noir(spongeCircuit);
    const instance = new Poseidon2Hasher(noir, spongeHelperNoir);
    instance.initialized = true;
    Poseidon2Hasher.instance = instance;
    return instance;
  }
  /**
   * Reset singleton (for testing only)
   */
  static resetInstance() {
    Poseidon2Hasher.instance = null;
    Poseidon2Hasher.initPromise = null;
  }
  /**
   * Hash two field elements: Poseidon2(left, right, DOMAIN_HASH2, 0)
   *
   * Matches the circuit: poseidon2_permutation([left, right, 0x48324d, 0], 4)[0]
   * BA-003: Third state element carries the "H2M" domain separation tag to
   * distinguish pair-hashing from single/quad modes in the Noir circuit.
   *
   * @param left - Left input (bigint or hex string)
   * @param right - Right input (bigint or hex string)
   * @returns Poseidon2 hash as bigint
   */
  async hashPair(left, right) {
    const inputs = [
      this.toHex(left),
      this.toHex(right),
      DOMAIN_HASH2,
      // BA-003: Domain separation tag matching circuit
      ZERO_PAD
    ];
    const result = await this.noir.execute({ inputs });
    // Tolerate both camelCase and snake_case result fields across noir_js versions.
    const returnValue = result.returnValue ?? result.return_value;
    if (!returnValue) {
      throw new Error("Noir circuit returned no value");
    }
    return BigInt(returnValue);
  }
  /**
   * Hash a single field element: Poseidon2(value, DOMAIN_HASH1, 0, 0)
   *
   * SA-007 FIX: Now includes DOMAIN_HASH1 in slot 1 for domain separation.
   * This prevents collision with hash4(value, 0, 0, 0).
   *
   * @param value - Input value (bigint or hex string)
   * @returns Poseidon2 hash as bigint
   */
  async hashSingle(value) {
    const inputs = [
      this.toHex(value),
      DOMAIN_HASH1,
      // SA-007: Domain tag in slot 1 to prevent collision with hash4
      ZERO_PAD,
      ZERO_PAD
    ];
    const result = await this.noir.execute({ inputs });
    const returnValue = result.returnValue ?? result.return_value;
    if (!returnValue) {
      throw new Error("Noir circuit returned no value");
    }
    return BigInt(returnValue);
  }
  /**
   * Hash three field elements: Poseidon2(a, b, c, DOMAIN_HASH3)
   *
   * Two-tree architecture: Used for user leaf computation
   * user_leaf = hash3(user_secret, cell_id, registration_salt)
   *
   * Domain separation: DOMAIN_HASH3 (0x48334d = "H3M") in slot 3 prevents
   * collision with hash4(a, b, c, 0).
   *
   * @param a - First input
   * @param b - Second input
   * @param c - Third input
   * @returns Poseidon2 hash as bigint
   */
  async hash3(a, b, c) {
    const inputs = [
      this.toHex(a),
      this.toHex(b),
      this.toHex(c),
      DOMAIN_HASH3
      // Domain tag in slot 3 to prevent collision with hash4
    ];
    const result = await this.noir.execute({ inputs });
    const returnValue = result.returnValue ?? result.return_value;
    if (!returnValue) {
      throw new Error("Noir circuit returned no value");
    }
    return BigInt(returnValue);
  }
  /**
   * Hash four field elements using 2-round Poseidon2 sponge (BR5-001).
   *
   * Matches Noir circuit poseidon2_hash4:
   * Round 1: state = permute([DOMAIN_HASH4, a, b, c])
   * Round 2: state[1] += d, state = permute(state), return state[0]
   *
   * Used for user leaf: hash4(user_secret, cell_id, registration_salt, authority_level)
   *
   * @param a - First input (user_secret)
   * @param b - Second input (cell_id)
   * @param c - Third input (registration_salt)
   * @param d - Fourth input (authority_level)
   * @returns Poseidon2 hash as bigint
   */
  async hash4(a, b, c, d) {
    const round1Inputs = [
      DOMAIN_HASH4,
      this.toHex(a),
      this.toHex(b),
      this.toHex(c)
    ];
    // The sponge helper circuit returns the full 4-element permutation state,
    // which round 2 needs (the main circuit only returns state[0]).
    const r1 = await this.spongeHelperNoir.execute({ inputs: round1Inputs });
    const r1State = r1.returnValue ?? r1.return_value;
    if (!r1State || !Array.isArray(r1State) || r1State.length !== 4) {
      throw new Error("Sponge helper circuit returned invalid state array");
    }
    // Absorb d into the first rate element, reduced mod the BN254 field.
    const s1PlusD = (BigInt(r1State[1]) + BigInt(this.toHex(d))) % Poseidon2Hasher.BN254_MODULUS;
    const round2Inputs = [
      r1State[0],
      this.toHex(s1PlusD),
      r1State[2],
      r1State[3]
    ];
    const r2 = await this.noir.execute({ inputs: round2Inputs });
    const returnValue = r2.returnValue ?? r2.return_value;
    if (!returnValue) {
      throw new Error("Noir circuit returned no value");
    }
    return BigInt(returnValue);
  }
  /**
   * Batch hash multiple pairs with controlled concurrency
   *
   * PARALLELISM STRATEGY:
   * - Divides pairs into batches of size `batchSize`
   * - Each batch runs concurrently with Promise.all()
   * - Batches run sequentially to prevent memory pressure
   *
   * @param pairs - Array of [left, right] pairs to hash
   * @param batchSize - Max concurrent operations (default: 64)
   * @returns Array of hashes in same order as input pairs
   */
  async hashPairsBatch(pairs, batchSize = DEFAULT_BATCH_SIZE) {
    const results = new Array(pairs.length);
    for (let i = 0; i < pairs.length; i += batchSize) {
      const batch = pairs.slice(i, Math.min(i + batchSize, pairs.length));
      const batchResults = await Promise.all(batch.map(([left, right]) => this.hashPair(left, right)));
      // Write results back at their original offsets to preserve input order.
      for (let j = 0; j < batchResults.length; j++) {
        results[i + j] = batchResults[j];
      }
    }
    return results;
  }
  /**
   * Batch hash multiple single values with controlled concurrency
   *
   * @param values - Array of values to hash
   * @param batchSize - Max concurrent operations (default: 64)
   * @returns Array of hashes in same order as input values
   */
  async hashSinglesBatch(values, batchSize = DEFAULT_BATCH_SIZE) {
    const results = new Array(values.length);
    for (let i = 0; i < values.length; i += batchSize) {
      const batch = values.slice(i, Math.min(i + batchSize, values.length));
      const batchResults = await Promise.all(batch.map((value) => this.hashSingle(value)));
      for (let j = 0; j < batchResults.length; j++) {
        results[i + j] = batchResults[j];
      }
    }
    return results;
  }
  /**
   * Poseidon2 sponge construction for absorbing 24 field elements.
   * Rate = 3 (absorb 3 fields per round), Capacity = 1.
   * State width = 4 (matches Poseidon2 permutation width).
   *
   * CRITICAL: State elements are ADDED TO, not overwritten.
   * The spec (v0.1) had a bug that overwrote state — this is the correct version.
   *
   * ALGORITHM:
   * 1. Initialize state with domain tag in capacity element: [DOMAIN_SPONGE_24, 0, 0, 0]
   * 2. For each chunk of 3 inputs:
   * - ADD inputs to rate elements (state[1], state[2], state[3])
   * - Apply full Poseidon2 permutation
   * 3. Handle remaining inputs (if not multiple of 3) by padding with zeros
   * (NOTE(review): step 3 never triggers here — the enforced 24-input length
   * is an exact multiple of the rate, so the loop always runs 8 full rounds.)
   * 4. Squeeze: return state[0] as output
   *
   * SECURITY NOTE: Adding to state (not overwriting) ensures proper chaining between
   * rounds. Overwriting would discard the cryptographic state and create collision
   * vulnerabilities (see TWO-TREE-AGENT-REVIEW-SUMMARY.md BLOCKER-3).
   *
   * @param inputs - Array of exactly 24 field elements (district IDs)
   * @param domainTag - Domain separation tag (default: DOMAIN_SPONGE_24)
   * @returns Poseidon2 sponge output as bigint (district commitment)
   */
  async poseidon2Sponge(inputs, domainTag = BigInt(DOMAIN_SPONGE_24)) {
    if (inputs.length !== 24) {
      throw new Error(`poseidon2Sponge expects 24 inputs, got ${inputs.length}`);
    }
    // Range-check every absorbed element before touching the circuit.
    for (let i = 0; i < inputs.length; i++) {
      if (inputs[i] < 0n) {
        throw new Error(`Input ${i} is negative: ${inputs[i]}`);
      }
      if (inputs[i] >= Poseidon2Hasher.BN254_MODULUS) {
        throw new Error(`Input ${i} exceeds BN254 field modulus: ${inputs[i]}`);
      }
    }
    let state = [
      domainTag % Poseidon2Hasher.BN254_MODULUS,
      0n,
      0n,
      0n
    ];
    for (let i = 0; i < 8; i++) {
      // Absorb: add the next 3 inputs into the rate portion of the state.
      state[1] = (state[1] + inputs[i * 3]) % Poseidon2Hasher.BN254_MODULUS;
      state[2] = (state[2] + inputs[i * 3 + 1]) % Poseidon2Hasher.BN254_MODULUS;
      state[3] = (state[3] + inputs[i * 3 + 2]) % Poseidon2Hasher.BN254_MODULUS;
      // Permute: run the full-width Poseidon2 permutation via the helper circuit.
      const stateHex = state.map((x) => this.toHex(x));
      const result = await this.spongeHelperNoir.execute({ inputs: stateHex });
      const returnValue = result.returnValue ?? result.return_value;
      if (!returnValue || !Array.isArray(returnValue) || returnValue.length !== 4) {
        throw new Error("Sponge helper circuit returned invalid state array");
      }
      state = returnValue.map((x) => BigInt(x));
    }
    return state[0];
  }
  /**
   * Hash a string to BN254 field element
   *
   * Chunking strategy:
   * - UTF-8 encode string
   * - Split into 31-byte chunks (safe for 254-bit BN254 field)
   * - Commit to byte length first, then fold in chunks iteratively:
   * hash = hashSingle(length)
   * hash = hashPair(hash, chunk[0])
   * hash = hashPair(hash, chunk[1])
   * ...
   *
   * LENGTH PREFIX (BA-022 fix):
   * Without a length commitment, "" and "\x00" both reduce to hashSingle(0n).
   * More generally, any two strings whose chunk representations share a common
   * suffix (due to trailing zero bytes) could collide. Hashing the byte length
   * as the first element makes every distinct string length a separate domain,
   * eliminating this class of collision.
   *
   * @param str - String to hash
   * @returns Poseidon2 hash as bigint
   */
  async hashString(str) {
    // NOTE(review): uses Node's Buffer — assumes a Node (or polyfilled) runtime;
    // confirm for browser bundles.
    const bytes = Buffer.from(str, "utf-8");
    const chunks = [];
    for (let i = 0; i < bytes.length; i += 31) {
      const chunk = bytes.subarray(i, Math.min(i + 31, bytes.length));
      chunks.push(BigInt("0x" + chunk.toString("hex")));
    }
    let hash = await this.hashSingle(BigInt(bytes.length));
    for (const chunk of chunks) {
      hash = await this.hashPair(hash, chunk);
    }
    return hash;
  }
  /**
   * Batch hash multiple strings with controlled concurrency
   *
   * @param strings - Array of strings to hash
   * @param batchSize - Max concurrent operations (default: 64)
   * @returns Array of hashes in same order as input strings
   */
  async hashStringsBatch(strings, batchSize = DEFAULT_BATCH_SIZE) {
    const results = new Array(strings.length);
    for (let i = 0; i < strings.length; i += batchSize) {
      const batch = strings.slice(i, Math.min(i + batchSize, strings.length));
      const batchResults = await Promise.all(batch.map((str) => this.hashString(str)));
      for (let j = 0; j < batchResults.length; j++) {
        results[i + j] = batchResults[j];
      }
    }
    return results;
  }
  /**
   * Convert value to 0x-prefixed 64-char hex string.
   * BA-016: Validates hex characters, rejects negative bigints,
   * and enforces BN254 field modulus bound.
   *
   * @param value - bigint, or hex string with optional "0x" prefix
   * @returns 0x-prefixed, zero-padded 64-char hex string
   * @throws if the value is negative, non-hex, or >= the BN254 modulus
   */
  toHex(value) {
    if (typeof value === "bigint") {
      if (value < 0n) {
        throw new Error(`Negative bigint not allowed: ${value}`);
      }
      if (value >= Poseidon2Hasher.BN254_MODULUS) {
        throw new Error(`Value exceeds BN254 field modulus: ${value}`);
      }
      return "0x" + value.toString(16).padStart(64, "0");
    }
    const hex = value.startsWith("0x") ? value.slice(2) : value;
    if (!/^[0-9a-fA-F]*$/.test(hex)) {
      throw new Error(`Invalid hex string: ${value}`);
    }
    const padded = hex.padStart(64, "0");
    const asBigInt = BigInt("0x" + padded);
    if (asBigInt >= Poseidon2Hasher.BN254_MODULUS) {
      throw new Error(`Value exceeds BN254 field modulus: 0x${padded}`);
    }
    return "0x" + padded;
  }
}
3035
+
3036
/**
 * VOTER Protocol Crypto SDK
 *
 * Cryptography library providing:
 * - Poseidon2 hashing (Noir stdlib compatibility for ZK circuits)
 * - District resolution services
 * - Geocoding services
 *
 * @packageDocumentation
 */
// Export BN254_MODULUS for field validation in dependent packages (BR3-003 fix).
// Re-exported from Poseidon2Hasher so dependents need not reach into the class.
const BN254_MODULUS = Poseidon2Hasher.BN254_MODULUS;
3048
+
3049
// Lazy loaders for the two-tree membership circuit, keyed by Merkle depth.
// Import paths stay literal so bundlers can code-split each circuit chunk;
// each loader resolves to the circuit JSON (the chunk's default export).
const twoTreeCircuitLoaders = {
  18: () => import('./two_tree_membership_18-DufFLCM8.js').then((m) => m.default),
  20: () => import('./two_tree_membership_20-DhrOeOFx.js').then((m) => m.default),
  22: () => import('./two_tree_membership_22-iMLJVJEK.js').then((m) => m.default),
  24: () => import('./two_tree_membership_24-Df3SNDL0.js').then((m) => m.default)
};
2478
3067
/**
 * Pick a worker-thread count for the proving backend.
 *
 * Falls back to a single thread when SharedArrayBuffer is unavailable
 * (multithreaded WASM needs it); otherwise uses the reported hardware
 * concurrency (default 4 when navigator is absent or unreported), capped at 8.
 */
function detectThreads() {
  if (typeof SharedArrayBuffer === "undefined") {
    console.log("[TwoTreeNoirProver] SharedArrayBuffer unavailable - using single-threaded mode");
    return 1;
  }
  let cores = 4;
  if (typeof navigator !== "undefined") {
    cores = navigator.hardwareConcurrency || 4;
  }
  return Math.min(cores, 8);
}
2487
- class NoirProver {
3076
/**
 * Parse one proof public input (0x-prefixed hex string) into a bigint,
 * rejecting malformed values and anything outside the BN254 scalar field.
 *
 * @param hex - Candidate public input (expected "0x" + hex digits)
 * @param label - Human-readable name used in error messages
 * @returns The parsed field element
 * @throws Error (BR5-006) on bad format or out-of-field values
 */
function parsePublicInput(hex, label) {
  const wellFormed = typeof hex === "string" && /^0x[0-9a-fA-F]+$/.test(hex);
  if (!wellFormed) {
    const shown = typeof hex === "string" ? `"${hex.slice(0, 20)}"` : typeof hex;
    throw new Error(
      `BR5-006: Invalid public input format for ${label}: expected 0x-prefixed hex string, got ${shown}`
    );
  }
  const parsed = BigInt(hex);
  if (parsed >= BN254_MODULUS) {
    throw new Error(
      `BR5-006: Public input ${label} (${parsed}) exceeds BN254 scalar field modulus. Possible field aliasing attack.`
    );
  }
  return parsed;
}
3090
/**
 * Encode a non-negative BN254 field element as a 0x-prefixed,
 * zero-padded 64-character hex string.
 *
 * @param value - bigint in [0, BN254_MODULUS)
 * @throws Error when negative or >= the field modulus
 */
function toHex(value) {
  if (value < 0n) throw new Error("Field element cannot be negative");
  if (value >= BN254_MODULUS) {
    throw new Error(`Field element ${value} exceeds BN254 scalar field modulus`);
  }
  const digits = value.toString(16);
  return `0x${digits.padStart(64, "0")}`;
}
3099
+ class TwoTreeNoirProver {
2488
3100
  backend = null;
2489
3101
  noir = null;
2490
- config;
2491
3102
  threads;
3103
+ depth;
2492
3104
  /**
   * @param config - Optional settings: `threads` (worker count; defaults to
   *   detectThreads()) and `depth` (circuit Merkle depth; defaults to
   *   DEFAULT_CIRCUIT_DEPTH). No circuit is loaded here — see init().
   */
  constructor(config = {}) {
    // ?? (not ||) so explicit falsy-but-valid settings are not overridden.
    this.threads = config.threads ?? detectThreads();
    this.depth = config.depth ?? DEFAULT_CIRCUIT_DEPTH;
  }
3108
+ // ========================================================================
3109
+ // Initialization
3110
+ // ========================================================================
2499
3111
  /**
2500
- * Initialize the prover (must be called before generating proofs)
3112
+ * Initialize the prover (must be called before generating proofs).
3113
+ * Lazily loads the circuit for the configured depth.
2501
3114
  */
2502
3115
  async init() {
2503
3116
  if (this.backend && this.noir) return;
2504
- console.log(`[NoirProver] Initializing with ${this.threads} thread(s)...`);
3117
+ console.log(`[TwoTreeNoirProver] Initializing depth=${this.depth} with ${this.threads} thread(s)...`);
2505
3118
  const start = Date.now();
2506
- const circuit = circuitJson;
3119
+ const loader = twoTreeCircuitLoaders[this.depth];
3120
+ if (!loader) {
3121
+ throw new Error(`Unsupported circuit depth: ${this.depth}. Must be 18, 20, 22, or 24.`);
3122
+ }
3123
+ const circuit = await loader();
2507
3124
  this.noir = new Noir(circuit);
2508
3125
  this.backend = new UltraHonkBackend(circuit.bytecode, { threads: this.threads });
2509
- console.log(`[NoirProver] Initialized in ${Date.now() - start}ms (${this.threads} threads)`);
3126
+ console.log(`[TwoTreeNoirProver] Initialized depth=${this.depth} in ${Date.now() - start}ms (${this.threads} threads)`);
2510
3127
  }
2511
3128
  /**
   * Get the circuit depth for this prover instance.
   *
   * @returns The Merkle depth this prover was configured with.
   */
  getDepth() {
    return this.depth;
  }
3134
  /**
   * Pre-warm the prover by initializing backend.
   * Call this on app load to hide latency from user.
   *
   * Safe to call repeatedly: init() is a no-op once initialized.
   */
  async warmup() {
    await this.init();
    console.log("[TwoTreeNoirProver] Warmup complete (backend initialized)");
  }
3142
  // ========================================================================
  // Input Validation
  // ========================================================================
  /** Maximum allowed Merkle depth (prevents DoS via oversized arrays).
   *  Matches the largest circuit depth shipped (see twoTreeCircuitLoaders). */
  static MAX_MERKLE_DEPTH = 24;
2519
3147
  /**
   * Validate all inputs before circuit execution.
   * Throws descriptive errors for any invalid input.
   *
   * Checks run in a fixed order, and the first failure wins:
   * 1. Non-zero witnesses (user_secret, cell_id, action_domain,
   *    registration_salt, identity_commitment)
   * 2. Authority level via validateAuthorityLevel()
   * 3. districts: exact slot count, field range, no duplicate non-zero IDs
   * 4. All scalar fields within the BN254 scalar field
   * 5. Merkle path arrays sized to this.depth, path bits in {0, 1},
   *    userIndex within [0, 2^depth), path elements within the field
   *
   * @param inputs - Candidate circuit inputs (bigints, arrays, numbers)
   * @throws Error naming the first failing check
   */
  validateInputs(inputs) {
    // --- Non-zero witness checks (each zero value has a concrete attack) ---
    if (inputs.userSecret === 0n) {
      throw new Error(
        "user_secret cannot be zero (SA-011). A zero secret produces predictable nullifiers."
      );
    }
    if (inputs.cellId === 0n) {
      throw new Error(
        "cell_id cannot be zero. A zero cell ID produces a degenerate cell map leaf."
      );
    }
    if (inputs.actionDomain === 0n) {
      throw new Error(
        "action_domain cannot be zero. A zero action domain produces a universal nullifier that would be consumed across ALL elections, permanently blocking the user."
      );
    }
    if (inputs.registrationSalt === 0n) {
      throw new Error(
        "registration_salt cannot be zero. A zero salt reduces leaf preimage entropy."
      );
    }
    if (inputs.identityCommitment === 0n) {
      throw new Error(
        "identity_commitment cannot be zero. NUL-001 requires a verified identity commitment from self.xyz/didit to prevent Sybil attacks via re-registration."
      );
    }
    validateAuthorityLevel(inputs.authorityLevel);
    // --- District slots: fixed count, in-field, unique when non-zero ---
    if (!Array.isArray(inputs.districts) || inputs.districts.length !== DISTRICT_SLOT_COUNT) {
      throw new Error(
        `districts array must have exactly ${DISTRICT_SLOT_COUNT} elements, got ${inputs.districts?.length ?? "non-array"}`
      );
    }
    const nonZeroDistricts = /* @__PURE__ */ new Set();
    for (let i = 0; i < inputs.districts.length; i++) {
      const d = inputs.districts[i];
      if (d < 0n) {
        throw new Error(`districts[${i}] cannot be negative`);
      }
      if (d >= BN254_MODULUS) {
        throw new Error(`districts[${i}] exceeds BN254 scalar field modulus`);
      }
      if (d !== 0n) {
        // Zero means "empty slot" and may repeat; non-zero IDs must be unique.
        if (nonZeroDistricts.has(d)) {
          throw new Error(
            `BR5-017: Duplicate district ID at slot ${i}: 0x${d.toString(16)}. Each non-zero district must appear in exactly one positional slot.`
          );
        }
        nonZeroDistricts.add(d);
      }
    }
    // --- Field-range checks for every scalar input ---
    const fieldChecks = [
      [inputs.userRoot, "userRoot"],
      [inputs.cellMapRoot, "cellMapRoot"],
      [inputs.nullifier, "nullifier"],
      [inputs.actionDomain, "actionDomain"],
      [inputs.userSecret, "userSecret"],
      [inputs.cellId, "cellId"],
      [inputs.registrationSalt, "registrationSalt"],
      [inputs.identityCommitment, "identityCommitment"]
    ];
    for (const [val, name] of fieldChecks) {
      if (val < 0n) {
        throw new Error(`${name} cannot be negative`);
      }
      if (val >= BN254_MODULUS) {
        throw new Error(`${name} exceeds BN254 scalar field modulus`);
      }
    }
    // --- Merkle proof shape: lengths must match the configured depth ---
    if (!Array.isArray(inputs.userPath)) {
      throw new Error("userPath must be an array");
    }
    // Hard cap first (DoS guard), then the exact-depth check below.
    if (inputs.userPath.length > TwoTreeNoirProver.MAX_MERKLE_DEPTH) {
      throw new Error(
        `userPath exceeds maximum allowed depth: ${inputs.userPath.length} > ${TwoTreeNoirProver.MAX_MERKLE_DEPTH}`
      );
    }
    if (inputs.userPath.length !== this.depth) {
      throw new Error(
        `userPath length mismatch: expected ${this.depth}, got ${inputs.userPath.length}. Did you initialize the prover with the wrong depth?`
      );
    }
    if (!Array.isArray(inputs.cellMapPath)) {
      throw new Error("cellMapPath must be an array");
    }
    if (inputs.cellMapPath.length !== this.depth) {
      throw new Error(
        `cellMapPath length mismatch: expected ${this.depth}, got ${inputs.cellMapPath.length}`
      );
    }
    if (!Array.isArray(inputs.cellMapPathBits)) {
      throw new Error("cellMapPathBits must be an array");
    }
    if (inputs.cellMapPathBits.length !== this.depth) {
      throw new Error(
        `cellMapPathBits length mismatch: expected ${this.depth}, got ${inputs.cellMapPathBits.length}`
      );
    }
    // Path bits are plain numbers and must be strictly 0 or 1.
    for (let i = 0; i < inputs.cellMapPathBits.length; i++) {
      if (inputs.cellMapPathBits[i] !== 0 && inputs.cellMapPathBits[i] !== 1) {
        throw new Error(
          `cellMapPathBits[${i}] must be 0 or 1, got ${inputs.cellMapPathBits[i]}`
        );
      }
    }
    // userIndex addresses a leaf, so it must fit in depth bits.
    if (inputs.userIndex < 0 || inputs.userIndex >= 2 ** this.depth) {
      throw new Error(
        `userIndex out of range: must be 0 to ${2 ** this.depth - 1}, got ${inputs.userIndex}`
      );
    }
    // --- Every path element must itself be a valid field element ---
    for (let i = 0; i < inputs.userPath.length; i++) {
      if (inputs.userPath[i] < 0n || inputs.userPath[i] >= BN254_MODULUS) {
        throw new Error(`userPath[${i}] outside BN254 scalar field`);
      }
    }
    for (let i = 0; i < inputs.cellMapPath.length; i++) {
      if (inputs.cellMapPath[i] < 0n || inputs.cellMapPath[i] >= BN254_MODULUS) {
        throw new Error(`cellMapPath[${i}] outside BN254 scalar field`);
      }
    }
  }
3271
+ // ========================================================================
3272
+ // Input Formatting
3273
+ // ========================================================================
3274
+ /**
3275
+ * Format TypeScript inputs into the Noir circuit's expected parameter names
3276
+ * and types (snake_case, hex strings, integer arrays).
3277
+ *
3278
+ * This is exposed as a method for testing purposes.
3279
+ */
3280
+ formatInputs(inputs) {
3281
+ return {
3282
+ // Public inputs
3283
+ user_root: toHex(inputs.userRoot),
3284
+ cell_map_root: toHex(inputs.cellMapRoot),
3285
+ districts: inputs.districts.map(toHex),
3286
+ nullifier: toHex(inputs.nullifier),
3287
+ action_domain: toHex(inputs.actionDomain),
3288
+ authority_level: toHex(BigInt(inputs.authorityLevel)),
3289
+ // Private inputs (witnesses)
3290
+ user_secret: toHex(inputs.userSecret),
3291
+ cell_id: toHex(inputs.cellId),
3292
+ registration_salt: toHex(inputs.registrationSalt),
3293
+ identity_commitment: toHex(inputs.identityCommitment),
3294
+ // Tree 1: Standard Merkle proof
3295
+ user_path: inputs.userPath.map(toHex),
3296
+ user_index: inputs.userIndex,
3297
+ // Tree 2: SMT proof
3298
+ cell_map_path: inputs.cellMapPath.map(toHex),
3299
+ cell_map_path_bits: inputs.cellMapPathBits
3300
+ };
3301
+ }
3302
+ // ========================================================================
3303
+ // Proof Generation
3304
+ // ========================================================================
3305
+ /**
3306
+ * Generate a ZK proof for two-tree membership.
3307
+ *
3308
+ * The circuit internally verifies:
3309
+ * 1. User leaf in Tree 1: hash4(user_secret, cell_id, registration_salt, authority_level)
3310
+ * 2. District commitment: poseidon2_sponge_24(districts)
3311
+ * 3. Cell map leaf in Tree 2: hash2(cell_id, district_commitment)
3312
+ * 4. Nullifier: hash2(identity_commitment, action_domain) (NUL-001)
3313
+ * 5. Authority level in [1, 5]
3314
+ *
3315
+ * @param inputs - All public and private inputs for the circuit
3316
+ * @returns Proof bytes and public inputs as hex strings
3317
+ */
3318
+ async generateProof(inputs) {
3319
+ this.validateInputs(inputs);
2523
3320
  await this.init();
2524
- console.log("[NoirProver] Generating witness...");
3321
+ console.log("[TwoTreeNoirProver] Generating witness...");
2525
3322
  const witnessStart = Date.now();
2526
- const noirInputs = {
2527
- merkle_root: inputs.merkleRoot,
2528
- nullifier: inputs.nullifier,
2529
- authority_hash: inputs.authorityHash,
2530
- epoch_id: inputs.epochId,
2531
- campaign_id: inputs.campaignId,
2532
- leaf: inputs.leaf,
2533
- merkle_path: inputs.merklePath,
2534
- leaf_index: inputs.leafIndex,
2535
- user_secret: inputs.userSecret
2536
- };
3323
+ const noirInputs = this.formatInputs(inputs);
2537
3324
  const { witness } = await this.noir.execute(noirInputs);
2538
- console.log(`[NoirProver] Witness generated in ${Date.now() - witnessStart}ms`);
2539
- console.log("[NoirProver] Generating proof...");
3325
+ console.log(`[TwoTreeNoirProver] Witness generated in ${Date.now() - witnessStart}ms`);
3326
+ console.log("[TwoTreeNoirProver] Generating proof...");
2540
3327
  const proofStart = Date.now();
2541
3328
  const { proof, publicInputs } = await this.backend.generateProof(witness);
2542
- console.log(`[NoirProver] Proof generated in ${Date.now() - proofStart}ms`);
3329
+ console.log(`[TwoTreeNoirProver] Proof generated in ${Date.now() - proofStart}ms`);
3330
+ if (publicInputs.length !== TWO_TREE_PUBLIC_INPUT_COUNT) {
3331
+ throw new Error(
3332
+ `Unexpected public input count: expected ${TWO_TREE_PUBLIC_INPUT_COUNT}, got ${publicInputs.length}`
3333
+ );
3334
+ }
2543
3335
  return {
2544
3336
  proof,
2545
- publicInputs: {
2546
- merkleRoot: publicInputs[0] ?? inputs.merkleRoot,
2547
- nullifier: publicInputs[1] ?? inputs.nullifier,
2548
- authorityHash: publicInputs[2] ?? inputs.authorityHash,
2549
- epochId: publicInputs[3] ?? inputs.epochId,
2550
- campaignId: publicInputs[4] ?? inputs.campaignId
2551
- }
3337
+ publicInputs
2552
3338
  };
2553
3339
  }
3340
+ // ========================================================================
3341
+ // Proof Verification
3342
+ // ========================================================================
2554
3343
  /**
2555
- * Verify a proof
3344
+ * Verify a two-tree membership proof.
3345
+ *
3346
+ * BR5-006: Validates public input count before backend verification.
3347
+ * For full public input binding (matching proof outputs to expected values),
3348
+ * use verifyProofWithExpectedInputs().
3349
+ *
3350
+ * @param proofResult - The proof result from generateProof()
3351
+ * @returns true if the proof is valid
2556
3352
  */
2557
- async verify(proof, publicInputs) {
3353
+ async verifyProof(proofResult) {
3354
+ if (proofResult.publicInputs.length !== TWO_TREE_PUBLIC_INPUT_COUNT) {
3355
+ throw new Error(
3356
+ `BR5-006: Public input count mismatch: expected ${TWO_TREE_PUBLIC_INPUT_COUNT}, got ${proofResult.publicInputs.length}. Possible proof tampering.`
3357
+ );
3358
+ }
2558
3359
  await this.init();
2559
- return this.backend.verifyProof({ proof, publicInputs });
3360
+ return this.backend.verifyProof({
3361
+ proof: proofResult.proof,
3362
+ publicInputs: proofResult.publicInputs
3363
+ });
2560
3364
  }
2561
3365
  /**
2562
- * Clean up resources
3366
+ * Verify a proof AND validate that public inputs match expected values.
3367
+ *
3368
+ * BR5-006 FIX: The base verifyProof() only checks that the proof is
3369
+ * cryptographically valid for its public inputs. It does NOT verify that
3370
+ * those inputs match what the caller intended. An attacker could substitute
3371
+ * a valid proof generated for different inputs (e.g., different districts
3372
+ * or a different authority level).
3373
+ *
3374
+ * This method binds the proof to the caller's expected values by checking:
3375
+ * - user_root matches
3376
+ * - cell_map_root matches
3377
+ * - All 24 districts match
3378
+ * - nullifier matches
3379
+ * - action_domain matches
3380
+ * - authority_level matches
3381
+ *
3382
+ * @param proofResult - The proof from generateProof()
3383
+ * @param expectedInputs - The original inputs used for proof generation
3384
+ * @returns true if proof is valid AND public inputs match expectations
3385
+ */
3386
+ async verifyProofWithExpectedInputs(proofResult, expectedInputs) {
3387
+ const valid = await this.verifyProof(proofResult);
3388
+ if (!valid) return false;
3389
+ const pi = proofResult.publicInputs;
3390
+ const checks = [
3391
+ [0, expectedInputs.userRoot, "user_root"],
3392
+ [1, expectedInputs.cellMapRoot, "cell_map_root"],
3393
+ [26, expectedInputs.nullifier, "nullifier"],
3394
+ [27, expectedInputs.actionDomain, "action_domain"],
3395
+ [28, BigInt(expectedInputs.authorityLevel), "authority_level"]
3396
+ ];
3397
+ for (const [idx, expected, name] of checks) {
3398
+ const actual = parsePublicInput(pi[idx], `${name}[${idx}]`);
3399
+ if (actual !== expected) {
3400
+ throw new Error(
3401
+ `BR5-006: Public input mismatch at index ${idx} (${name}): expected ${expected}, got ${actual}`
3402
+ );
3403
+ }
3404
+ }
3405
+ for (let i = 0; i < DISTRICT_SLOT_COUNT; i++) {
3406
+ const actual = parsePublicInput(pi[2 + i], `district[${i}]`);
3407
+ const expected = expectedInputs.districts[i];
3408
+ if (actual !== expected) {
3409
+ throw new Error(
3410
+ `BR5-006: District mismatch at slot ${i} (public input index ${2 + i}): expected ${expected}, got ${actual}`
3411
+ );
3412
+ }
3413
+ }
3414
+ return true;
3415
+ }
3416
+ // ========================================================================
3417
+ // Lifecycle
3418
+ // ========================================================================
3419
+ /**
3420
+ * Clean up resources (WASM memory, web workers).
2563
3421
  */
2564
3422
  async destroy() {
2565
3423
  if (this.backend) {
@@ -2569,6 +3427,54 @@ class NoirProver {
2569
3427
  }
2570
3428
  }
2571
3429
  }
3430
// Singleton cache: one initialized TwoTreeNoirProver per circuit depth.
const twoTreeProverInstances = /* @__PURE__ */ new Map();
// In-flight init promises keyed by depth, so concurrent callers share one init.
const twoTreeInitPromises = /* @__PURE__ */ new Map();
3432
/**
 * Get (or lazily create) the shared TwoTreeNoirProver for a given depth.
 *
 * Concurrent callers for the same depth share a single initialization; a
 * failed init is removed from the promise cache so later calls can retry.
 *
 * @param depth - Circuit Merkle depth (defaults to DEFAULT_CIRCUIT_DEPTH)
 * @param config - Extra prover config (threads etc.); depth is overridden
 * @returns The initialized prover for that depth
 */
async function getTwoTreeProverForDepth(depth = DEFAULT_CIRCUIT_DEPTH, config) {
  const cached = twoTreeProverInstances.get(depth);
  if (cached) {
    return cached;
  }
  const inFlight = twoTreeInitPromises.get(depth);
  if (inFlight) {
    return inFlight;
  }
  // Deferred promise: registered in the cache BEFORE the async work starts,
  // so even a synchronous failure path deletes a real cache entry.
  let settle;
  const initPromise = new Promise((resolve, reject) => {
    settle = { resolve, reject };
  });
  twoTreeInitPromises.set(depth, initPromise);
  void (async () => {
    try {
      const prover = new TwoTreeNoirProver({ ...config, depth });
      await prover.init();
      twoTreeProverInstances.set(depth, prover);
      twoTreeInitPromises.delete(depth);
      settle.resolve(prover);
    } catch (err) {
      // Drop the failed promise so the next caller retries initialization.
      twoTreeInitPromises.delete(depth);
      settle.reject(err instanceof Error ? err : new Error(String(err)));
    }
  })();
  return initPromise;
}
3462
/**
 * Destroy every cached prover (all depths) in parallel and clear both
 * singleton caches. Intended for tests and full teardown.
 */
async function resetTwoTreeProverSingleton() {
  const pending = [...twoTreeProverInstances.values()].map((p) => p.destroy());
  await Promise.all(pending);
  twoTreeProverInstances.clear();
  twoTreeInitPromises.clear();
}
3470
/**
 * Destroy and evict the cached prover for one depth, if present.
 * Any in-flight init promise for that depth is dropped as well.
 */
async function resetTwoTreeProverForDepth(depth) {
  const cached = twoTreeProverInstances.get(depth);
  if (cached) {
    await cached.destroy();
    twoTreeProverInstances.delete(depth);
  }
  twoTreeInitPromises.delete(depth);
}
2572
3478
 
2573
3479
  function checkCrossOriginIsolation() {
2574
3480
  const isolated = typeof crossOriginIsolated !== "undefined" && crossOriginIsolated;
@@ -2624,4 +3530,4 @@ if (typeof globalThis !== "undefined" && !globalThis.Buffer) {
2624
3530
  globalThis.Buffer = buffer.Buffer;
2625
3531
  }
2626
3532
 
2627
- export { NoirProver, checkCrossOriginIsolation, requireCrossOriginIsolation };
3533
// Public package surface: both prover classes, circuit-shape constants,
// depth-aware singleton accessors/reset helpers, validation and
// cross-origin-isolation utilities.
export { DEFAULT_CIRCUIT_DEPTH, DISTRICT_SLOT_COUNT, NoirProver, TWO_TREE_PUBLIC_INPUT_COUNT, TwoTreeNoirProver, checkCrossOriginIsolation, getProver, getProverForDepth, getTwoTreeProverForDepth, requireCrossOriginIsolation, resetProverForDepth, resetProverSingleton, resetTwoTreeProverForDepth, resetTwoTreeProverSingleton, validateAuthorityLevel };