diff --git a/poly-commit/Cargo.toml b/poly-commit/Cargo.toml index 49b48083..eb589434 100644 --- a/poly-commit/Cargo.toml +++ b/poly-commit/Cargo.toml @@ -20,6 +20,7 @@ rand = { version = "0.8.0", optional = true } ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } hashbrown = { version = "0.14", default-features = false, optional = true } +rand_chacha = { version = "0.3.0", default-features = false } digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } @@ -56,7 +57,6 @@ ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } -rand_chacha = { version = "0.3.0", default-features = false } ark-pcs-bench-templates = { path = "../bench-templates" } [target.'cfg(target_arch = "aarch64")'.dependencies] diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 91181311..ddb514f5 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -44,6 +44,8 @@ use utils::{calculate_t, get_indices_from_sponge}; const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; +const CHACHA_SEED_BYTES: usize = 256 / 8; + /// For linear code PC schemes, the universal parameters, committer key /// and verifier key are all the same. This trait abstracts the common /// information contained in these.
diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index d4cacc4f..bc31ddcb 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -1,13 +1,20 @@ -use crate::{utils::ceil_div, Error}; +use core::convert::TryInto; + +#[cfg(test)] +use crate::utils::ceil_div; +use crate::Error; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{FftField, Field, PrimeField}; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::string::ToString; -use ark_std::vec::Vec; +use ark_std::{collections::BTreeSet, string::ToString, vec::Vec}; +use ark_std::rand::{Rng, SeedableRng}; #[cfg(all(not(feature = "std"), target_arch = "aarch64"))] use num_traits::Float; +use rand_chacha::ChaChaRng; + +use super::CHACHA_SEED_BYTES; #[cfg(test)] use { @@ -127,29 +134,30 @@ impl<F: Field> SprsMat<F> { } #[inline] +#[cfg(test)] pub(crate) fn get_num_bytes(n: usize) -> usize { ceil_div((usize::BITS - n.leading_zeros()) as usize, 8) } /// Generate `t` (not necessarily distinct) random points in `[0, n)` -/// using the current state of the `transcript`. +/// using the current state of the `transcript`. Duplicates are removed (leading +/// to possibly fewer than `t` points being returned).
pub(crate) fn get_indices_from_sponge<S: CryptographicSponge>( n: usize, t: usize, sponge: &mut S, ) -> Result<Vec<usize>, Error> { - let bytes_to_squeeze = get_num_bytes(n); - let mut indices = Vec::with_capacity(t); - for _ in 0..t { - let bytes = sponge.squeeze_bytes(bytes_to_squeeze); - sponge.absorb(&bytes); - - // get the usize from Vec<u8>: - let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); - // modulo the number of columns in the encoded matrix - indices.push(ind % n); - } - Ok(indices) + // Squeeze 256 bits from the sponge and use them to seed a ChaCha20 PRNG + let seed = sponge.squeeze_bytes(CHACHA_SEED_BYTES); + let mut rng = ChaChaRng::from_seed(seed.try_into().unwrap()); + + // Squeeze t elements, then remove duplicates. Crucially, this must be + // done deterministically to ensure prover-verifier consistency. + let mut seen = BTreeSet::new(); + Ok((0..t) + .map(|_| rng.gen_range(0..n)) + .filter(|x| seen.insert(*x)) + .collect()) } #[inline]