Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion poly-commit/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ rand = { version = "0.8.0", optional = true }
ark-relations = { version = "^0.4.0", default-features = false, optional = true }
ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true }
hashbrown = { version = "0.14", default-features = false, optional = true }
rand_chacha = { version = "0.3.0", default-features = false }

digest = "0.10"
derivative = { version = "2", features = [ "use_core" ] }
Expand Down Expand Up @@ -56,7 +57,6 @@ ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "cu
ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] }
ark-bn254 = { version = "^0.4.0", default-features = false, features = [ "curve" ] }

rand_chacha = { version = "0.3.0", default-features = false }
ark-pcs-bench-templates = { path = "../bench-templates" }

[target.'cfg(target_arch = "aarch64")'.dependencies]
Expand Down
2 changes: 2 additions & 0 deletions poly-commit/src/linear_codes/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ use utils::{calculate_t, get_indices_from_sponge};

const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters";

/// Number of bytes (256 bits = 32 bytes) squeezed from the sponge to seed the
/// ChaCha20 RNG used when sampling column indices for the opening proof.
const CHACHA_SEED_BYTES: usize = 256 / 8;

/// For linear code PC schemes, the universal parameters, committer key
/// and verifier key are all the same. This trait abstracts the common
/// information contained in these.
Expand Down
40 changes: 24 additions & 16 deletions poly-commit/src/linear_codes/utils.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,20 @@
use crate::{utils::ceil_div, Error};
use core::convert::TryInto;

#[cfg(test)]
use crate::utils::ceil_div;
use crate::Error;
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ff::{FftField, Field, PrimeField};
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::string::ToString;
use ark_std::vec::Vec;
use ark_std::{collections::BTreeSet, string::ToString, vec::Vec};

use ark_std::rand::{Rng, SeedableRng};
#[cfg(all(not(feature = "std"), target_arch = "aarch64"))]
use num_traits::Float;
use rand_chacha::ChaChaRng;

use super::CHACHA_SEED_BYTES;

#[cfg(test)]
use {
Expand Down Expand Up @@ -127,29 +134,30 @@ impl<F: Field> SprsMat<F> {
}

#[inline]
#[cfg(test)]
/// Number of bytes needed to represent `n`: the bit length of `n`
/// (index of its highest set bit), rounded up to whole bytes.
pub(crate) fn get_num_bytes(n: usize) -> usize {
    let bit_len = (usize::BITS - n.leading_zeros()) as usize;
    ceil_div(bit_len, 8)
}

/// Sample `t` random points in `[0, n)` from the current state of the
/// `sponge` transcript, then drop duplicates — so fewer than `t` indices
/// may be returned.
pub(crate) fn get_indices_from_sponge<S: CryptographicSponge>(
    n: usize,
    t: usize,
    sponge: &mut S,
) -> Result<Vec<usize>, Error> {
    // Derive a 256-bit ChaCha20 seed from the sponge: prover and verifier
    // share the transcript, so both reconstruct the same RNG stream.
    let seed_bytes = sponge.squeeze_bytes(CHACHA_SEED_BYTES);
    let seed: [u8; CHACHA_SEED_BYTES] = seed_bytes.try_into().unwrap();
    let mut rng = ChaChaRng::from_seed(seed);

    // Draw `t` candidates in generation order, keeping only the first
    // occurrence of each value. This dedup is deterministic, which is
    // essential for prover-verifier consistency.
    let mut seen = BTreeSet::new();
    let mut indices = Vec::with_capacity(t);
    for _ in 0..t {
        let candidate = rng.gen_range(0..n);
        if seen.insert(candidate) {
            indices.push(candidate);
        }
    }
    Ok(indices)
}

#[inline]
Expand Down