diff --git a/storage-proofs-update/src/challenges.rs b/storage-proofs-update/src/challenges.rs index 0b52a10cf..5e8b65373 100644 --- a/storage-proofs-update/src/challenges.rs +++ b/storage-proofs-update/src/challenges.rs @@ -61,6 +61,28 @@ impl Challenges { challenges_remaining: challenge_count, } } + + pub fn new_poseidon(sector_nodes: usize, comm_r_new: TreeRDomain) -> Self { + let repeats = partition_count(sector_nodes); + + let challenge_bit_len = sector_nodes.trailing_zeros() as usize; + let random_bits_per_challenge = challenge_bit_len; + let challenges_per_digest = Fr::CAPACITY as usize / random_bits_per_challenge; + + let challenge_count = challenge_count(sector_nodes) * repeats; + let digest_index_all_partitions = 0; + + Challenges { + comm_r_new, + partition_bits: 0, + random_bits_per_challenge, + challenges_per_digest, + digest_index_all_partitions, + i: 0, + digest_bits: Vec::with_capacity(Fr::NUM_BITS as usize), + challenges_remaining: challenge_count, + } + } } impl Iterator for Challenges { diff --git a/storage-proofs-update/src/circuit_poseidon.rs b/storage-proofs-update/src/circuit_poseidon.rs new file mode 100644 index 000000000..0f6736ea3 --- /dev/null +++ b/storage-proofs-update/src/circuit_poseidon.rs @@ -0,0 +1,664 @@ +use std::marker::PhantomData; + +use bellperson::{ + gadgets::{boolean::AllocatedBit, num::AllocatedNum}, + Circuit, ConstraintSystem, LinearCombination, SynthesisError, +}; +use blstrs::Scalar as Fr; +use ff::{Field, PrimeFieldBits}; +use filecoin_hashers::{HashFunction, Hasher}; +use generic_array::typenum::Unsigned; +use neptune::circuit::poseidon_hash; +use serde::{Deserialize, Serialize}; +use storage_proofs_core::{ + compound_proof::CircuitComponent, + gadgets::por::por_no_challenge_input, + merkle::{MerkleProof, MerkleProofTrait, MerkleTreeTrait}, +}; + +use crate::{ + constants::{ + challenge_count, hs, partition_count, validate_tree_r_shape, TreeRDomain, TreeRHasher, + POSEIDON_CONSTANTS_GEN_RANDOMNESS, + }, + gadgets::{gen_challenge_bits, get_challenge_high_bits, label_r_new}, + PublicParams, +}; + +// The public inputs for `EmptySectorUpdateCircuit`. +#[derive(Clone)] +pub struct PublicInputs { + // `h_select` chooses the number of encoding hashes.` + pub h_select: Option, + // The SDR-PoRep CommR corresponding to the replica prior to updating the sector data. + pub comm_r_old: Option, + // The root of TreeDNew but with TreeR shape. + pub comm_d_new: Option, + // A commitment to the `EmptySectorUpdate` encoding of the updated sector data. + pub comm_r_new: Option, +} + +impl PublicInputs { + pub fn new( + sector_nodes: usize, + h: usize, + comm_r_old: TreeRDomain, + comm_d_new: TreeRDomain, + comm_r_new: TreeRDomain, + ) -> Self { + let hs_index = hs(sector_nodes) + .iter() + .position(|h_allowed| *h_allowed == h) + .expect("invalid `h` for sector-size"); + + let h_select = 1u64 << hs_index; + + PublicInputs { + h_select: Some(Fr::from(h_select)), + comm_r_old: Some(comm_r_old.into()), + comm_d_new: Some(comm_d_new.into()), + comm_r_new: Some(comm_r_new.into()), + } + } + + // Public-inputs used during Groth16 parameter generation. + pub fn empty() -> Self { + PublicInputs { + h_select: None, + comm_r_old: None, + comm_d_new: None, + comm_r_new: None, + } + } + + // The ordered vector used to verify a Groth16 proof. 
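Stepping back to `Challenges::new_poseidon` above: each Poseidon digest yields `Fr::CAPACITY` usable random bits, challenges consume `log2(sector_nodes)` bits apiece, and this constructor draws all partitions' challenges from a single stream (`partition_bits: 0`, one digest index for all partitions). A minimal, std-only sketch of that arithmetic follows; the concrete figures (254 usable bits, 2^30 nodes, 16 partitions, 86 challenges per partition) are placeholder assumptions for illustration, not values read from this crate.

```rust
// Illustrative, std-only sketch of the challenge-packing arithmetic in
// `Challenges::new_poseidon`. All concrete numbers below are assumptions.
fn poseidon_challenge_budget(sector_nodes: usize, challenges_per_partition: usize, partitions: usize) {
    const FR_CAPACITY: usize = 254; // assumed usable bits per digest for the BLS12-381 scalar field

    // Each challenge consumes log2(sector_nodes) random bits.
    let challenge_bit_len = sector_nodes.trailing_zeros() as usize;
    // Whole challenges that fit into one digest.
    let challenges_per_digest = FR_CAPACITY / challenge_bit_len;
    // `new_poseidon` serves every partition from one stream.
    let total_challenges = challenges_per_partition * partitions;
    // Digests needed to cover all challenges (integer ceiling).
    let digests_needed = (total_challenges + challenges_per_digest - 1) / challenges_per_digest;

    println!(
        "{} challenges of {} bits, {} per digest => {} digests",
        total_challenges, challenge_bit_len, challenges_per_digest, digests_needed
    );
}

fn main() {
    // Hypothetical 32GiB-sector figures: 2^30 nodes, 16 partitions, 86 challenges each.
    poseidon_challenge_budget(1 << 30, 86, 16);
}
```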
+ pub fn to_vec(&self) -> Vec { + vec![ + self.h_select.unwrap(), + self.comm_r_old.unwrap(), + self.comm_d_new.unwrap(), + self.comm_r_new.unwrap(), + ] + } +} + +#[derive(Serialize, Deserialize)] +pub struct ChallengeProofVanilla +where + TreeR: MerkleTreeTrait, +{ + #[serde(bound( + serialize = "MerkleProof: Serialize", + deserialize = "MerkleProof: Deserialize<'de>" + ))] + pub proof_r_old: + MerkleProof, + #[serde(bound( + serialize = "MerkleProof: Serialize", + deserialize = "MerkleProof: Deserialize<'de>" + ))] + pub proof_d_new: + MerkleProof, + #[serde(bound( + serialize = "MerkleProof: Serialize", + deserialize = "MerkleProof: Deserialize<'de>" + ))] + pub proof_r_new: + MerkleProof, +} + +// Implement `Clone` by hand because `MerkleTreeTrait` does not implement `Clone`. +impl Clone for ChallengeProofVanilla +where + TreeR: MerkleTreeTrait, +{ + fn clone(&self) -> Self { + ChallengeProofVanilla { + proof_r_old: self.proof_r_old.clone(), + proof_d_new: self.proof_d_new.clone(), + proof_r_new: self.proof_r_new.clone(), + } + } +} + +pub struct ChallengeProof +where + TreeR: MerkleTreeTrait, +{ + pub leaf_r_old: Option, + pub path_r_old: Vec>>, + pub leaf_d_new: Option, + pub path_d_new: Vec>>, + pub leaf_r_new: Option, + pub path_r_new: Vec>>, + pub _tree_r: PhantomData, +} + +// Implement `Clone` by hand because `MerkleTreeTrait` does not implement `Clone`. +impl Clone for ChallengeProof +where + TreeR: MerkleTreeTrait, +{ + fn clone(&self) -> Self { + ChallengeProof { + leaf_r_old: self.leaf_r_old, + path_r_old: self.path_r_old.clone(), + leaf_d_new: self.leaf_d_new, + path_d_new: self.path_d_new.clone(), + leaf_r_new: self.leaf_r_new, + path_r_new: self.path_r_new.clone(), + _tree_r: PhantomData, + } + } +} + +impl From> for ChallengeProof +where + TreeR: MerkleTreeTrait, +{ + fn from(vanilla_challenge_proof: ChallengeProofVanilla) -> Self { + let ChallengeProofVanilla { + proof_r_old, + proof_d_new, + proof_r_new, + } = vanilla_challenge_proof; + ChallengeProof::from_merkle_proofs(proof_r_old, proof_d_new, proof_r_new) + } +} + +impl ChallengeProof +where + TreeR: MerkleTreeTrait, +{ + pub fn from_merkle_proofs( + proof_r_old: MerkleProof< + TreeRHasher, + TreeR::Arity, + TreeR::SubTreeArity, + TreeR::TopTreeArity, + >, + proof_d_new: MerkleProof< + TreeRHasher, + TreeR::Arity, + TreeR::SubTreeArity, + TreeR::TopTreeArity, + >, + proof_r_new: MerkleProof< + TreeRHasher, + TreeR::Arity, + TreeR::SubTreeArity, + TreeR::TopTreeArity, + >, + ) -> Self { + let leaf_r_old = Some(proof_r_old.leaf().into()); + let path_r_old: Vec>> = proof_r_old + .path() + .iter() + .map(|(siblings, _insert)| siblings.iter().map(|&s| Some(s.into())).collect()) + .collect(); + + let leaf_d_new = Some(proof_d_new.leaf().into()); + let path_d_new: Vec>> = proof_d_new + .path() + .iter() + .map(|(siblings, _insert)| siblings.iter().map(|&s| Some(s.into())).collect()) + .collect(); + + let leaf_r_new = Some(proof_r_new.leaf().into()); + let path_r_new: Vec>> = proof_r_new + .path() + .iter() + .map(|(siblings, _insert)| siblings.iter().map(|&s| Some(s.into())).collect()) + .collect(); + + ChallengeProof { + leaf_r_old, + path_r_old, + leaf_d_new, + path_d_new, + leaf_r_new, + path_r_new, + _tree_r: PhantomData, + } + } + + pub fn empty(sector_nodes: usize) -> Self { + let challenge_bit_len = sector_nodes.trailing_zeros() as usize; + + // TreeROld and TreeRNew and TreeD have the same shape, thus have the same Merkle path length. 
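Before the allocation code that follows, here is the same path-shape arithmetic as a std-only worked example. The arities mirror the test trees added in this patch (`U8, U4, U0` for the 1KiB/8KiB tests, `U8, U8, U0` for the 32GiB constraint test); `empty_path_shape` is a hypothetical helper, not part of the crate.

```rust
// Std-only sketch of the Merkle-path shape computed by `ChallengeProof::empty`:
// one row of `arity - 1` sibling slots per tree level, base rows first.
fn empty_path_shape(sector_nodes: usize, base_arity: usize, sub_arity: usize, top_arity: usize) -> Vec<usize> {
    let mut bits_remaining = sector_nodes.trailing_zeros() as usize;

    let mut sub_and_top = Vec::new();
    if sub_arity > 0 {
        sub_and_top.push(sub_arity - 1); // one sub row of `sub_arity - 1` siblings
        bits_remaining -= sub_arity.trailing_zeros() as usize;
    }
    if top_arity > 0 {
        sub_and_top.push(top_arity - 1);
        bits_remaining -= top_arity.trailing_zeros() as usize;
    }

    let base_rows = bits_remaining / base_arity.trailing_zeros() as usize;
    let mut rows: Vec<usize> = std::iter::repeat(base_arity - 1).take(base_rows).collect();
    rows.extend(sub_and_top);
    rows // sibling slots (`None`s) allocated per tree row
}

fn main() {
    // 8KiB sector (2^8 nodes), arities 8/4/0: two base rows of 7 siblings, one sub row of 3.
    assert_eq!(empty_path_shape(1 << 8, 8, 4, 0), vec![7, 7, 3]);
}
```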
+ let path_r = { + let base_arity = TreeR::Arity::to_usize(); + let sub_arity = TreeR::SubTreeArity::to_usize(); + let top_arity = TreeR::TopTreeArity::to_usize(); + + let mut bits_remaining = challenge_bit_len; + let mut sub_and_top_path = vec![]; + + if sub_arity > 0 { + sub_and_top_path.push(vec![None; sub_arity - 1]); + bits_remaining -= sub_arity.trailing_zeros() as usize; + }; + + if top_arity > 0 { + sub_and_top_path.push(vec![None; top_arity - 1]); + bits_remaining -= top_arity.trailing_zeros() as usize; + }; + + let base_path_len = bits_remaining / base_arity.trailing_zeros() as usize; + let base_path = vec![vec![None; base_arity - 1]; base_path_len]; + + [base_path, sub_and_top_path].concat() + }; + + ChallengeProof { + leaf_r_old: None, + path_r_old: path_r.clone(), + leaf_d_new: None, + path_d_new: path_r.clone(), + leaf_r_new: None, + path_r_new: path_r, + _tree_r: PhantomData, + } + } +} + +#[derive(Clone)] +pub struct PrivateInputs +where + TreeR: MerkleTreeTrait, +{ + // CommC created by running SDR-PoRep on the old/un-updated data. + pub comm_c: Option, + // Root of the replica tree (called TreeR or TreeRLast) output by SDR-PoRep on the + // old/un-updated data (here called TreeROld). + pub root_r_old: Option, + // Root of the replica tree build over the new/updated data's replica (TreeRNew). + pub root_r_new: Option, + // Generate three Merkle proofs (TreeROld, TreeDNew, TreeRNew) for each of this partition's + // challenges. + pub challenge_proofs: Vec>, +} + +impl PrivateInputs +where + TreeR: MerkleTreeTrait, +{ + pub fn new(comm_c: TreeRDomain, challenge_proofs: &[ChallengeProofVanilla]) -> Self { + let root_r_old: Fr = challenge_proofs[0].proof_r_old.root().into(); + let root_r_new: Fr = challenge_proofs[0].proof_r_new.root().into(); + + let challenge_proofs: Vec> = challenge_proofs + .iter() + .cloned() + .map(ChallengeProof::from) + .collect(); + + PrivateInputs { + comm_c: Some(comm_c.into()), + root_r_old: Some(root_r_old), + root_r_new: Some(root_r_new), + challenge_proofs, + } + } + + pub fn empty(sector_nodes: usize) -> Self { + let challenge_count = challenge_count(sector_nodes) * partition_count(sector_nodes); + PrivateInputs { + comm_c: None, + root_r_old: None, + root_r_new: None, + challenge_proofs: vec![ChallengeProof::empty(sector_nodes); challenge_count], + } + } +} + +pub struct EmptySectorUpdateCircuit +where + TreeR: MerkleTreeTrait, +{ + pub pub_params: PublicParams, + pub pub_inputs: PublicInputs, + pub priv_inputs: PrivateInputs, +} + +impl CircuitComponent for EmptySectorUpdateCircuit +where + TreeR: MerkleTreeTrait, +{ + type ComponentPrivateInputs = (); +} + +impl EmptySectorUpdateCircuit +where + TreeR: MerkleTreeTrait, +{ + pub fn blank(pub_params: PublicParams) -> Self { + let pub_inputs = PublicInputs::empty(); + let priv_inputs = PrivateInputs::::empty(pub_params.sector_nodes); + EmptySectorUpdateCircuit { + pub_params, + pub_inputs, + priv_inputs, + } + } +} + +impl Circuit for EmptySectorUpdateCircuit +where + TreeR: MerkleTreeTrait, +{ + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { + #[allow(unused_variables)] + let EmptySectorUpdateCircuit { + pub_params: + PublicParams { + sector_nodes, + challenge_count, + challenge_bit_len, + partition_count, + partition_bit_len, + apex_leaf_count, + apex_leaf_bit_len, + }, + pub_inputs: + PublicInputs { + h_select, + comm_r_old, + comm_d_new, + comm_r_new, + }, + priv_inputs: + PrivateInputs { + comm_c, + root_r_old, + root_r_new, + + challenge_proofs, + }, + } = self; + + 
validate_tree_r_shape::(sector_nodes); + let hs = hs(sector_nodes); + let h_select_bit_len = hs.len(); + + if let Some(h_select) = h_select { + let bits: Vec = h_select.to_le_bits().into_iter().collect(); + + // `h_select` should have exactly one bit set. + let h_select_bits = &bits[..h_select_bit_len]; + assert_eq!( + h_select_bits.iter().filter(|bit| **bit).count(), + 1, + "h_select does not have exactly one bit set" + ); + // The remanining bits should be zero. + assert!(bits[h_select_bit_len..].iter().all(|bit| !*bit)); + } + + assert_eq!(challenge_proofs.len(), challenge_count * partition_count); + + // Allocate public-inputs. + + // Add a public-input `h_select`. + let h_select = AllocatedNum::alloc(cs.namespace(|| "h_select"), || { + h_select.ok_or(SynthesisError::AssignmentMissing) + })?; + h_select.inputize(cs.namespace(|| "h_select (public input)"))?; + + // Split `k_and_h_select` into partition and h-select bits. + let h_select_bits = { + let bit_len = h_select_bit_len; + + let bits: Vec> = if let Some(h_select) = h_select.get_value() { + h_select + .to_le_bits() + .into_iter() + .take(bit_len) + .map(Some) + .collect() + } else { + vec![None; bit_len] + }; + + let h_select_bits = bits + .into_iter() + .enumerate() + .map(|(i, bit)| { + AllocatedBit::alloc(cs.namespace(|| format!("h_select_bit_{}", i)), bit) + }) + .collect::, SynthesisError>>()?; + + let mut lc = LinearCombination::::zero(); + let mut pow2 = Fr::one(); + for bit in h_select_bits.iter() { + lc = lc + (pow2, bit.get_variable()); + pow2 = pow2.double(); + } + cs.enforce( + || "h_select binary decomp", + |_| lc, + |lc| lc + CS::one(), + |lc| lc + h_select.get_variable(), + ); + + h_select_bits + }; + + let comm_r_old = AllocatedNum::alloc(cs.namespace(|| "comm_r_old"), || { + comm_r_old.ok_or(SynthesisError::AssignmentMissing) + })?; + comm_r_old.inputize(cs.namespace(|| "comm_r_old_input"))?; + + let comm_d_new = AllocatedNum::alloc(cs.namespace(|| "comm_d_new"), || { + comm_d_new.ok_or(SynthesisError::AssignmentMissing) + })?; + comm_d_new.inputize(cs.namespace(|| "comm_d_new_input"))?; + + let comm_r_new = AllocatedNum::alloc(cs.namespace(|| "comm_r_new"), || { + comm_r_new.ok_or(SynthesisError::AssignmentMissing) + })?; + comm_r_new.inputize(cs.namespace(|| "comm_r_new_input"))?; + + // Compute `phi = H(comm_d_new || comm_r_old)` from public-inputs. + let phi = poseidon_hash( + cs.namespace(|| "phi"), + vec![comm_d_new.clone(), comm_r_old.clone()], + &POSEIDON_CONSTANTS_GEN_RANDOMNESS, + )?; + + // Allocate private-inputs; excludes each challenge's Merkle proofs. + + let comm_c = AllocatedNum::alloc(cs.namespace(|| "comm_c"), || { + comm_c.ok_or(SynthesisError::AssignmentMissing) + })?; + + let root_r_old = AllocatedNum::alloc(cs.namespace(|| "root_r_old"), || { + root_r_old.ok_or(SynthesisError::AssignmentMissing) + })?; + + let root_r_new = AllocatedNum::alloc(cs.namespace(|| "root_r_new"), || { + root_r_new.ok_or(SynthesisError::AssignmentMissing) + })?; + + // Assert that the witnessed `root_r_old` and `root_r_new` are consistent with the + // public `comm_r_old` and `comm_r_new` via `comm_r = H(comm_c || root_r)`. 
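The same relation holds outside the circuit, and the tests in this patch build their commitments with it: `comm_r` is the hash of `comm_c` and the TreeR root. A small sketch of that vanilla-side check is below; `check_comm_r` is a hypothetical helper written for illustration and assumes `TreeRDomain` compares with `==`.

```rust
// Vanilla counterpart of the in-circuit `hash2_circuit` consistency constraints:
// recompute `comm_r = H(comm_c || root_r)` and compare against the public value.
use filecoin_hashers::{HashFunction, Hasher};
use storage_proofs_update::constants::{TreeRDomain, TreeRHasher};

fn check_comm_r(comm_c: &TreeRDomain, root_r: &TreeRDomain, comm_r: &TreeRDomain) -> bool {
    let comm_r_calc = <TreeRHasher as Hasher>::Function::hash2(comm_c, root_r);
    &comm_r_calc == comm_r
}
```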
+ let comm_r_old_calc = ::Function::hash2_circuit( + cs.namespace(|| "comm_r_old_calc"), + &comm_c, + &root_r_old, + )?; + cs.enforce( + || "enforce comm_r_old_calc == comm_r_old", + |lc| lc + comm_r_old_calc.get_variable(), + |lc| lc + CS::one(), + |lc| lc + comm_r_old.get_variable(), + ); + let comm_r_new_calc = ::Function::hash2_circuit( + cs.namespace(|| "comm_r_new_calc"), + &comm_c, + &root_r_new, + )?; + cs.enforce( + || "enforce comm_r_new_calc == comm_r_new", + |lc| lc + comm_r_new_calc.get_variable(), + |lc| lc + CS::one(), + |lc| lc + comm_r_new.get_variable(), + ); + + let partition = + AllocatedNum::alloc(cs.namespace(|| "gen_challenge_bits parition zero"), || { + Ok(Fr::zero()) + })?; + // Generate `challenge_bit_len` number of random bits for each challenge. + // For each challenge generate a random index in `0..number of leafs per partition`; we + // append the partition-index's bits onto the random bits generated for each challenge + // producing a challenge in `0..sector_nodes`. + let generated_bits = gen_challenge_bits( + cs.namespace(|| "gen_challenge_bits"), + &comm_r_new, + &partition, + challenge_count * partition_count, + challenge_bit_len, + )?; + + for (c_index, c_bits) in generated_bits.into_iter().enumerate() { + // Compute this challenge's `rho`. + let c_high = get_challenge_high_bits( + cs.namespace(|| format!("get_challenge_high_bits (c_index={})", c_index)), + &c_bits, + &h_select_bits, + &hs, + )?; + let rho = poseidon_hash( + cs.namespace(|| format!("rho (c_index={})", c_index)), + vec![phi.clone(), c_high.clone()], + &POSEIDON_CONSTANTS_GEN_RANDOMNESS, + )?; + + // Validate this challenge's Merkle proofs. + let challenge_proof = &challenge_proofs[c_index]; + + let leaf_r_old = AllocatedNum::alloc( + cs.namespace(|| format!("leaf_r_old (c_index={})", c_index)), + || { + challenge_proof + .leaf_r_old + .ok_or(SynthesisError::AssignmentMissing) + }, + )?; + + let leaf_d_new = AllocatedNum::alloc( + cs.namespace(|| format!("leaf_d_new (c_index={})", c_index)), + || { + challenge_proof + .leaf_d_new + .ok_or(SynthesisError::AssignmentMissing) + }, + )?; + + let leaf_r_new = label_r_new( + cs.namespace(|| format!("leaf_r_new (c_index={})", c_index)), + &leaf_r_old, + &leaf_d_new, + &rho, + )?; + + // Sanity check that the calculated `leaf_r_new` agrees with the provided value. 
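Concretely, "agrees" means the witnessed leaf equals the vanilla encoding used by `encode_new_replica` in the tests: `label_r_new = label_r_old + label_d_new * rho`. A minimal sketch of that expectation with plain field arithmetic; `expected_leaf_r_new` and `sanity_check` are hypothetical helpers mirroring the assertion below, not crate APIs.

```rust
use blstrs::Scalar as Fr;

// The value the in-circuit `label_r_new` gadget should reproduce.
fn expected_leaf_r_new(leaf_r_old: Fr, leaf_d_new: Fr, rho: Fr) -> Fr {
    leaf_r_old + leaf_d_new * rho
}

// Same shape as the circuit's debug assertion: the witnessed new leaf must match.
fn sanity_check(leaf_r_old: Fr, leaf_d_new: Fr, rho: Fr, witnessed_leaf_r_new: Fr) {
    assert_eq!(expected_leaf_r_new(leaf_r_old, leaf_d_new, rho), witnessed_leaf_r_new);
}
```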
+ if let Some(leaf_r_new) = leaf_r_new.get_value() { + assert_eq!(leaf_r_new, challenge_proof.leaf_r_new.unwrap()); + } + + let path_r_old = challenge_proof.path_r_old + .iter() + .enumerate() + .map(|(tree_row, siblings)| { + siblings + .iter() + .enumerate() + .map(|(sibling_index, sibling)| { + AllocatedNum::alloc( + cs.namespace(|| format!( + "path_r_old sibling (c_index={}, tree_row={}, sibling_index={})", + c_index, + tree_row, + sibling_index, + )), + || sibling.ok_or(SynthesisError::AssignmentMissing), + ) + }) + .collect::>, SynthesisError>>() + }) + .collect::>>, SynthesisError>>()?; + + por_no_challenge_input::( + cs.namespace(|| format!("por tree_r_old (c_index={})", c_index)), + c_bits.clone(), + leaf_r_old.clone(), + path_r_old, + root_r_old.clone(), + )?; + + let path_r_new = challenge_proof.path_r_new + .iter() + .enumerate() + .map(|(tree_row, siblings)| { + siblings + .iter() + .enumerate() + .map(|(sibling_index, sibling)| { + AllocatedNum::alloc( + cs.namespace(|| format!( + "path_r_new sibling (c_index={}, tree_row={}, sibling_index={})", + c_index, + tree_row, + sibling_index, + )), + || sibling.ok_or(SynthesisError::AssignmentMissing), + ) + }) + .collect::>, SynthesisError>>() + }) + .collect::>>, SynthesisError>>()?; + + por_no_challenge_input::( + cs.namespace(|| format!("por tree_r_new (c_index={})", c_index)), + c_bits.clone(), + leaf_r_new.clone(), + path_r_new, + root_r_new.clone(), + )?; + + let path_d_new = challenge_proof.path_d_new + .iter() + .enumerate() + .map(|(tree_row, siblings)| { + siblings + .iter() + .enumerate() + .map(|(sibling_index, sibling)| { + AllocatedNum::alloc( + cs.namespace(|| format!( + "path_d_new sibling (c_index={}, tree_row={}, sibling_index={})", + c_index, + tree_row, + sibling_index, + )), + || sibling.ok_or(SynthesisError::AssignmentMissing), + ) + }) + .collect::>, SynthesisError>>() + }) + .collect::>>, SynthesisError>>()?; + + por_no_challenge_input::( + cs.namespace(|| format!("por tree_d_new (c_index={})", c_index)), + c_bits.clone(), + leaf_d_new.clone(), + path_d_new, + comm_d_new.clone(), + )?; + } + + Ok(()) + } +} diff --git a/storage-proofs-update/src/lib.rs b/storage-proofs-update/src/lib.rs index 12791032b..757cdba2f 100644 --- a/storage-proofs-update/src/lib.rs +++ b/storage-proofs-update/src/lib.rs @@ -5,6 +5,7 @@ pub(crate) mod gadgets; pub mod vanilla; mod challenges; +pub mod circuit_poseidon; pub use self::challenges::Challenges; pub use self::circuit::EmptySectorUpdateCircuit; diff --git a/storage-proofs-update/src/vanilla.rs b/storage-proofs-update/src/vanilla.rs index e533bb618..c3768f0e2 100644 --- a/storage-proofs-update/src/vanilla.rs +++ b/storage-proofs-update/src/vanilla.rs @@ -6,7 +6,7 @@ use std::path::{Path, PathBuf}; use anyhow::{ensure, Context, Error}; use blstrs::Scalar as Fr; use ff::Field; -use filecoin_hashers::{HashFunction, Hasher}; +use filecoin_hashers::{Domain, HashFunction, Hasher}; use fr32::{bytes_into_fr, fr_into_bytes_slice}; use generic_array::typenum::Unsigned; use log::{info, trace}; @@ -474,7 +474,7 @@ where // `phi = H(comm_d_new, comm_r_old)` where Poseidon uses the custom "gen randomness" domain // separation tag. 
#[inline] -pub fn phi(comm_d_new: &TreeDDomain, comm_r_old: &TreeRDomain) -> TreeRDomain { +pub fn phi(comm_d_new: &TreeD, comm_r_old: &TreeRDomain) -> TreeRDomain { let comm_d_new: Fr = (*comm_d_new).into(); let comm_r_old: Fr = (*comm_r_old).into(); Poseidon::new_with_preimage( diff --git a/storage-proofs-update/tests/circuit.rs b/storage-proofs-update/tests/circuit.rs index 4d9b29b37..350a29f8a 100644 --- a/storage-proofs-update/tests/circuit.rs +++ b/storage-proofs-update/tests/circuit.rs @@ -2,6 +2,7 @@ #![allow(dead_code)] use std::path::Path; +use bellperson::util_cs::bench_cs::BenchCS; use bellperson::{util_cs::test_cs::TestConstraintSystem, Circuit}; use blstrs::Scalar as Fr; use filecoin_hashers::{Domain, HashFunction, Hasher}; @@ -19,105 +20,13 @@ use storage_proofs_update::{ constants::{ apex_leaf_count, hs, partition_count, validate_tree_r_shape, TreeD, TreeDDomain, TreeRDomain, TreeRHasher, SECTOR_SIZE_16_KIB, SECTOR_SIZE_1_KIB, SECTOR_SIZE_2_KIB, - SECTOR_SIZE_32_KIB, SECTOR_SIZE_4_KIB, SECTOR_SIZE_8_KIB, + SECTOR_SIZE_32_GIB, SECTOR_SIZE_32_KIB, SECTOR_SIZE_4_KIB, SECTOR_SIZE_8_KIB, }, phi, rho, vanilla, Challenges, EmptySectorUpdateCircuit, PublicParams, }; use tempfile::tempdir; -// Selects a value for `h` via `h = hs[log2(h_select)]`; default to taking `h = hs[2]`. -const H_SELECT: u64 = 1 << 2; - -fn create_tree( - labels: &[<::Hasher as Hasher>::Domain], - tmp_path: &Path, - tree_name: &str, -) -> MerkleTreeWrapper -{ - let sector_nodes = labels.len(); - let base_arity = Tree::Arity::to_usize(); - let sub_arity = Tree::SubTreeArity::to_usize(); - let top_arity = Tree::TopTreeArity::to_usize(); - - // Create a single base-tree, a single sub-tree (out of base-trees), or a single top-tree - // (out of sub-trees, each made of base-trees). 
- if sub_arity == 0 && top_arity == 0 { - let config = StoreConfig::new( - tmp_path, - tree_name.to_string(), - default_rows_to_discard(sector_nodes, base_arity), - ); - let leafs = labels.iter().copied().map(Ok); - MerkleTreeWrapper::try_from_iter_with_config(leafs, config) - .unwrap_or_else(|_| panic!("failed to create non-compound-tree {}", tree_name)) - } else if top_arity == 0 { - let base_tree_count = sub_arity; - let leafs_per_base_tree = sector_nodes / base_tree_count; - let rows_to_discard = default_rows_to_discard(leafs_per_base_tree, base_arity); - let base_trees: Vec> = (0 - ..base_tree_count) - .map(|i| { - let config = StoreConfig::new( - tmp_path, - format!("{}-base-{}", tree_name, i), - rows_to_discard, - ); - let leafs = labels[i * leafs_per_base_tree..(i + 1) * leafs_per_base_tree] - .iter() - .copied() - .map(Ok); - MerkleTreeWrapper::try_from_iter_with_config(leafs, config) - .unwrap_or_else(|_| panic!("failed to create {} base-tree {}", tree_name, i)) - }) - .collect(); - MerkleTreeWrapper::from_trees(base_trees) - .unwrap_or_else(|_| panic!("failed to create {} from base-trees", tree_name)) - } else { - let base_tree_count = top_arity * sub_arity; - let sub_tree_count = top_arity; - let leafs_per_base_tree = sector_nodes / base_tree_count; - let base_trees_per_sub_tree = sub_arity; - let rows_to_discard = default_rows_to_discard(leafs_per_base_tree, base_arity); - let sub_trees: Vec< - MerkleTreeWrapper, - > = (0..sub_tree_count) - .map(|sub_index| { - let first_sub_leaf = sub_index * base_trees_per_sub_tree * leafs_per_base_tree; - let base_trees: Vec> = (0 - ..base_trees_per_sub_tree) - .map(|base_index| { - let config = StoreConfig::new( - tmp_path, - format!("{}-sub-{}-base-{}", tree_name, sub_index, base_index), - rows_to_discard, - ); - let first_base_leaf = first_sub_leaf + base_index * leafs_per_base_tree; - let leafs = labels[first_base_leaf..first_base_leaf + leafs_per_base_tree] - .iter() - .copied() - .map(Ok); - MerkleTreeWrapper::try_from_iter_with_config(leafs, config).unwrap_or_else( - |_| { - panic!( - "failed to create {} sub-tree {} base-tree {}", - tree_name, sub_index, base_index, - ) - }, - ) - }) - .collect(); - MerkleTreeWrapper::from_trees(base_trees).unwrap_or_else(|_| { - panic!( - "failed to create {} sub-tree {} from base-trees", - tree_name, sub_index, - ) - }) - }) - .collect(); - MerkleTreeWrapper::from_sub_trees(sub_trees) - .unwrap_or_else(|_| panic!("failed to create {} from sub-trees", tree_name)) - } -} +mod common; fn get_apex_leafs( tree_d_new: &MerkleTreeWrapper< @@ -153,33 +62,6 @@ fn get_apex_leafs( }) } -fn encode_new_replica( - labels_r_old: &[TreeRDomain], - labels_d_new: &[TreeDDomain], - phi: &TreeRDomain, - h: usize, -) -> Vec { - let sector_nodes = labels_r_old.len(); - assert_eq!(sector_nodes, labels_d_new.len()); - - // Right-shift each node-index by `get_high_bits_shr` to get its `h` high bits. - let node_index_bit_len = sector_nodes.trailing_zeros() as usize; - let get_high_bits_shr = node_index_bit_len - h; - - (0..sector_nodes) - .map(|node| { - // Take the `h` high bits from the node-index and compute this node's compute `rho`. 
- let high = (node >> get_high_bits_shr) as u32; - let rho = rho(phi, high); - - // `label_r_new = label_r_old + label_d_new * rho` - let label_r_old: Fr = labels_r_old[node].into(); - let label_d_new: Fr = labels_d_new[node].into(); - (label_r_old + label_d_new * rho).into() - }) - .collect() -} - fn test_empty_sector_update_circuit(sector_nodes: usize, constraints_expected: usize) where TreeR: MerkleTreeTrait, @@ -188,7 +70,7 @@ where let sector_bytes = sector_nodes << 5; let hs = hs(sector_nodes); - let h = hs[H_SELECT.trailing_zeros() as usize]; + let h = hs[common::H_SELECT.trailing_zeros() as usize]; let mut rng = XorShiftRng::from_seed(TEST_SEED); @@ -200,7 +82,7 @@ where let labels_r_old: Vec = (0..sector_nodes) .map(|_| TreeRDomain::random(&mut rng)) .collect(); - let tree_r_old = create_tree::(&labels_r_old, tmp_path, "tree-r-old"); + let tree_r_old = common::create_tree::(&labels_r_old, tmp_path, "tree-r-old"); let root_r_old = tree_r_old.root(); let comm_c = TreeRDomain::random(&mut rng); let comm_r_old = ::Function::hash2(&comm_c, &root_r_old); @@ -209,15 +91,15 @@ where let labels_d_new: Vec = (0..sector_nodes) .map(|_| TreeDDomain::random(&mut rng)) .collect(); - let tree_d_new = create_tree::(&labels_d_new, tmp_path, "tree-d-new"); + let tree_d_new = common::create_tree::(&labels_d_new, tmp_path, "tree-d-new"); let comm_d_new = tree_d_new.root(); // `phi = H(comm_d_new || comm_r_old)` let phi = phi(&comm_d_new, &comm_r_old); // Encode `labels_d_new` into `labels_r_new` and create TreeRNew. - let labels_r_new = encode_new_replica(&labels_r_old, &labels_d_new, &phi, h); - let tree_r_new = create_tree::(&labels_r_new, tmp_path, "tree-r-new"); + let labels_r_new = common::encode_new_replica(&labels_r_old, &labels_d_new, &phi, h); + let tree_r_new = common::create_tree::(&labels_r_new, tmp_path, "tree-r-new"); let root_r_new = tree_r_new.root(); let comm_r_new = ::Function::hash2(&comm_c, &root_r_new); @@ -313,3 +195,22 @@ fn test_empty_sector_update_circuit_32kib() { type TreeR = MerkleTreeWrapper, U8, U8, U2>; test_empty_sector_update_circuit::(SECTOR_SIZE_32_KIB, 6760091); } + +#[test] +#[cfg(feature = "isolated-testing")] +fn test_empty_sector_update_constraints_32gib() { + type TreeR = MerkleTreeWrapper, U8, U8, U0>; + let pub_inputs = circuit::PublicInputs::empty(); + + let priv_inputs = circuit::PrivateInputs::empty(SECTOR_SIZE_32_GIB); + + let circuit = EmptySectorUpdateCircuit:: { + pub_params: PublicParams::from_sector_size(SECTOR_SIZE_32_GIB as u64 * 32), + pub_inputs, + priv_inputs, + }; + + let mut cs = BenchCS::::new(); + circuit.synthesize(&mut cs).expect("failed to synthesize"); + assert_eq!(cs.num_constraints(), 81049499) +} diff --git a/storage-proofs-update/tests/circuit_poseidon.rs b/storage-proofs-update/tests/circuit_poseidon.rs new file mode 100644 index 000000000..16cd46003 --- /dev/null +++ b/storage-proofs-update/tests/circuit_poseidon.rs @@ -0,0 +1,166 @@ +#![allow(unused_imports)] +#![allow(dead_code)] +use std::path::Path; + +use bellperson::{util_cs::bench_cs::BenchCS, util_cs::test_cs::TestConstraintSystem, Circuit}; +use blstrs::Scalar as Fr; +use filecoin_hashers::{Domain, HashFunction, Hasher}; +use generic_array::typenum::{Unsigned, U0, U2, U4, U8}; +use merkletree::store::{DiskStore, StoreConfig}; +use rand::SeedableRng; +use rand_xorshift::XorShiftRng; +use storage_proofs_core::{ + merkle::{MerkleTreeTrait, MerkleTreeWrapper}, + util::default_rows_to_discard, + TEST_SEED, +}; + +use storage_proofs_update::{ + circuit_poseidon, + 
circuit_poseidon::EmptySectorUpdateCircuit, + constants::{ + apex_leaf_count, hs, partition_count, validate_tree_r_shape, TreeD, TreeDDomain, + TreeRDomain, TreeRHasher, SECTOR_SIZE_16_KIB, SECTOR_SIZE_1_KIB, SECTOR_SIZE_2_KIB, + SECTOR_SIZE_32_KIB, SECTOR_SIZE_4_KIB, SECTOR_SIZE_8_KIB, + }, + phi, rho, vanilla, Challenges, PublicParams, +}; + +use storage_proofs_update::constants::SECTOR_SIZE_32_GIB; +use tempfile::tempdir; + +mod common; + +fn test_empty_sector_update_circuit(sector_nodes: usize, constraints_expected: usize) +where + TreeR: MerkleTreeTrait, +{ + validate_tree_r_shape::(sector_nodes); + + let sector_bytes = sector_nodes << 5; + let hs = hs(sector_nodes); + let h = hs[common::H_SELECT.trailing_zeros() as usize]; + + let mut rng = XorShiftRng::from_seed(TEST_SEED); + + // Merkle tree storage directory. + let tmp_dir = tempdir().unwrap(); + let tmp_path = tmp_dir.path(); + + // Create random TreeROld. + let labels_r_old: Vec = (0..sector_nodes) + .map(|_| TreeRDomain::random(&mut rng)) + .collect(); + let tree_r_old = common::create_tree::(&labels_r_old, tmp_path, "tree-r-old"); + let root_r_old = tree_r_old.root(); + let comm_c = TreeRDomain::random(&mut rng); + let comm_r_old = ::Function::hash2(&comm_c, &root_r_old); + + // Create random TreeDNew. + let labels_d_new: Vec = (0..sector_nodes) + .map(|_| TreeRDomain::random(&mut rng)) + .collect(); + let tree_d_new = common::create_tree::(&labels_d_new, tmp_path, "tree-d-new"); + let comm_d_new = tree_d_new.root(); + + // `phi = H(comm_d_new || comm_r_old)` + let phi = phi(&comm_d_new, &comm_r_old); + + // Encode `labels_d_new` into `labels_r_new` and create TreeRNew. + let labels_r_new = common::encode_new_replica(&labels_r_old, &labels_d_new, &phi, h); + let tree_r_new = common::create_tree::(&labels_r_new, tmp_path, "tree-r-new"); + let root_r_new = tree_r_new.root(); + let comm_r_new = ::Function::hash2(&comm_c, &root_r_new); + + let pub_params = PublicParams::from_sector_size(sector_bytes as u64); + + { + // Generate vanilla-proof. + let challenge_proofs: Vec> = + Challenges::new_poseidon(sector_nodes, comm_r_new) + .enumerate() + .take(pub_params.challenge_count * pub_params.partition_count) + .map(|(i, c)| { + let c = c as usize; + let proof_r_old = tree_r_old.gen_proof(c).unwrap_or_else(|_| { + panic!("failed to generate `proof_r_old` for c_{}={}", i, c) + }); + let proof_d_new = tree_d_new.gen_proof(c).unwrap_or_else(|_| { + panic!("failed to generate `proof_d_new` for c_{}={}", i, c) + }); + let proof_r_new = tree_r_new.gen_proof(c).unwrap_or_else(|_| { + panic!("failed to generate `proof_r_new` for c_{}={}", i, c) + }); + + circuit_poseidon::ChallengeProofVanilla { + proof_r_old, + proof_d_new, + proof_r_new, + } + }) + .collect(); + assert_eq!( + challenge_proofs.len(), + pub_params.challenge_count * pub_params.partition_count + ); + // Create circuit. 
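Before the public inputs are constructed below, note the round trip between `h` and the one-hot `h_select`: the tests pick `h = hs[H_SELECT.trailing_zeros()]`, and `PublicInputs::new` maps `h` back to `h_select = 1 << index_of(h)`. A std-only sketch; the `hs` table here is a placeholder, not the crate's per-sector-size table.

```rust
// Hypothetical helper mirroring the lookup in `PublicInputs::new`.
fn h_to_h_select(hs: &[usize], h: usize) -> u64 {
    let index = hs.iter().position(|&allowed| allowed == h).expect("invalid h");
    1u64 << index
}

fn main() {
    let hs = [1usize, 2, 3, 4, 5, 6]; // placeholder list of allowed `h` values
    let h_select: u64 = 1 << 2;       // same shape as `common::H_SELECT`
    let h = hs[h_select.trailing_zeros() as usize];
    assert_eq!(h_to_h_select(&hs, h), h_select);
}
```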
+ let pub_inputs = circuit_poseidon::PublicInputs::new( + sector_nodes, + h, + comm_r_old, + comm_d_new, + comm_r_new, + ); + + let pub_inputs_vec = pub_inputs.to_vec(); + + let priv_inputs = circuit_poseidon::PrivateInputs::new(comm_c, &challenge_proofs); + + let circuit = EmptySectorUpdateCircuit:: { + pub_params: pub_params.clone(), + pub_inputs, + priv_inputs, + }; + + let mut cs = TestConstraintSystem::::new(); + circuit.synthesize(&mut cs).expect("failed to synthesize"); + assert!(cs.is_satisfied()); + assert!(cs.verify(&pub_inputs_vec)); + assert_eq!(cs.num_constraints(), constraints_expected); + } +} + +#[test] +#[cfg(feature = "isolated-testing")] +fn test_empty_sector_update_circuit_1kib() { + type TreeR = MerkleTreeWrapper, U8, U4, U0>; + test_empty_sector_update_circuit::(SECTOR_SIZE_1_KIB, 32164); //old 1248389 +} + +#[test] +#[cfg(feature = "isolated-testing")] +fn test_empty_sector_update_circuit_8kib() { + type TreeR = MerkleTreeWrapper, U8, U4, U0>; + test_empty_sector_update_circuit::(SECTOR_SIZE_8_KIB, 47974); //old 2620359 +} + +#[test] +#[cfg(feature = "isolated-testing")] +fn test_empty_sector_update_constraints_32gib() { + type TreeR = MerkleTreeWrapper, U8, U8, U0>; + let pub_inputs = circuit_poseidon::PublicInputs::empty(); + + let priv_inputs = circuit_poseidon::PrivateInputs::empty(SECTOR_SIZE_32_GIB); + + let circuit = EmptySectorUpdateCircuit:: { + pub_params: PublicParams::from_sector_size(SECTOR_SIZE_32_GIB as u64 * 32), + pub_inputs, + priv_inputs, + }; + + let mut cs = BenchCS::::new(); + circuit.synthesize(&mut cs).expect("failed to synthesize"); + //assert!(cs.is_satisfied()); + //assert!(cs.verify(&pub_inputs_vec)); + assert_eq!(cs.num_constraints(), 22305906) +} diff --git a/storage-proofs-update/tests/common/mod.rs b/storage-proofs-update/tests/common/mod.rs new file mode 100644 index 000000000..b4714ace4 --- /dev/null +++ b/storage-proofs-update/tests/common/mod.rs @@ -0,0 +1,145 @@ +use std::path::Path; + +use bellperson::{util_cs::test_cs::TestConstraintSystem, Circuit}; +use blstrs::Scalar as Fr; +use filecoin_hashers::{Domain, HashFunction, Hasher}; +use generic_array::typenum::{Unsigned, U0, U2, U4, U8}; +use merkletree::store::{DiskStore, StoreConfig}; +use rand::SeedableRng; +use rand_xorshift::XorShiftRng; +use storage_proofs_core::{ + merkle::{MerkleTreeTrait, MerkleTreeWrapper}, + util::default_rows_to_discard, + TEST_SEED, +}; +use storage_proofs_update::{ + circuit, + constants::{ + apex_leaf_count, hs, partition_count, validate_tree_r_shape, TreeD, TreeDDomain, + TreeRDomain, TreeRHasher, SECTOR_SIZE_16_KIB, SECTOR_SIZE_1_KIB, SECTOR_SIZE_2_KIB, + SECTOR_SIZE_32_KIB, SECTOR_SIZE_4_KIB, SECTOR_SIZE_8_KIB, + }, + phi, rho, vanilla, Challenges, EmptySectorUpdateCircuit, PublicParams, +}; +use tempfile::tempdir; + +// Selects a value for `h` via `h = hs[log2(h_select)]`; default to taking `h = hs[2]`. +pub const H_SELECT: u64 = 1 << 2; + +pub fn create_tree( + labels: &[<::Hasher as Hasher>::Domain], + tmp_path: &Path, + tree_name: &str, +) -> MerkleTreeWrapper +{ + let sector_nodes = labels.len(); + let base_arity = Tree::Arity::to_usize(); + let sub_arity = Tree::SubTreeArity::to_usize(); + let top_arity = Tree::TopTreeArity::to_usize(); + + // Create a single base-tree, a single sub-tree (out of base-trees), or a single top-tree + // (out of sub-trees, each made of base-trees). 
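The branches below pick one of three layouts; a std-only worked example of the resulting counts may help. For instance, the 32KiB test tree in `tests/circuit.rs` uses arities 8/8/2, giving 16 base trees grouped into 2 sub-trees under one top tree. `tree_layout` is a hypothetical helper, not a function of this crate.

```rust
// How `create_tree` splits the sector's leaves across base trees.
fn tree_layout(sector_nodes: usize, sub_arity: usize, top_arity: usize) -> (usize, usize) {
    let base_tree_count = match (sub_arity, top_arity) {
        (0, 0) => 1,             // a single base tree
        (sub, 0) => sub,         // one sub-tree over `sub` base trees
        (sub, top) => top * sub, // one top tree over `top` sub-trees of `sub` base trees
    };
    let leafs_per_base_tree = sector_nodes / base_tree_count;
    (base_tree_count, leafs_per_base_tree)
}

fn main() {
    // 32KiB sector = 1024 nodes; arities 8/8/2 give 16 base trees of 64 leaves each.
    assert_eq!(tree_layout(1 << 10, 8, 2), (16, 64));
}
```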
+ if sub_arity == 0 && top_arity == 0 { + let config = StoreConfig::new( + tmp_path, + tree_name.to_string(), + default_rows_to_discard(sector_nodes, base_arity), + ); + let leafs = labels.iter().copied().map(Ok); + MerkleTreeWrapper::try_from_iter_with_config(leafs, config) + .unwrap_or_else(|_| panic!("failed to create non-compound-tree {}", tree_name)) + } else if top_arity == 0 { + let base_tree_count = sub_arity; + let leafs_per_base_tree = sector_nodes / base_tree_count; + let rows_to_discard = default_rows_to_discard(leafs_per_base_tree, base_arity); + let base_trees: Vec> = (0 + ..base_tree_count) + .map(|i| { + let config = StoreConfig::new( + tmp_path, + format!("{}-base-{}", tree_name, i), + rows_to_discard, + ); + let leafs = labels[i * leafs_per_base_tree..(i + 1) * leafs_per_base_tree] + .iter() + .copied() + .map(Ok); + MerkleTreeWrapper::try_from_iter_with_config(leafs, config) + .unwrap_or_else(|_| panic!("failed to create {} base-tree {}", tree_name, i)) + }) + .collect(); + MerkleTreeWrapper::from_trees(base_trees) + .unwrap_or_else(|_| panic!("failed to create {} from base-trees", tree_name)) + } else { + let base_tree_count = top_arity * sub_arity; + let sub_tree_count = top_arity; + let leafs_per_base_tree = sector_nodes / base_tree_count; + let base_trees_per_sub_tree = sub_arity; + let rows_to_discard = default_rows_to_discard(leafs_per_base_tree, base_arity); + let sub_trees: Vec< + MerkleTreeWrapper, + > = (0..sub_tree_count) + .map(|sub_index| { + let first_sub_leaf = sub_index * base_trees_per_sub_tree * leafs_per_base_tree; + let base_trees: Vec> = (0 + ..base_trees_per_sub_tree) + .map(|base_index| { + let config = StoreConfig::new( + tmp_path, + format!("{}-sub-{}-base-{}", tree_name, sub_index, base_index), + rows_to_discard, + ); + let first_base_leaf = first_sub_leaf + base_index * leafs_per_base_tree; + let leafs = labels[first_base_leaf..first_base_leaf + leafs_per_base_tree] + .iter() + .copied() + .map(Ok); + MerkleTreeWrapper::try_from_iter_with_config(leafs, config).unwrap_or_else( + |_| { + panic!( + "failed to create {} sub-tree {} base-tree {}", + tree_name, sub_index, base_index, + ) + }, + ) + }) + .collect(); + MerkleTreeWrapper::from_trees(base_trees).unwrap_or_else(|_| { + panic!( + "failed to create {} sub-tree {} from base-trees", + tree_name, sub_index, + ) + }) + }) + .collect(); + MerkleTreeWrapper::from_sub_trees(sub_trees) + .unwrap_or_else(|_| panic!("failed to create {} from sub-trees", tree_name)) + } +} + +pub fn encode_new_replica( + labels_r_old: &[TreeRDomain], + labels_d_new: &[TreeD], + phi: &TreeRDomain, + h: usize, +) -> Vec { + let sector_nodes = labels_r_old.len(); + assert_eq!(sector_nodes, labels_d_new.len()); + + // Right-shift each node-index by `get_high_bits_shr` to get its `h` high bits. + let node_index_bit_len = sector_nodes.trailing_zeros() as usize; + let get_high_bits_shr = node_index_bit_len - h; + + (0..sector_nodes) + .map(|node| { + // Take the `h` high bits from the node-index and compute this node's compute `rho`. + let high = (node >> get_high_bits_shr) as u32; + let rho = rho(phi, high); + + // `label_r_new = label_r_old + label_d_new * rho` + let label_r_old: Fr = labels_r_old[node].into(); + let label_d_new: Fr = labels_d_new[node].into(); + (label_r_old + label_d_new * rho).into() + }) + .collect() +}
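A worked example of the high-bits selection in `encode_new_replica` above: every node index is right-shifted so only its `h` most significant bits remain, and all nodes sharing those bits are encoded with the same `rho`. Std-only sketch; the numbers are illustrative and `high_bits` is a hypothetical helper.

```rust
// Keep only the top `h` bits of a node index, as `encode_new_replica` does.
fn high_bits(node: usize, sector_nodes: usize, h: usize) -> u32 {
    let node_index_bit_len = sector_nodes.trailing_zeros() as usize;
    let shr = node_index_bit_len - h;
    (node >> shr) as u32
}

fn main() {
    let sector_nodes = 1 << 8; // e.g. an 8KiB sector has 256 nodes
    // Node 0b1101_0110 keeps bits 0b110, so it shares its `rho` with every
    // other node whose index starts with 0b110.
    assert_eq!(high_bits(0b1101_0110, sector_nodes, 3), 0b110);
}
```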