feat: update bellperson to v0.18.0 #1529

Merged (4 commits) on Oct 25, 2021
2 changes: 1 addition & 1 deletion fil-proofs-param/Cargo.toml
@@ -25,7 +25,7 @@ serde = { version = "1.0", features = ["rc", "derive"] }
serde_json = "1.0"
ff = "0.11.0"
blake2b_simd = "0.5"
bellperson = "0.17.0"
bellperson = "0.18.0"
log = "0.4.7"
fil_logger = "0.1"
env_proxy = "0.4"
4 changes: 2 additions & 2 deletions fil-proofs-param/src/bin/paramfetch.rs
@@ -91,7 +91,7 @@ fn download_ipget(version: &str, verbose: bool) -> Result<()> {
))?;
trace!("making GET request: {}", url.as_str());
let client = Client::builder()
-.proxy(Proxy::custom(move |url| env_proxy::for_url(&url).to_url()))
+.proxy(Proxy::custom(move |url| env_proxy::for_url(url).to_url()))
.build()?;
let mut resp = client.get(url).send()?;
trace!("received GET response");
@@ -162,7 +162,7 @@ fn get_filenames_requiring_download(
return true;
};
trace!("params file found");
-let calculated_digest = match get_digest_for_file_within_cache(&filename) {
+let calculated_digest = match get_digest_for_file_within_cache(filename) {
Ok(digest) => digest,
Err(e) => {
warn!("failed to hash file {}, marking for download", e);
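Most of the source changes in this PR follow the pattern visible in paramfetch.rs above: a `&` is dropped where the value being passed is already a reference. A minimal, hypothetical sketch of that pattern (names and data are invented for illustration, not taken from the codebase); the extra borrow compiles because Rust auto-dereferences `&&T` where `&T` is expected, but it is the kind of call site clippy's `needless_borrow` lint reports:

```rust
fn fetch(url: &str) {
    println!("fetching {}", url);
}

fn main() {
    let urls = vec![
        String::from("https://example.com/a"),
        String::from("https://example.com/b"),
    ];
    // Iterating over `&urls` already yields `&String` items.
    for url in &urls {
        fetch(&url); // accepted via auto-deref, but a needless extra borrow
        fetch(url); // the form this PR switches call sites to
    }
}
```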
5 changes: 1 addition & 4 deletions fil-proofs-param/tests/paramfetch/mod.rs
@@ -26,10 +26,7 @@ fn rand_bytes_with_blake2b() -> Result<(Vec<u8>, String), FailureError> {

io::copy(&mut as_slice, &mut hasher)?;

-Ok((
-bytes.iter().cloned().collect(),
-hasher.finalize().to_hex()[..32].into(),
-))
+Ok((bytes.to_vec(), hasher.finalize().to_hex()[..32].into()))
}

#[test]
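The test change above collapses a manual clone-and-collect into `to_vec()`. As a standalone sketch (not taken from the repository), both forms copy a byte slice into the same `Vec<u8>`; `to_vec()` is simply the more direct spelling:

```rust
fn main() {
    let bytes: [u8; 4] = [0xde, 0xad, 0xbe, 0xef];

    // Element-by-element clone into a Vec...
    let collected: Vec<u8> = bytes.iter().cloned().collect();
    // ...versus the slice helper that performs the same copy in one call.
    let copied: Vec<u8> = bytes.to_vec();

    assert_eq!(collected, copied);
}
```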
2 changes: 1 addition & 1 deletion fil-proofs-tooling/Cargo.toml
@@ -25,7 +25,7 @@ regex = "1.3.7"
commandspec = "0.12.2"
chrono = { version = "0.4.7", features = ["serde"] }
memmap = "0.7.0"
bellperson = "0.17.0"
bellperson = "0.18.0"
rand = "0.8"
tempfile = "3.0.8"
cpu-time = "1.0.0"
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/bin/benchy/prodbench.rs
@@ -229,7 +229,7 @@ pub fn run(
)?;

clear_cache::<DefaultOctLCTree>(
-&replica_info.private_replica_info.cache_dir_path(),
+replica_info.private_replica_info.cache_dir_path(),
)?;

seal_commit_phase2(cfg, phase1_output, PROVER_ID, *sector_id)
4 changes: 2 additions & 2 deletions fil-proofs-tooling/src/bin/benchy/window_post.rs
@@ -535,7 +535,7 @@ pub fn run_window_post_bench<Tree: 'static + MerkleTreeTrait>(
&RANDOMNESS,
&pub_replica_info,
PROVER_ID,
-&proof,
+proof,
)
.unwrap();
let verify_window_post_measurement = measure(|| {
@@ -544,7 +544,7 @@ pub fn run_window_post_bench<Tree: 'static + MerkleTreeTrait>(
&RANDOMNESS,
&pub_replica_info,
PROVER_ID,
-&proof,
+proof,
)
})
.expect("failed to verify window post proof");
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/bin/benchy/winning_post.rs
@@ -95,7 +95,7 @@ pub fn run_fallback_post_bench<Tree: 'static + MerkleTreeTrait>(
&RANDOMNESS,
&pub_replica_info[..],
PROVER_ID,
-&proof,
+proof,
)
})
.expect("failed to verify winning post proof");
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/bin/check_parameters/main.rs
@@ -32,7 +32,7 @@ fn main() {
match matches.subcommand() {
("map", Some(m)) => {
let parameter_file_str = value_t!(m, "param", String).expect("param failed");
-run_map(&Path::new(&parameter_file_str)).expect("run_map failed");
+run_map(Path::new(&parameter_file_str)).expect("run_map failed");
}
_ => panic!("Unrecognized subcommand"),
}
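The check_parameters change above drops a borrow of a call that already returns a reference: `Path::new` returns `&Path`, so `&Path::new(..)` produces a `&&Path` that only works through auto-deref. A small illustrative program (the `run_map` signature here is assumed for the sketch, not the tool's real one):

```rust
use std::path::Path;

// Assumed signature for illustration only.
fn run_map(param_path: &Path) {
    println!("mapping parameters at {:?}", param_path);
}

fn main() {
    let parameter_file_str = String::from("parameters.json");
    run_map(&Path::new(&parameter_file_str)); // `&&Path`: needless extra borrow
    run_map(Path::new(&parameter_file_str)); // `&Path`: what the diff switches to
}
```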
6 changes: 3 additions & 3 deletions fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs
@@ -94,10 +94,10 @@ fn thread_fun(
// already there
if gpu_stealing {
// Run the actual proof
-generate_post_in_priority(&priv_replica_infos);
+generate_post_in_priority(priv_replica_infos);
} else {
// Run the actual proof
-generate_post(&priv_replica_infos);
+generate_post(priv_replica_infos);
}

// Waiting for this thread to be killed
@@ -186,7 +186,7 @@ fn processes_mode(parallel: u8, gpu_stealing: bool) {
// Put each process into it's own scope (the other one is due to the if statement)
{
let name = "high";
-let child = spawn_process(&name, gpu_stealing);
+let child = spawn_process(name, gpu_stealing);
children.insert(name.to_string(), child);
}

18 changes: 9 additions & 9 deletions fil-proofs-tooling/src/bin/update_tree_r_cache/main.rs
@@ -80,22 +80,22 @@ fn get_tree_r_last_root(
} else if is_sector_shape_sub2(sector_size) {
let tree_r_last = SectorShapeSub2::from_store_configs_and_replica(
base_tree_leafs,
-&configs,
-&replica_config,
+configs,
+replica_config,
)?;
tree_r_last.root()
} else if is_sector_shape_sub8(sector_size) {
let tree_r_last = SectorShapeSub8::from_store_configs_and_replica(
base_tree_leafs,
-&configs,
-&replica_config,
+configs,
+replica_config,
)?;
tree_r_last.root()
} else if is_sector_shape_top2(sector_size) {
let tree_r_last = SectorShapeTop2::from_sub_tree_store_configs_and_replica(
base_tree_leafs,
-&configs,
-&replica_config,
+configs,
+replica_config,
)?;
tree_r_last.root()
} else {
@@ -190,7 +190,7 @@ fn run_inspect(sector_size: usize, cache: &Path, replica_path: &Path) -> Result<
&configs,
&replica_config,
)?;
-let p_aux = get_persistent_aux(&cache)?;
+let p_aux = get_persistent_aux(cache)?;

println!("CommRLast from p_aux: {:?}", p_aux.comm_r_last);
println!(
@@ -226,7 +226,7 @@ fn run_verify(sector_size: usize, cache: &Path, replica_path: &Path) -> Result<(
let store = LCStore::new_from_disk_with_reader(
base_tree_len,
OCT_ARITY,
-&config,
+config,
ExternalReader::new_from_config(&replica_config, i)?,
)?;
cached_base_tree_roots.push(store.last()?);
@@ -256,7 +256,7 @@ fn run_verify(sector_size: usize, cache: &Path, replica_path: &Path) -> Result<(
create_dir_all(&tmp_path)?;

let (rebuilt_tree_r_last_root, rebuilt_base_tree_roots) =
-run_rebuild(sector_size, &tmp_path, &replica_path)?;
+run_rebuild(sector_size, tmp_path, replica_path)?;

remove_dir_all(&tmp_path)?;

4 changes: 2 additions & 2 deletions filecoin-hashers/Cargo.toml
@@ -9,7 +9,7 @@ repository = "https://github.com/filecoin-project/rust-fil-proofs"
readme = "README.md"

[dependencies]
bellperson = "0.17.0"
bellperson = "0.18.0"
blstrs = "0.4.0"
generic-array = "0.14.4"
merkletree = "0.21.0"
@@ -18,7 +18,7 @@ anyhow = "1.0.34"
serde = "1.0.117"
rand = "0.8.0"

neptune = { version = "5.0.0", optional = true, features = ["arity2", "arity4", "arity8", "arity11", "arity16", "arity24", "arity36"] }
neptune = { version = "5.1.0", optional = true, features = ["arity2", "arity4", "arity8", "arity11", "arity16", "arity24", "arity36"] }
lazy_static = { version = "1.4.0", optional = true }
blake2s_simd = { version = "0.5.11", optional = true }
sha2 = { version = "0.9.2", optional = true }
20 changes: 9 additions & 11 deletions filecoin-hashers/src/poseidon.rs
@@ -117,7 +117,7 @@ impl Domain for PoseidonDomain {
"invalid amount of bytes"
);
let mut repr = <Fr as PrimeField>::Repr::default();
-repr.copy_from_slice(&raw);
+repr.copy_from_slice(raw);
Ok(PoseidonDomain(repr))
}

@@ -170,7 +170,7 @@ fn shared_hash(data: &[u8]) -> PoseidonDomain {
// We could truncate so `bytes_into_frs` cannot fail, then ensure `data` is always `fr_safe`.
let preimage = data
.chunks(32)
-.map(|ref chunk| {
+.map(|chunk| {
Fr::from_repr_vartime(PoseidonDomain::from_slice(chunk).0).expect("from_repr failure")
})
.collect::<Vec<_>>();
@@ -181,19 +181,19 @@ fn shared_hash_frs(preimage: &[Fr]) -> Fr {
fn shared_hash_frs(preimage: &[Fr]) -> Fr {
match preimage.len() {
2 => {
-let mut p = Poseidon::new_with_preimage(&preimage, &POSEIDON_CONSTANTS_2);
+let mut p = Poseidon::new_with_preimage(preimage, &POSEIDON_CONSTANTS_2);
p.hash()
}
4 => {
-let mut p = Poseidon::new_with_preimage(&preimage, &POSEIDON_CONSTANTS_4);
+let mut p = Poseidon::new_with_preimage(preimage, &POSEIDON_CONSTANTS_4);
p.hash()
}
8 => {
-let mut p = Poseidon::new_with_preimage(&preimage, &POSEIDON_CONSTANTS_8);
+let mut p = Poseidon::new_with_preimage(preimage, &POSEIDON_CONSTANTS_8);
p.hash()
}
16 => {
-let mut p = Poseidon::new_with_preimage(&preimage, &POSEIDON_CONSTANTS_16);
+let mut p = Poseidon::new_with_preimage(preimage, &POSEIDON_CONSTANTS_16);
p.hash()
}

@@ -413,11 +413,9 @@ mod tests {
let p = t.gen_proof(0).expect("gen_proof failure"); // create a proof for the first value =k Fr::one()

assert_eq!(*p.path(), vec![0, 0]);
-assert_eq!(
-p.validate::<PoseidonFunction>()
-.expect("failed to validate"),
-true
-);
+assert!(p
+.validate::<PoseidonFunction>()
+.expect("failed to validate"));
}

// #[test]
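Two of the poseidon.rs changes above deserve a standalone note: `.map(|ref chunk| ..)` adds an extra layer of reference to items that are already `&[u8]` slices, and `assert_eq!(cond, true)` restates a boolean comparison. A hypothetical sketch of both cleanups (the data and closures are invented for illustration):

```rust
fn main() {
    let data = [1u8, 2, 3, 4];

    // `chunks(2)` already yields `&[u8]` items, so `|ref chunk|` would bind a
    // `&&[u8]`; binding `|chunk|` directly is the form the diff switches to.
    let sums: Vec<u32> = data
        .chunks(2)
        .map(|chunk| chunk.iter().map(|b| u32::from(*b)).sum())
        .collect();
    assert_eq!(sums, vec![3, 7]);

    // Asserting a boolean directly rather than comparing it against `true`.
    let all_positive = data.iter().all(|b| *b > 0);
    assert!(all_positive);
}
```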
2 changes: 1 addition & 1 deletion filecoin-proofs/Cargo.toml
@@ -23,7 +23,7 @@ serde = { version = "1.0", features = ["rc", "derive"] }
serde_json = "1.0"
ff = "0.11.0"
blake2b_simd = "0.5"
bellperson = "0.17.0"
bellperson = "0.18.0"
log = "0.4.7"
fil_logger = "0.1"
rayon = "1.1.0"
10 changes: 5 additions & 5 deletions filecoin-proofs/src/api/mod.rs
@@ -384,7 +384,7 @@ where
let source = BufReader::new(source);
let mut target = BufWriter::new(target);

-let written_bytes = sum_piece_bytes_with_alignment(&piece_lengths);
+let written_bytes = sum_piece_bytes_with_alignment(piece_lengths);
let piece_alignment = get_piece_alignment(written_bytes, piece_size);
let fr32_reader = Fr32Reader::new(source);

@@ -506,7 +506,7 @@ fn verify_store(config: &StoreConfig, arity: usize, required_configs: usize) ->
let store_len = config.size.expect("disk store size not configured");
for config in &configs {
ensure!(
-DiskStore::<DefaultPieceDomain>::is_consistent(store_len, arity, &config,)?,
+DiskStore::<DefaultPieceDomain>::is_consistent(store_len, arity, config,)?,
"Store is inconsistent: {:?}",
StoreConfig::data_path(&config.path, &config.id)
);
@@ -516,7 +516,7 @@ fn verify_store(config: &StoreConfig, arity: usize, required_configs: usize) ->
DiskStore::<DefaultPieceDomain>::is_consistent(
config.size.expect("disk store size not configured"),
arity,
-&config,
+config,
)?,
"Store is inconsistent: {:?}",
store_path
@@ -573,7 +573,7 @@ fn verify_level_cache_store<Tree: MerkleTreeTrait>(config: &StoreConfig) -> Resu
LevelCacheStore::<DefaultPieceDomain, File>::is_consistent(
store_len,
Tree::Arity::to_usize(),
-&config,
+config,
)?,
"Store is inconsistent: {:?}",
StoreConfig::data_path(&config.path, &config.id)
@@ -584,7 +584,7 @@ fn verify_level_cache_store<Tree: MerkleTreeTrait>(config: &StoreConfig) -> Resu
LevelCacheStore::<DefaultPieceDomain, File>::is_consistent(
config.size.expect("disk store size not configured"),
Tree::Arity::to_usize(),
-&config,
+config,
)?,
"Store is inconsistent: {:?}",
store_path
4 changes: 2 additions & 2 deletions filecoin-proofs/src/api/post_util.rs
@@ -47,7 +47,7 @@ pub fn clear_caches<Tree: MerkleTreeTrait>(
info!("clear_caches:start");

for replica in replicas.values() {
-clear_cache::<Tree>(&replica.cache_dir.as_path())?;
+clear_cache::<Tree>(replica.cache_dir.as_path())?;
}

info!("clear_caches:finish");
@@ -84,7 +84,7 @@ pub fn generate_fallback_sector_challenges<Tree: 'static + MerkleTreeTrait>(
let num_sectors_per_chunk = post_config.sector_count;
let partitions = match post_config.typ {
PoStType::Window => {
-get_partitions_for_window_post(pub_sectors.len(), &post_config).unwrap_or(1)
+get_partitions_for_window_post(pub_sectors.len(), post_config).unwrap_or(1)
}
PoStType::Winning => 1,
};
20 changes: 10 additions & 10 deletions filecoin-proofs/src/api/window_post.rs
@@ -42,8 +42,8 @@ pub fn generate_window_post_with_vanilla<Tree: 'static + MerkleTreeTrait>(
let prover_id_safe: <Tree::Hasher as Hasher>::Domain =
as_safe_commitment(&prover_id, "prover_id")?;

-let vanilla_params = window_post_setup_params(&post_config);
-let partitions = get_partitions_for_window_post(vanilla_proofs.len(), &post_config);
+let vanilla_params = window_post_setup_params(post_config);
+let partitions = get_partitions_for_window_post(vanilla_proofs.len(), post_config);

let setup_params = compound_proof::SetupParams {
vanilla_params,
@@ -55,7 +55,7 @@

let pub_params: compound_proof::PublicParams<'_, FallbackPoSt<'_, Tree>> =
FallbackPoStCompound::setup(&setup_params)?;
-let groth_params = get_post_params::<Tree>(&post_config)?;
+let groth_params = get_post_params::<Tree>(post_config)?;

let mut pub_sectors = Vec::with_capacity(vanilla_proofs.len());
for vanilla_proof in &vanilla_proofs {
@@ -73,7 +73,7 @@ pub fn generate_window_post_with_vanilla<Tree: 'static + MerkleTreeTrait>(
};

let partitioned_proofs = partition_vanilla_proofs(
-&post_config,
+post_config,
&pub_params.vanilla_params,
&pub_inputs,
partitions,
@@ -108,8 +108,8 @@ pub fn generate_window_post<Tree: 'static + MerkleTreeTrait>(
let randomness_safe = as_safe_commitment(randomness, "randomness")?;
let prover_id_safe = as_safe_commitment(&prover_id, "prover_id")?;

-let vanilla_params = window_post_setup_params(&post_config);
-let partitions = get_partitions_for_window_post(replicas.len(), &post_config);
+let vanilla_params = window_post_setup_params(post_config);
+let partitions = get_partitions_for_window_post(replicas.len(), post_config);

let sector_count = vanilla_params.sector_count;
let setup_params = compound_proof::SetupParams {
@@ -120,7 +120,7 @@

let pub_params: compound_proof::PublicParams<'_, FallbackPoSt<'_, Tree>> =
FallbackPoStCompound::setup(&setup_params)?;
-let groth_params = get_post_params::<Tree>(&post_config)?;
+let groth_params = get_post_params::<Tree>(post_config)?;

let trees: Vec<_> = replicas
.iter()
@@ -190,8 +190,8 @@ pub fn verify_window_post<Tree: 'static + MerkleTreeTrait>(
let randomness_safe = as_safe_commitment(randomness, "randomness")?;
let prover_id_safe = as_safe_commitment(&prover_id, "prover_id")?;

-let vanilla_params = window_post_setup_params(&post_config);
-let partitions = get_partitions_for_window_post(replicas.len(), &post_config);
+let vanilla_params = window_post_setup_params(post_config);
+let partitions = get_partitions_for_window_post(replicas.len(), post_config);

let setup_params = compound_proof::SetupParams {
vanilla_params,
Expand Down Expand Up @@ -222,7 +222,7 @@ pub fn verify_window_post<Tree: 'static + MerkleTreeTrait>(
};

let is_valid = {
-let verifying_key = get_post_verifying_key::<Tree>(&post_config)?;
+let verifying_key = get_post_verifying_key::<Tree>(post_config)?;
let multi_proof = MultiProof::new_from_reader(partitions, proof, &verifying_key)?;

FallbackPoStCompound::verify(