refactor: cleanup imports and remove globs (#1394)
DrPeterVanNostrand committed Jan 26, 2021
1 parent f4c584a · commit 35ca38a
Showing 148 changed files with 1,864 additions and 1,721 deletions.
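
Across these files the changes follow one pattern: scattered or fully qualified `use` statements (for example `std::io::copy`, `filecoin_proofs::constants::LAYERS`) are consolidated into single braced import lists such as `use std::io::{self, BufReader, Write};`, and glob imports like `use filecoin_proofs::types::*;` and `use Operation::*;` are removed so every name is qualified at its use site. Below is a minimal sketch of the variant-qualification half of that pattern; it uses a toy enum with illustrative names, not the crate's real `Operation` type.

```rust
// Toy stand-in for an enum like storage_proofs_core's `Operation`
// (names here are illustrative, not the crate's actual definitions).
enum Operation {
    GenerateTreeC,
    CommD,
}

// Before the cleanup, a `use Operation::*;` glob let the arms read
// `GenerateTreeC => ...`, hiding where each name comes from. After it,
// only the type is in scope and every variant is spelled out explicitly.
fn describe(op: &Operation) -> &'static str {
    match op {
        Operation::GenerateTreeC => "generating tree C",
        Operation::CommD => "computing CommD",
    }
}

fn main() {
    println!("{}", describe(&Operation::GenerateTreeC));
    println!("{}", describe(&Operation::CommD));
}
```

The effect is purely on readability: the compiler resolves both forms identically, but explicit qualification keeps a match self-documenting when the enum is defined in another crate.
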
fil-proofs-param/tests/paramfetch/mod.rs (15 changes: 8 additions & 7 deletions)
@@ -1,29 +1,30 @@
mod session;

use std::collections::btree_map::BTreeMap;
use std::collections::BTreeMap;
use std::fs::File;
use std::io::{BufReader, Write};
use std::io::{self, BufReader, Write};
use std::path::PathBuf;

use blake2b_simd::State as Blake2b;
use failure::Error as FailureError;
use rand::Rng;
use rand::{thread_rng, Rng};
use storage_proofs_core::parameter_cache::{ParameterData, ParameterMap};

use crate::support::tmp_manifest;

mod session;

use session::ParamFetchSessionBuilder;

/// Produce a random sequence of bytes and first 32 characters of hex encoded
/// BLAKE2b checksum. This helper function must be kept up-to-date with the
/// parampublish implementation.
fn rand_bytes_with_blake2b() -> Result<(Vec<u8>, String), FailureError> {
let bytes = rand::thread_rng().gen::<[u8; 32]>();
let bytes = thread_rng().gen::<[u8; 32]>();

let mut hasher = Blake2b::new();

let mut as_slice = &bytes[..];

std::io::copy(&mut as_slice, &mut hasher)?;
io::copy(&mut as_slice, &mut hasher)?;

Ok((
bytes.iter().cloned().collect(),
fil-proofs-param/tests/paramfetch/session.rs (9 changes: 4 additions & 5 deletions)
@@ -1,11 +1,10 @@
use std::fs::File;
use std::io::Read;
use std::io::{self, Read};
use std::path::{Path, PathBuf};

use failure::SyncFailure;
use rexpect::session::PtyReplSession;
use tempfile;
use tempfile::TempDir;
use tempfile::{tempdir, TempDir};

use crate::support::{cargo_bin, spawn_bash_with_retries};

@@ -19,7 +18,7 @@ pub struct ParamFetchSessionBuilder {

impl ParamFetchSessionBuilder {
pub fn new(manifest: Option<PathBuf>) -> ParamFetchSessionBuilder {
let temp_dir = tempfile::tempdir().expect("could not create temp dir");
let temp_dir = tempdir().expect("could not create temp dir");

ParamFetchSessionBuilder {
cache_dir: temp_dir,
@@ -56,7 +55,7 @@ impl ParamFetchSessionBuilder {

let mut file = File::create(&pbuf).expect("failed to create file in temp dir");

std::io::copy(r, &mut file).expect("failed to copy bytes to file");
io::copy(r, &mut file).expect("failed to copy bytes to file");

self
}
fil-proofs-param/tests/parampublish/mod.rs (3 changes: 1 addition & 2 deletions)
@@ -1,5 +1,4 @@
pub mod prompts_to_publish;
pub mod read_metadata_files;
pub mod write_json_manifest;

pub mod support;
pub mod write_json_manifest;
fil-proofs-param/tests/parampublish/support/session.rs (18 changes: 8 additions & 10 deletions)
@@ -1,14 +1,12 @@
use std::fs::File;
use std::io::{Read, Write};
use std::fs::{read_dir, File};
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};

use failure::SyncFailure;
use rand::Rng;
use rand::{thread_rng, Rng};
use rexpect::session::PtyReplSession;
use tempfile;
use tempfile::TempDir;

use storage_proofs_core::parameter_cache::CacheEntryMetadata;
use tempfile::{tempdir, TempDir};

use crate::support::{cargo_bin, spawn_bash_with_retries, FakeIpfsBin};

@@ -23,7 +21,7 @@ pub struct ParamPublishSessionBuilder {

impl ParamPublishSessionBuilder {
pub fn new() -> ParamPublishSessionBuilder {
let temp_dir = tempfile::tempdir().expect("could not create temp dir");
let temp_dir = tempdir().expect("could not create temp dir");

let mut pbuf = temp_dir.path().to_path_buf();
pbuf.push("parameters.json");
@@ -60,7 +58,7 @@ impl ParamPublishSessionBuilder {

let mut file = File::create(&pbuf).expect("failed to create file in temp dir");

let random_bytes = rand::thread_rng().gen::<[u8; 32]>();
let random_bytes = thread_rng().gen::<[u8; 32]>();
file.write_all(&random_bytes)
.expect("failed to write bytes");

@@ -79,7 +77,7 @@ impl ParamPublishSessionBuilder {

let mut file = File::create(&pbuf).expect("failed to create file in temp dir");

std::io::copy(r, &mut file).expect("failed to copy bytes to file");
io::copy(r, &mut file).expect("failed to copy bytes to file");

self.cached_file_pbufs.push(pbuf);
self
@@ -122,7 +120,7 @@ impl ParamPublishSessionBuilder {

let cache_dir_path = format!("{:?}", self.cache_dir.path());

let cache_contents: Vec<PathBuf> = std::fs::read_dir(&self.cache_dir)
let cache_contents: Vec<PathBuf> = read_dir(&self.cache_dir)
.unwrap_or_else(|_| panic!("failed to read cache dir {:?}", self.cache_dir))
.map(|x| x.expect("failed to get dir entry"))
.map(|x| x.path())
fil-proofs-param/tests/parampublish/write_json_manifest.rs (12 changes: 6 additions & 6 deletions)
@@ -1,16 +1,16 @@
use std::collections::btree_map::BTreeMap;
use std::collections::BTreeMap;
use std::fs::File;
use std::path::Path;

use failure::Error as FailureError;

use storage_proofs_core::parameter_cache::{CacheEntryMetadata, ParameterData};

use crate::parampublish::support::session::ParamPublishSessionBuilder;
use crate::support::{tmp_manifest, FakeIpfsBin};
use crate::{
parampublish::support::session::ParamPublishSessionBuilder,
support::{tmp_manifest, FakeIpfsBin},
};

#[test]
fn writes_json_manifest() -> Result<(), FailureError> {
fn writes_json_manifest() -> Result<(), failure::Error> {
let filenames = vec!["v10-aaa.vk", "v10-aaa.params"];

let manifest_path = tmp_manifest(None)?;
fil-proofs-param/tests/support/mod.rs (18 changes: 9 additions & 9 deletions)
@@ -1,15 +1,15 @@
use std::collections::BTreeMap;
use std::env;
use std::fs::File;
use std::path::{Path, PathBuf};
use std::{env, thread};
use std::process::Command;
use std::thread;
use std::time::Duration;

use failure::format_err;
use rexpect::session::PtyReplSession;
use rexpect::spawn_bash;
use rexpect::{session::PtyReplSession, spawn_bash};
use storage_proofs_core::parameter_cache::ParameterData;

use std::collections::btree_map::BTreeMap;
use std::fs::File;
use std::process::Command;
use std::time::Duration;
use tempfile::tempdir;

pub struct FakeIpfsBin {
bin_path: PathBuf,
@@ -87,7 +87,7 @@ pub fn spawn_bash_with_retries(
pub fn tmp_manifest(
opt_manifest: Option<BTreeMap<String, ParameterData>>,
) -> Result<PathBuf, failure::Error> {
let manifest_dir = tempfile::tempdir()?;
let manifest_dir = tempdir()?;
let mut pbuf = manifest_dir.into_path();
pbuf.push("parameters.json");

fil-proofs-tooling/src/bin/benchy/prodbench.rs (71 changes: 31 additions & 40 deletions)
@@ -1,31 +1,30 @@
use std::fs::remove_file;
use std::str::FromStr;

use bellperson::bls::Bls12;
use bellperson::util_cs::bench_cs::BenchCS;
use bellperson::Circuit;
use fil_proofs_tooling::shared::{create_replicas, PROVER_ID, RANDOMNESS, TICKET_BYTES};
use fil_proofs_tooling::{measure, Metadata};
use bellperson::{bls::Bls12, util_cs::bench_cs::BenchCS, Circuit};
use fil_proofs_tooling::{
measure,
shared::{create_replicas, PROVER_ID, RANDOMNESS, TICKET_BYTES},
Metadata,
};
use filecoin_hashers::sha256::Sha256Hasher;
use filecoin_proofs::constants::{DefaultOctTree, POREP_PARTITIONS};
use filecoin_proofs::types::PaddedBytesAmount;
use filecoin_proofs::types::SectorSize;
use filecoin_proofs::types::*;
use filecoin_proofs::{
clear_cache, constants::DefaultOctLCTree, seal_commit_phase1, seal_commit_phase2,
validate_cache_for_commit, PoRepConfig,
clear_cache, parameters::public_params, seal_commit_phase1, seal_commit_phase2,
validate_cache_for_commit, DefaultOctLCTree, DefaultOctTree, PaddedBytesAmount, PoRepConfig,
PoRepProofPartitions, SectorSize, DRG_DEGREE, EXP_DEGREE, LAYERS, POREP_MINIMUM_CHALLENGES,
POREP_PARTITIONS,
};
use log::info;
use rand::SeedableRng;
use rand_xorshift::XorShiftRng;
use serde::{Deserialize, Serialize};
use storage_proofs_core::api_version::ApiVersion;
use storage_proofs_core::compound_proof::CompoundProof;
#[cfg(feature = "measurements")]
use storage_proofs_core::measurements::Operation;
#[cfg(feature = "measurements")]
use storage_proofs_core::measurements::OP_MEASUREMENTS;
use storage_proofs_core::parameter_cache::CacheableParameters;
use storage_proofs_core::proof::ProofScheme;
use storage_proofs_core::measurements::{Operation, OP_MEASUREMENTS};
use storage_proofs_core::{
api_version::ApiVersion, compound_proof::CompoundProof, parameter_cache::CacheableParameters,
proof::ProofScheme,
};
use storage_proofs_porep::stacked::{LayerChallenges, SetupParams, StackedCompound, StackedDrg};

const SEED: [u8; 16] = [
0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5,
@@ -117,40 +116,39 @@ fn augment_with_op_measurements(mut output: &mut ProdbenchOutputs) {
.expect("failed to acquire lock on rx side of perf channel");

for m in measurements.iter() {
use Operation::*;
let cpu_time = m.cpu_time.as_millis() as u64;
let wall_time = m.wall_time.as_millis() as u64;

match m.op {
GenerateTreeC => {
Operation::GenerateTreeC => {
output.generate_tree_c_cpu_time_ms = cpu_time;
output.generate_tree_c_wall_time_ms = wall_time;
}
GenerateTreeRLast => {
Operation::GenerateTreeRLast => {
output.tree_r_last_cpu_time_ms = cpu_time;
output.tree_r_last_wall_time_ms = wall_time;
}
CommD => {
Operation::CommD => {
output.comm_d_cpu_time_ms = cpu_time;
output.comm_d_wall_time_ms = wall_time;
}
EncodeWindowTimeAll => {
Operation::EncodeWindowTimeAll => {
output.encode_window_time_all_cpu_time_ms = cpu_time;
output.encode_window_time_all_wall_time_ms = wall_time;
}
WindowCommLeavesTime => {
Operation::WindowCommLeavesTime => {
output.window_comm_leaves_time_cpu_time_ms = cpu_time;
output.window_comm_leaves_time_wall_time_ms = wall_time;
}
PorepCommitTime => {
Operation::PorepCommitTime => {
output.porep_commit_time_cpu_time_ms = cpu_time;
output.porep_commit_time_wall_time_ms = wall_time;
}
AddPiece => {
Operation::AddPiece => {
output.add_piece_cpu_time_ms = cpu_time;
output.add_piece_wall_time_ms = wall_time;
}
GeneratePieceCommitment => {
Operation::GeneratePieceCommitment => {
output.generate_piece_commitment_cpu_time_ms = cpu_time;
output.generate_piece_commitment_wall_time_ms = wall_time;
}
@@ -160,15 +158,15 @@ fn augment_with_op_measurements(mut output: &mut ProdbenchOutputs) {
}

fn configure_global_config(inputs: &ProdbenchInputs) {
filecoin_proofs::constants::LAYERS
LAYERS
.write()
.expect("LAYERS poisoned")
.insert(inputs.sector_size_bytes(), inputs.stacked_layers as usize);
filecoin_proofs::constants::POREP_PARTITIONS
POREP_PARTITIONS
.write()
.expect("POREP_PARTITIONS poisoned")
.insert(inputs.sector_size_bytes(), inputs.porep_partitions);
filecoin_proofs::constants::POREP_MINIMUM_CHALLENGES
POREP_MINIMUM_CHALLENGES
.write()
.expect("POREP_MINIMUM_CHALLENGES poisoned")
.insert(inputs.sector_size_bytes(), inputs.porep_challenges);
@@ -248,7 +246,7 @@ pub fn run(

// Clean-up persisted replica files.
for (_, info) in &created {
std::fs::remove_file(info.private_replica_info.replica_path())
remove_file(info.private_replica_info.replica_path())
.expect("failed to remove sealed replica file");
}

@@ -270,14 +268,10 @@ fn run_measure_circuits(i: &ProdbenchInputs) -> CircuitOutputs {
}

fn measure_porep_circuit(i: &ProdbenchInputs) -> usize {
use storage_proofs_porep::stacked::{
LayerChallenges, SetupParams, StackedCompound, StackedDrg,
};

let layers = i.stacked_layers as usize;
let challenge_count = i.porep_challenges as usize;
let drg_degree = filecoin_proofs::constants::DRG_DEGREE;
let expansion_degree = filecoin_proofs::constants::EXP_DEGREE;
let drg_degree = DRG_DEGREE;
let expansion_degree = EXP_DEGREE;
let nodes = (i.sector_size_bytes() / 32) as usize;
let layer_challenges = LayerChallenges::new(layers, challenge_count);

@@ -327,9 +321,6 @@ fn generate_params(i: &ProdbenchInputs) {
}

fn cache_porep_params(porep_config: PoRepConfig) {
use filecoin_proofs::parameters::public_params;
use storage_proofs_porep::stacked::{StackedCompound, StackedDrg};

let public_params = public_params(
PaddedBytesAmount::from(porep_config),
usize::from(PoRepProofPartitions::from(porep_config)),
fil-proofs-tooling/src/bin/circuitinfo/main.rs (23 changes: 11 additions & 12 deletions)
@@ -1,23 +1,22 @@
use std::str::FromStr;

use bellperson::{bls::Bls12, util_cs::bench_cs::BenchCS, Circuit};
use dialoguer::{theme::ColorfulTheme, MultiSelect};
use filecoin_proofs::{
parameters::{public_params, window_post_public_params, winning_post_public_params},
with_shape, DefaultPieceHasher, PaddedBytesAmount, PoRepConfig, PoRepProofPartitions,
PoStConfig, PoStType, SectorSize, POREP_PARTITIONS, PUBLISHED_SECTOR_SIZES,
WINDOW_POST_CHALLENGE_COUNT, WINDOW_POST_SECTOR_COUNT, WINNING_POST_CHALLENGE_COUNT,
WINNING_POST_SECTOR_COUNT,
};
use humansize::{file_size_opts, FileSize};
use log::{info, warn};
use structopt::StructOpt;

use bellperson::util_cs::bench_cs::BenchCS;
use bellperson::{bls::Bls12, Circuit};
use filecoin_proofs::constants::*;
use filecoin_proofs::parameters::{
public_params, window_post_public_params, winning_post_public_params,
use storage_proofs_core::{
api_version::ApiVersion, compound_proof::CompoundProof, merkle::MerkleTreeTrait,
};
use filecoin_proofs::types::*;
use filecoin_proofs::with_shape;
use filecoin_proofs::PoStType;
use storage_proofs_core::api_version::ApiVersion;
use storage_proofs_core::compound_proof::CompoundProof;
use storage_proofs_porep::stacked::{StackedCompound, StackedDrg};
use storage_proofs_post::fallback::{FallbackPoSt, FallbackPoStCircuit, FallbackPoStCompound};
use structopt::StructOpt;

struct CircuitInfo {
constraints: usize,