diff --git a/fil-proofs-param/src/bin/fakeipfsadd.rs b/fil-proofs-param/src/bin/fakeipfsadd.rs
index 925cb58b0..b1cf0dbc9 100644
--- a/fil-proofs-param/src/bin/fakeipfsadd.rs
+++ b/fil-proofs-param/src/bin/fakeipfsadd.rs
@@ -17,7 +17,7 @@ enum Cli {
         #[structopt(help = "Positional argument for the path to the file to add.")]
         file_path: String,
         #[structopt(short = "Q", help = "Simulates the -Q argument to `ipfs add`.")]
-        quieter: bool,
+        _quieter: bool,
     },
 }
 
diff --git a/fil-proofs-param/src/bin/srspublish.rs b/fil-proofs-param/src/bin/srspublish.rs
index 9ebf8e739..91fc80325 100644
--- a/fil-proofs-param/src/bin/srspublish.rs
+++ b/fil-proofs-param/src/bin/srspublish.rs
@@ -102,14 +102,6 @@ fn write_param_map_to_disk(param_map: &ParameterMap, json_path: &str) -> Result<
 #[derive(Debug, StructOpt)]
 #[structopt(name = "srspublish", version = "1.0", about = CLI_ABOUT.as_str())]
 struct Cli {
-    #[structopt(
-        long = "list-all",
-        short = "a",
-        help = "The user will be prompted to select the files to publish from the set of all files \
-                found in the cache dir. Excluding the -a/--list-all flag will result in the user being \
-                prompted for a single param version number for filtering-in files in the cache dir."
-    )]
-    list_all_files: bool,
     #[structopt(
         long = "ipfs-bin",
         value_name = "PATH TO IPFS BINARY",
diff --git a/fil-proofs-param/tests/paramfetch/session.rs b/fil-proofs-param/tests/paramfetch/session.rs
index f41d14536..0fdb4f9a4 100644
--- a/fil-proofs-param/tests/paramfetch/session.rs
+++ b/fil-proofs-param/tests/paramfetch/session.rs
@@ -63,42 +63,47 @@ impl ParamFetchSessionBuilder {
 
     /// Launch paramfetch in an environment configured by the builder.
     pub fn build(self) -> ParamFetchSession {
-        let mut p = spawn_bash_with_retries(10, Some(self.session_timeout_ms))
-            .unwrap_or_else(|err| panic_any(err));
-
-        let cache_dir_path = format!("{:?}", self.cache_dir.path());
-
-        let paramfetch_path = cargo_bin("paramfetch");
-
-        let whitelist: String = self
-            .whitelisted_sector_sizes
-            .map(|wl| {
-                let mut s = "--sector-sizes=".to_string();
-                s.push_str(&wl.join(","));
-                s
-            })
-            .unwrap_or_else(|| "".to_string());
-
-        let json_argument = if self.manifest.is_some() {
-            format!("--json={:?}", self.manifest.expect("missing manifest"))
-        } else {
-            "".to_string()
+        let pty_session = match spawn_bash_with_retries(10, Some(self.session_timeout_ms)) {
+            Err(e) => panic_any(e),
+            Ok(mut session) => {
+                let cache_dir_path = format!("{:?}", self.cache_dir.path());
+
+                let paramfetch_path = cargo_bin("paramfetch");
+
+                let whitelist: String = self
+                    .whitelisted_sector_sizes
+                    .map(|wl| {
+                        let mut s = "--sector-sizes=".to_string();
+                        s.push_str(&wl.join(","));
+                        s
+                    })
+                    .unwrap_or_else(|| "".to_string());
+
+                let json_argument = if self.manifest.is_some() {
+                    format!("--json={:?}", self.manifest.expect("missing manifest"))
+                } else {
+                    "".to_string()
+                };
+
+                let cmd = format!(
+                    "{}={} {:?} {} {} {}",
+                    "FIL_PROOFS_PARAMETER_CACHE", // related to var name in core/src/settings.rs
+                    cache_dir_path,
+                    paramfetch_path,
+                    if self.prompt_enabled { "" } else { "--all" },
+                    json_argument,
+                    whitelist,
+                );
+
+                session
+                    .execute(&cmd, ".*")
+                    .expect("could not execute paramfetch");
+                session
+            }
         };
 
-        let cmd = format!(
-            "{}={} {:?} {} {} {}",
-            "FIL_PROOFS_PARAMETER_CACHE", // related to var name in core/src/settings.rs
-            cache_dir_path,
-            paramfetch_path,
-            if self.prompt_enabled { "" } else { "--all" },
-            json_argument,
-            whitelist,
-        );
-
-        p.execute(&cmd, ".*").expect("could not execute paramfetch");
-
         ParamFetchSession {
-            pty_session: p,
+            pty_session,
            _cache_dir: self.cache_dir,
         }
     }
diff --git a/fil-proofs-param/tests/parampublish/support/session.rs b/fil-proofs-param/tests/parampublish/support/session.rs
index 36d581891..5b050172e 100644
--- a/fil-proofs-param/tests/parampublish/support/session.rs
+++ b/fil-proofs-param/tests/parampublish/support/session.rs
@@ -116,10 +116,29 @@ impl ParamPublishSessionBuilder {
 
     /// Launch parampublish in an environment configured by the builder.
     pub fn build(self) -> (ParamPublishSession, Vec<PathBuf>) {
-        let mut p = spawn_bash_with_retries(10, Some(self.session_timeout_ms))
-            .unwrap_or_else(|err| panic_any(err));
-
-        let cache_dir_path = format!("{:?}", self.cache_dir.path());
+        let pty_session = match spawn_bash_with_retries(10, Some(self.session_timeout_ms)) {
+            Err(err) => panic_any(err),
+            Ok(mut session) => {
+                let cache_dir_path = format!("{:?}", self.cache_dir.path());
+
+                let parampublish_path = cargo_bin("parampublish");
+
+                let cmd = format!(
+                    "{}={} {:?} {} --ipfs-bin={:?} --json={:?}",
+                    "FIL_PROOFS_PARAMETER_CACHE", // related to var name in core/src/settings.rs
+                    cache_dir_path,
+                    parampublish_path,
+                    if self.list_all_files { "-a" } else { "" },
+                    self.ipfs_bin_path,
+                    self.manifest
+                );
+
+                session
+                    .execute(&cmd, ".*")
+                    .expect("could not execute parampublish");
+                session
+            }
+        };
 
         let cache_contents: Vec<PathBuf> = read_dir(&self.cache_dir)
             .unwrap_or_else(|_| panic_any(format!("failed to read cache dir {:?}", self.cache_dir)))
@@ -127,24 +146,9 @@ impl ParamPublishSessionBuilder {
             .map(|x| x.path())
             .collect();
 
-        let parampublish_path = cargo_bin("parampublish");
-
-        let cmd = format!(
-            "{}={} {:?} {} --ipfs-bin={:?} --json={:?}",
-            "FIL_PROOFS_PARAMETER_CACHE", // related to var name in core/src/settings.rs
-            cache_dir_path,
-            parampublish_path,
-            if self.list_all_files { "-a" } else { "" },
-            self.ipfs_bin_path,
-            self.manifest
-        );
-
-        p.execute(&cmd, ".*")
-            .expect("could not execute parampublish");
-
         (
             ParamPublishSession {
-                pty_session: p,
+                pty_session,
                 _cache_dir: self.cache_dir,
             },
             cache_contents,
diff --git a/fil-proofs-tooling/src/bin/check_parameters/main.rs b/fil-proofs-tooling/src/bin/check_parameters/main.rs
index 99773de69..3f69ff6c8 100644
--- a/fil-proofs-tooling/src/bin/check_parameters/main.rs
+++ b/fil-proofs-tooling/src/bin/check_parameters/main.rs
@@ -8,7 +8,7 @@ use clap::{Arg, Command};
 use storage_proofs_core::parameter_cache::read_cached_params;
 
 fn run_map(parameter_file: &Path) -> Result> {
-    read_cached_params(&parameter_file.to_path_buf())
+    read_cached_params(parameter_file)
 }
 
 fn main() {
diff --git a/fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs b/fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs
index 1de4a8013..4dcee8192 100644
--- a/fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs
+++ b/fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs
@@ -56,6 +56,7 @@ impl FromStr for Mode {
 }
 
 #[derive(Debug)]
+#[allow(dead_code)]
 pub struct RunInfo {
     elapsed: Duration,
     iterations: u8,
diff --git a/filecoin-hashers/src/poseidon.rs b/filecoin-hashers/src/poseidon.rs
index 86c69b30b..aa29b08cc 100644
--- a/filecoin-hashers/src/poseidon.rs
+++ b/filecoin-hashers/src/poseidon.rs
@@ -56,7 +56,7 @@ impl Hashable<PoseidonFunction> for PoseidonDomain {
     }
 }
 
-#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
+#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize)]
 pub struct PoseidonDomain(pub <Fr as PrimeField>::Repr);
 
 impl AsRef for PoseidonDomain {
@@ -79,12 +79,6 @@ impl PartialEq for PoseidonDomain {
 
 impl Eq for PoseidonDomain {}
 
-impl Default for PoseidonDomain {
-    fn default() -> PoseidonDomain {
-        PoseidonDomain(<Fr as PrimeField>::Repr::default())
-    }
-}
-
 impl Ord for PoseidonDomain {
     #[inline(always)]
     fn cmp(&self, other: &PoseidonDomain) -> Ordering {
diff --git a/filecoin-proofs/tests/pieces.rs b/filecoin-proofs/tests/pieces.rs
index 858af5b1f..3d50c5232 100644
--- a/filecoin-proofs/tests/pieces.rs
+++ b/filecoin-proofs/tests/pieces.rs
@@ -64,9 +64,7 @@ fn test_get_piece_alignment() {
         (300, 300, (208, 208)),
     ];
 
-    for (bytes_in_sector, bytes_in_piece, (expected_left_align, expected_right_align)) in
-        table.clone()
-    {
+    for (bytes_in_sector, bytes_in_piece, (expected_left_align, expected_right_align)) in table {
         let PieceAlignment {
             left_bytes: UnpaddedBytesAmount(actual_left_align),
             right_bytes: UnpaddedBytesAmount(actual_right_align),
diff --git a/rust-toolchain b/rust-toolchain
index 3ebf789f5..bb120e876 100644
--- a/rust-toolchain
+++ b/rust-toolchain
@@ -1 +1 @@
-1.56.0
+1.59.0
diff --git a/sha2raw/src/sha256_intrinsics.rs b/sha2raw/src/sha256_intrinsics.rs
index 27a4c9ba2..531101250 100644
--- a/sha2raw/src/sha256_intrinsics.rs
+++ b/sha2raw/src/sha256_intrinsics.rs
@@ -14,6 +14,7 @@ use x86::{
 
 /// Process a block with the SHA-256 algorithm.
 /// Based on https://github.com/noloader/SHA-Intrinsics/blob/master/sha256-x86.c
+#[allow(clippy::needless_late_init)]
 #[inline(always)]
 pub unsafe fn compress256(state: &mut [u32; 8], blocks: &[&[u8]]) {
     assert_eq!(blocks.len() % 2, 0);
diff --git a/storage-proofs-core/src/util.rs b/storage-proofs-core/src/util.rs
index 823814a1d..31d9902b2 100644
--- a/storage-proofs-core/src/util.rs
+++ b/storage-proofs-core/src/util.rs
@@ -142,8 +142,7 @@ pub fn reverse_bit_numbering(bits: Vec<Boolean>) -> Vec<Boolean> {
 
     padded_bits
         .chunks(8)
-        .map(|chunk| chunk.iter().rev())
-        .flatten()
+        .flat_map(|chunk| chunk.iter().rev())
         .cloned()
         .collect()
 }
diff --git a/storage-proofs-porep/src/stacked/vanilla/cache.rs b/storage-proofs-porep/src/stacked/vanilla/cache.rs
index 81afac4bb..dc3ec6ce6 100644
--- a/storage-proofs-porep/src/stacked/vanilla/cache.rs
+++ b/storage-proofs-porep/src/stacked/vanilla/cache.rs
@@ -311,7 +311,7 @@ impl ParentCache {
         let mut digest_hex: String = "".to_string();
         let sector_size = graph.size() * NODE_SIZE;
 
-        with_exclusive_lock(&path.to_path_buf(), |file| {
+        with_exclusive_lock(path, |file| {
             let cache_size = cache_entries as usize * NODE_BYTES * DEGREE;
             file.as_ref()
                 .set_len(cache_size as u64)
diff --git a/storage-proofs-porep/src/stacked/vanilla/proof.rs b/storage-proofs-porep/src/stacked/vanilla/proof.rs
index 795c56cfe..33749d891 100644
--- a/storage-proofs-porep/src/stacked/vanilla/proof.rs
+++ b/storage-proofs-porep/src/stacked/vanilla/proof.rs
@@ -578,7 +578,7 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
                 ];
 
                 // gather all layer data.
-                for (layer_index, mut layer_bytes) in
+                for (layer_index, layer_bytes) in
                     layer_data.iter_mut().enumerate()
                 {
                     let store = labels.labels_for_layer(layer_index + 1);
@@ -586,7 +586,7 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
                     let end = start + chunked_nodes_count;
 
                     store
-                        .read_range_into(start, end, &mut layer_bytes)
+                        .read_range_into(start, end, layer_bytes)
                         .expect("failed to read store range");
                 }
 
diff --git a/storage-proofs-porep/src/stacked/vanilla/utils.rs b/storage-proofs-porep/src/stacked/vanilla/utils.rs
index 3ce5c3a0e..26c083401 100644
--- a/storage-proofs-porep/src/stacked/vanilla/utils.rs
+++ b/storage-proofs-porep/src/stacked/vanilla/utils.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::mut_from_ref)]
+
 use std::cell::UnsafeCell;
 use std::slice::{self, ChunksExactMut};
 
@@ -6,7 +8,7 @@ use std::slice::{self, ChunksExactMut};
 #[derive(Debug)]
 pub struct UnsafeSlice<'a, T> {
     // holds the data to ensure lifetime correctness
-    data: UnsafeCell<&'a mut [T]>,
+    _data: UnsafeCell<&'a mut [T]>,
     /// pointer to the data
     ptr: *mut T,
     /// Number of elements, not bytes.
@@ -20,8 +22,8 @@ impl<'a, T> UnsafeSlice<'a, T> {
     pub fn from_slice(source: &'a mut [T]) -> Self {
         let len = source.len();
         let ptr = source.as_mut_ptr();
-        let data = UnsafeCell::new(source);
-        Self { data, ptr, len }
+        let _data = UnsafeCell::new(source);
+        Self { _data, ptr, len }
     }
 
     /// Safety: The caller must ensure that there are no unsynchronized parallel access to the same regions.