diff --git a/.circleci/config.yml b/.circleci/config.yml index 578d7ced3..fcba6fb72 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,24 +1,36 @@ version: 2.1 -jobs: - ensure_groth_parameters_and_keys_linux: +parameters: + nightly-toolchain: + type: string + default: "nightly-2020-11-09" + +executors: + default: docker: - image: filecoin/rust:latest working_directory: /mnt/crate resource_class: 2xlarge+ + +setup-env: &setup-env + FIL_PROOFS_PARAMETER_CACHE: "/root/filecoin-proof-parameters/" + RUST_LOG: info + + +jobs: + ensure_groth_parameters_and_keys_linux: + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - restore_parameter_cache - ensure_filecoin_parameters - save_parameter_cache + cargo_fetch: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - run: name: Calculate dependencies @@ -26,66 +38,54 @@ jobs: no_output_timeout: 30m - restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - run: rustup install $(cat rust-toolchain) - run: rustup default $(cat rust-toolchain) - - run: rustup install nightly + - run: rustup install << pipeline.parameters.nightly-toolchain >> - run: rustup component add rustfmt-preview - run: rustup component add clippy - run: cargo update - run: cargo fetch - run: rustc +$(cat rust-toolchain) --version + - run: rustup toolchain list --verbose - persist_to_workspace: root: "." paths: - Cargo.lock - save_cache: - key: cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + key: cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} paths: - /root/.cargo - /root/.rustup test: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env + parameters: + crate: + type: string steps: - - configure_environment_variables - checkout - attach_workspace: at: "." - restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - restore_parameter_cache - run: - name: Test - command: cargo +$(cat rust-toolchain) test --verbose --workspace - no_output_timeout: 15m - - run: - name: Prune the output files - command: | - for file in target/debug/* target/debug/.??*; do - [ -d $file -o ! -x $file ] && rm -r $file - done - - persist_to_workspace: - root: "." - paths: - - target/debug/* + name: Test (<< parameters.crate >>) + command: cargo +$(cat rust-toolchain) test --verbose --package << parameters.crate >> + no_output_timeout: 30m test_release: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - attach_workspace: at: "." 
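The config hunks above consolidate the per-job `docker`/`working_directory`/`resource_class` blocks and the old `configure_environment_variables` command into a shared `default` executor, a `nightly-toolchain` pipeline parameter, and a `*setup-env` YAML anchor, and turn `test` into a parameterized job invoked once per crate. A minimal sketch of the same pattern, condensed from the diff (job and crate names are only illustrative):

```yaml
version: 2.1

parameters:
  nightly-toolchain:
    type: string
    default: "nightly-2020-11-09"

executors:
  default:
    docker:
      - image: filecoin/rust:latest
    working_directory: /mnt/crate
    resource_class: 2xlarge+

setup-env: &setup-env
  FIL_PROOFS_PARAMETER_CACHE: "/root/filecoin-proof-parameters/"
  RUST_LOG: info

jobs:
  test:
    executor: default
    environment: *setup-env
    parameters:
      crate:
        type: string
    steps:
      - checkout
      - run: cargo +$(cat rust-toolchain) test --verbose --package << parameters.crate >>

workflows:
  version: 2.1
  test_all:
    jobs:
      - test:
          name: test_filecoin_proofs
          crate: "filecoin-proofs"
```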
- restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - restore_parameter_cache - run: name: Test in release profile @@ -95,12 +95,11 @@ jobs: ulimit -n 20000 cargo +$(cat rust-toolchain) test --verbose --release --workspace RUSTFLAGS="-D warnings" cargo +$(cat rust-toolchain) build --examples --release --workspace + no_output_timeout: 30m test_ignored_release: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env parameters: crate: type: string @@ -108,13 +107,12 @@ jobs: type: string default: "" steps: - - configure_environment_variables - checkout - attach_workspace: at: "." - restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - restore_parameter_cache - run: name: Test ignored in release profile @@ -123,7 +121,7 @@ jobs: ulimit -u 20000 ulimit -n 20000 cd << parameters.crate >> - cargo test --release << parameters.features >> -- --ignored + cargo test --release << parameters.features >> -- --ignored --nocapture environment: RUST_TEST_THREADS: 1 no_output_timeout: 30m @@ -132,18 +130,15 @@ jobs: # Running with `use_multicore_sdr=true` should be integrated directly into the test code. For now we # just re-run the lifecycle tests to exercise the use_multicore_sdr code path with that setting set. test_multicore_sdr: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - attach_workspace: at: "." - restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - restore_parameter_cache - run: name: Test with use_multicore_sdr pairing enabled @@ -151,7 +146,8 @@ jobs: ulimit -n 20000 ulimit -u 20000 ulimit -n 20000 - cargo +nightly -Zpackage-features test --all --verbose --release --test api -- --ignored lifecycle + cargo +<< pipeline.parameters.nightly-toolchain >> -Zpackage-features test --all --verbose --release lifecycle -- --ignored --nocapture + no_output_timeout: 30m environment: RUST_TEST_THREADS: 1 FIL_PROOFS_USE_MULTICORE_SDR: true @@ -162,57 +158,75 @@ jobs: ulimit -n 20000 ulimit -u 20000 ulimit -n 20000 - cargo +nightly -Zpackage-features test --all --no-default-features --features gpu,blst --verbose --release --test api -- --ignored lifecycle + cargo +<< pipeline.parameters.nightly-toolchain >> -Zpackage-features test --all --no-default-features --features gpu,blst --verbose --release lifecycle -- --ignored --nocapture + no_output_timeout: 30m environment: RUST_TEST_THREADS: 1 FIL_PROOFS_USE_MULTICORE_SDR: true test_blst: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env + parameters: + crate: + type: string + features: + type: string + default: "gpu,blst" steps: - - configure_environment_variables - checkout - attach_workspace: at: "." 
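The `test_ignored_release_*` and `test_multicore_sdr` jobs above rely on the heavyweight integration tests being marked `#[ignore]`, so they only run when CI passes `-- --ignored --nocapture`. A minimal sketch of such a test (the function name and body are placeholders; the real tests live in the crates listed in these jobs):

```rust
// Run explicitly with: cargo test --release -- --ignored --nocapture
#[test]
#[ignore]
fn lifecycle_smoke() {
    // Placeholder for an expensive sealing/lifecycle test.
    println!("only runs when --ignored is passed");
}
```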
- restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - restore_parameter_cache - run: - name: Test ignored with blst enabled + name: Test ignored with blst enabled (<< parameters.crate >>) command: | ulimit -n 20000 ulimit -u 20000 ulimit -n 20000 - cargo +nightly -Zpackage-features test --all --no-default-features --features gpu,blst --verbose --release --test api -- --ignored + RUST_LOG=trace cargo +<< pipeline.parameters.nightly-toolchain >> -Zpackage-features test --no-default-features --features << parameters.features >> --verbose --release --package << parameters.crate >> -- --nocapture + no_output_timeout: 30m environment: RUST_TEST_THREADS: 1 + test_blst_ignored: + executor: default + environment: *setup-env + parameters: + crate: + type: string + steps: + - checkout + - attach_workspace: + at: "." + - restore_cache: + keys: + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - restore_parameter_cache + - run: - name: Test with blst enabled + name: Test with blst enabled (<< parameters.crate >>) command: | ulimit -n 20000 ulimit -u 20000 ulimit -n 20000 - cargo +nightly -Zpackage-features test --all --no-default-features --features gpu,blst --verbose + cargo +<< pipeline.parameters.nightly-toolchain >> -Zpackage-features test --no-default-features --features gpu,blst --verbose --package << parameters.crate >> --release -- --ignored --nocapture + no_output_timeout: 30m bench: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge+ + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - attach_workspace: at: "." - restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - restore_parameter_cache - run: name: Benchmarks @@ -292,35 +306,29 @@ jobs: path: aggregated-benchmarks.json rustfmt: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - attach_workspace: at: "." - restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - run: name: Run cargo fmt command: cargo fmt --all -- --check clippy: - docker: - - image: filecoin/rust:latest - working_directory: /mnt/crate - resource_class: 2xlarge + executor: default + environment: *setup-env steps: - - configure_environment_variables - checkout - attach_workspace: at: "." 
- restore_cache: keys: - - cargo-v28-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} + - cargo-v28-b-{{ checksum "rust-toolchain" }}-{{ checksum "Cargo.toml" }}-{{ checksum "Cargo.lock" }}-{{ arch }} - run: name: Run cargo clippy command: cargo +$(cat rust-toolchain) clippy --workspace @@ -329,8 +337,8 @@ jobs: xcode: "10.0.0" working_directory: ~/crate resource_class: large + environment: *setup-env steps: - - configure_environment_variables - checkout - run: name: Install hwloc 2.3.0 @@ -356,13 +364,12 @@ jobs: sudo ulimit -n 20000 sudo ulimit -u 20000 ulimit -n 20000 - cargo +$(cat rust-toolchain) test --release --verbose --workspace + cargo +$(cat rust-toolchain) test --release --verbose --workspace -- --nocapture no_output_timeout: 2h commands: ensure_filecoin_parameters: steps: - - configure_environment_variables - run: name: Build paramcache if it doesn't already exist command: | @@ -375,24 +382,15 @@ commands: save_parameter_cache: steps: - save_cache: - key: proof-params-v28-{{ checksum "filecoin-proofs/parameters.json" }}-{{ arch }} + key: proof-params-v28-b-{{ checksum "filecoin-proofs/parameters.json" }}-{{ arch }} paths: - "~/paramcache.awesome" - "~/filecoin-proof-parameters/" restore_parameter_cache: steps: - - configure_environment_variables - restore_cache: keys: - - proof-params-v28-{{ checksum "filecoin-proofs/parameters.json" }}-{{ arch }} - configure_environment_variables: - steps: - - run: - name: Configure environment variables - command: | - echo 'export FIL_PROOFS_PARAMETER_CACHE="${HOME}/filecoin-proof-parameters/"' >> $BASH_ENV - echo 'export PATH="${HOME}/.cargo/bin:${PATH}"' >> $BASH_ENV - echo 'export RUST_LOG=info' >> $BASH_ENV + - proof-params-v28-b-{{ checksum "filecoin-proofs/parameters.json" }}-{{ arch }} workflows: version: 2.1 @@ -406,28 +404,33 @@ workflows: - clippy: requires: - cargo_fetch + - test_release: requires: - cargo_fetch - ensure_groth_parameters_and_keys_linux + - test_ignored_release: name: test_ignored_release_storage_proofs_post crate: "storage-proofs/post" requires: - cargo_fetch - ensure_groth_parameters_and_keys_linux + - test_ignored_release: name: test_ignored_release_storage_proofs_core crate: "storage-proofs/core" requires: - cargo_fetch - ensure_groth_parameters_and_keys_linux + - test_ignored_release: name: test_ignored_release_storage_proofs_porep crate: "storage-proofs/porep" requires: - cargo_fetch - ensure_groth_parameters_and_keys_linux + - test_ignored_release: name: test_ignored_release_filecoin_proofs crate: "filecoin-proofs" @@ -439,14 +442,159 @@ workflows: requires: - cargo_fetch - ensure_groth_parameters_and_keys_linux + + - test_blst: + name: test_blst_filecoin_proofs + crate: "filecoin-proofs" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test_blst_ignored: + name: test_blst_ignored_filecoin_proofs + crate: "filecoin-proofs" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_filecoin_proofs + crate: "filecoin-proofs" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + - test_blst: + name: test_blst_storage_proofs_core + crate: "storage-proofs-core" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test_blst_ignored: + name: test_blst_ignored_storage_proofs_core + crate: "storage-proofs-core" requires: - cargo_fetch - ensure_groth_parameters_and_keys_linux + - test: + name: test_storage_proofs_core + crate: "storage-proofs-core" requires: - 
cargo_fetch - ensure_groth_parameters_and_keys_linux + + - test_blst: + name: test_blst_storage_proofs_post + crate: "storage-proofs-post" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test_blst_ignored: + name: test_blst_ignored_storage_proofs_post + crate: "storage-proofs-post" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_storage_proofs_post + crate: "storage-proofs-post" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + + - test_blst: + name: test_blst_storage_proofs_porep + crate: "storage-proofs-porep" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test_blst_ignored: + name: test_blst_ignored_storage_proofs_porep + crate: "storage-proofs-porep" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_storage_proofs_porep + crate: "storage-proofs-porep" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + + - test_blst: + name: test_blst_fil_proofs_tooling + crate: "fil-proofs-tooling" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_fil_proofs_tooling + crate: "fil-proofs-tooling" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_sha2raw + crate: "sha2raw" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + + - test_blst: + name: test_blst_phase2 + crate: "filecoin-phase2" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_phase2 + crate: "filecoin-phase2" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test_blst: + name: test_blst_filecoin_hashers + crate: "filecoin-hashers" + features: "blst,gpu,poseidon,sha256,blake2s" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_filecoin_hashers + crate: "filecoin-hashers" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + + - test_blst: + name: test_blst_fil_proofs_param + crate: "fil-proofs-param" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + + - test: + name: test_fil_proofs_param + crate: "fil-proofs-param" + requires: + - cargo_fetch + - ensure_groth_parameters_and_keys_linux + - bench: requires: - cargo_fetch diff --git a/fil-proofs-tooling/Cargo.toml b/fil-proofs-tooling/Cargo.toml index a92b1535c..b57e72ec0 100644 --- a/fil-proofs-tooling/Cargo.toml +++ b/fil-proofs-tooling/Cargo.toml @@ -51,7 +51,7 @@ structopt = "0.3.12" humansize = "1.1.0" [features] -default = ["gpu", "measurements"] +default = ["gpu", "measurements", "pairing"] gpu = ["storage-proofs/gpu", "filecoin-proofs/gpu", "bellperson/gpu", "filecoin-hashers/gpu"] measurements = ["storage-proofs/measurements"] profile = ["storage-proofs/profile", "measurements"] diff --git a/filecoin-proofs/tests/api.rs b/filecoin-proofs/tests/api.rs index 2abd900e6..f7b447653 100644 --- a/filecoin-proofs/tests/api.rs +++ b/filecoin-proofs/tests/api.rs @@ -144,7 +144,7 @@ fn seal_lifecycle( } fn get_layer_file_paths(cache_dir: &tempfile::TempDir) -> Vec { - fs::read_dir(&cache_dir) + let mut list: Vec<_> = fs::read_dir(&cache_dir) .expect("failed to read read directory ") .filter_map(|entry| { let cur = entry.expect("reading directory failed"); @@ -156,7 +156,9 @@ fn get_layer_file_paths(cache_dir: &tempfile::TempDir) -> Vec { None } }) - .collect() + .collect(); + list.sort(); + list } fn 
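In the `filecoin-proofs/tests/api.rs` hunk above, `get_layer_file_paths` now sorts the collected entries: `fs::read_dir` returns entries in a platform-dependent order, so sorting makes comparisons of layer files deterministic. A standalone sketch of the same idea (the helper name is illustrative):

```rust
use std::fs;
use std::path::{Path, PathBuf};

// Collect directory entries and sort them so the result does not depend on
// the order in which the OS happens to return them.
fn sorted_entries(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
    let mut list: Vec<PathBuf> = fs::read_dir(dir)?
        .map(|entry| entry.map(|e| e.path()))
        .collect::<Result<_, _>>()?;
    list.sort();
    Ok(list)
}
```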
clear_cache_dir_keep_data_layer(cache_dir: &tempfile::TempDir) { diff --git a/phase2/tests/large.rs b/phase2/tests/large.rs index 69700af70..aede58fc4 100644 --- a/phase2/tests/large.rs +++ b/phase2/tests/large.rs @@ -13,14 +13,6 @@ use mimc::{mimc as mimc_hash, MiMCDemo, MIMC_ROUNDS}; #[test] #[ignore] fn test_large_params() { - //test_large_params_inner() -} - -// This test is marked as ignore because we haven't checked-in the phase1 file required for this -// test to pass when run via CI. To run this test you must have the correct phase1 params file in -// the top level directory of this crate. -#[allow(dead_code)] -fn test_large_params_inner() { assert!( Path::new("./phase1radix2m10").exists(), "the phase1 file `phase1radix2m10` must be in the crate's top level directory" diff --git a/phase2/tests/small.rs b/phase2/tests/small.rs index 3eb68f1da..71ae1cf8d 100644 --- a/phase2/tests/small.rs +++ b/phase2/tests/small.rs @@ -19,14 +19,6 @@ use mimc::{mimc as mimc_hash, MiMCDemo, MIMC_ROUNDS}; #[test] #[ignore] fn test_mimc_small_params() { - //test_mimc_small_params_inner() -} - -// This test is marked as ignore because we haven't checked-in the phase1 file required for this -// test to pass when run via CI. To run this test you must have the correct phase1 params file in -// the top level directory of this crate. -#[allow(dead_code)] -fn test_mimc_small_params_inner() { assert!( Path::new("./phase1radix2m10").exists(), "the phase1 file `phase1radix2m10` must be in the crate's top level directory" diff --git a/storage-proofs/core/src/fr32.rs b/storage-proofs/core/src/fr32.rs index a9481eba1..d91d98aa3 100644 --- a/storage-proofs/core/src/fr32.rs +++ b/storage-proofs/core/src/fr32.rs @@ -1,9 +1,8 @@ use crate::error::*; -use anyhow::{ensure, Context}; +use anyhow::ensure; use bellperson::bls::{Fr, FrRepr}; use byteorder::{ByteOrder, LittleEndian, WriteBytesExt}; -use ff::{PrimeField, PrimeFieldRepr}; // Contains 32 bytes whose little-endian value represents an Fr. // Invariants: @@ -27,6 +26,9 @@ pub type Fr32Ary = [u8; 32]; // Otherwise, returns a BadFrBytesError. #[cfg(feature = "pairing")] pub fn bytes_into_fr(bytes: &[u8]) -> Result { + use anyhow::Context; + use ff::{PrimeField, PrimeFieldRepr}; + ensure!(bytes.len() == 32, Error::BadFrBytes); let mut fr_repr = <::Repr as Default>::default(); @@ -76,6 +78,8 @@ pub fn bytes_into_fr_repr_safe(r: &[u8]) -> FrRepr { // Takes an Fr and returns a vector of exactly 32 bytes guaranteed to contain a valid Fr. 
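The `fr32.rs` hunks here move the pairing-only `use` statements inside the `#[cfg(feature = "pairing")]` functions, so the file still compiles when only the `blst` backend is enabled (as the `test_blst*` CI jobs do with `--no-default-features --features gpu,blst`). A minimal sketch of that feature-gating pattern, with a made-up function in place of `fr_into_bytes`:

```rust
// Two implementations of the same function, selected by cargo features; the
// imports each backend needs live inside the gated function so the crate
// still compiles when the other feature set is enabled.
#[cfg(feature = "pairing")]
pub fn element_to_bytes(x: u64) -> Vec<u8> {
    use byteorder::{LittleEndian, WriteBytesExt}; // backend-specific import

    let mut out = Vec::with_capacity(8);
    out.write_u64::<LittleEndian>(x).expect("write failure");
    out
}

#[cfg(feature = "blst")]
pub fn element_to_bytes(x: u64) -> Vec<u8> {
    x.to_le_bytes().to_vec()
}
```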
#[cfg(feature = "pairing")] pub fn fr_into_bytes(fr: &Fr) -> Fr32Vec { + use ff::{PrimeField, PrimeFieldRepr}; + let mut out = Vec::with_capacity(32); fr.into_repr().write_le(&mut out).expect("write_le failure"); out @@ -83,8 +87,6 @@ pub fn fr_into_bytes(fr: &Fr) -> Fr32Vec { #[cfg(feature = "blst")] pub fn fr_into_bytes(fr: &Fr) -> Fr32Vec { - use std::convert::TryInto; - fr.to_bytes_le().to_vec() } diff --git a/storage-proofs/core/src/parameter_cache.rs b/storage-proofs/core/src/parameter_cache.rs index 603ef1a11..1f2955d5e 100644 --- a/storage-proofs/core/src/parameter_cache.rs +++ b/storage-proofs/core/src/parameter_cache.rs @@ -14,7 +14,7 @@ use sha2::{Digest, Sha256}; use std::collections::{BTreeMap, HashSet}; use std::fs::{self, create_dir_all, File}; -use std::io::{self, SeekFrom}; +use std::io::{self, SeekFrom, Write}; use std::path::{Path, PathBuf}; use std::sync::Mutex; @@ -80,7 +80,7 @@ pub fn get_verifying_key_data(cache_id: &str) -> Option<&ParameterData> { impl LockedFile { pub fn open_exclusive_read>(p: P) -> io::Result { - let f = fs::OpenOptions::new().read(true).open(p)?; + let f = fs::OpenOptions::new().read(true).create(false).open(p)?; f.lock_exclusive()?; Ok(LockedFile(f)) @@ -90,7 +90,7 @@ impl LockedFile { let f = fs::OpenOptions::new() .read(true) .write(true) - .create(true) + .create_new(true) .open(p)?; f.lock_exclusive()?; @@ -98,7 +98,7 @@ impl LockedFile { } pub fn open_shared_read>(p: P) -> io::Result { - let f = fs::OpenOptions::new().read(true).open(p)?; + let f = fs::OpenOptions::new().read(true).create(false).open(p)?; f.lock_shared()?; Ok(LockedFile(f)) @@ -236,6 +236,7 @@ where let meta_path = ensure_ancestor_dirs_exist(parameter_cache_metadata_path(&id))?; read_cached_metadata(&meta_path) .or_else(|_| write_cached_metadata(&meta_path, Self::cache_meta(pub_params))) + .map_err(Into::into) } /// If the rng option argument is set, parameters will be @@ -277,9 +278,16 @@ where read_cached_params(&cache_path).or_else(|err| match err.downcast::() { Ok(error @ Error::InvalidParameters(_)) => Err(error.into()), _ => { - write_cached_params(&cache_path, generate()?).unwrap_or_else(|e| { - panic!("{}: failed to write generated parameters to cache", e) - }); + // if the file already exists, another process is already trying to generate these. + if !cache_path.exists() { + match write_cached_params(&cache_path, generate()?) { + Ok(_) => {} + Err(e) if e.kind() == io::ErrorKind::AlreadyExists => { + // other thread just wrote it, do nothing + } + Err(e) => panic!("{}: failed to write generated parameters to cache", e), + } + } Ok(read_cached_params(&cache_path)?) 
} }) @@ -305,12 +313,14 @@ where // generate (or load) verifying key let cache_path = ensure_ancestor_dirs_exist(parameter_cache_verifying_key_path(&id))?; - read_cached_verifying_key(&cache_path) - .or_else(|_| write_cached_verifying_key(&cache_path, generate()?)) + match read_cached_verifying_key(&cache_path) { + Ok(key) => Ok(key), + Err(_) => write_cached_verifying_key(&cache_path, generate()?).map_err(Into::into), + } } } -fn ensure_parent(path: &PathBuf) -> Result<()> { +fn ensure_parent(path: &PathBuf) -> io::Result<()> { match path.parent() { Some(dir) => { create_dir_all(dir)?; @@ -348,11 +358,15 @@ pub fn read_cached_params(cache_entry_path: &PathBuf) -> Result( + cache_entry_path, + |mut file| { + let mut hasher = Blake2bParams::new().to_state(); + io::copy(&mut file, &mut hasher) + .expect("copying file into hasher failed"); + Ok(hasher.finalize()) + }, + )?; info!("generated consistency digest for parameters"); // The hash in the parameters file is truncated to 256 bits. @@ -377,16 +391,19 @@ pub fn read_cached_params(cache_entry_path: &PathBuf) -> Result(cache_entry_path, |_file| { let mapped_params = Parameters::build_mapped_parameters(cache_entry_path.to_path_buf(), false)?; info!("read parameters from cache {:?} ", cache_entry_path); Ok(mapped_params) }) + .map_err(Into::into) } -fn read_cached_verifying_key(cache_entry_path: &PathBuf) -> Result> { +fn read_cached_verifying_key( + cache_entry_path: &PathBuf, +) -> io::Result> { info!( "checking cache_path: {:?} for verifying key", cache_entry_path @@ -399,7 +416,7 @@ fn read_cached_verifying_key(cache_entry_path: &PathBuf) -> Result Result { +fn read_cached_metadata(cache_entry_path: &PathBuf) -> io::Result { info!("checking cache_path: {:?} for metadata", cache_entry_path); with_exclusive_read_lock(cache_entry_path, |file| { let value = serde_json::from_reader(file)?; @@ -412,7 +429,7 @@ fn read_cached_metadata(cache_entry_path: &PathBuf) -> Result Result { +) -> io::Result { with_exclusive_lock(cache_entry_path, |file| { serde_json::to_writer(file, &value)?; info!("wrote metadata to cache {:?} ", cache_entry_path); @@ -424,9 +441,10 @@ fn write_cached_metadata( fn write_cached_verifying_key( cache_entry_path: &PathBuf, value: groth16::VerifyingKey, -) -> Result> { - with_exclusive_lock(cache_entry_path, |file| { - value.write(file)?; +) -> io::Result> { + with_exclusive_lock(cache_entry_path, |mut file| { + value.write(&mut file)?; + file.flush()?; info!("wrote verifying key to cache {:?} ", cache_entry_path); Ok(value) @@ -436,34 +454,42 @@ fn write_cached_verifying_key( fn write_cached_params( cache_entry_path: &PathBuf, value: groth16::Parameters, -) -> Result> { - with_exclusive_lock(cache_entry_path, |file| { - value.write(file)?; +) -> io::Result> { + with_exclusive_lock(cache_entry_path, |mut file| { + value.write(&mut file)?; + file.flush()?; info!("wrote groth parameters to cache {:?} ", cache_entry_path); Ok(value) }) } -pub fn with_exclusive_lock( - file_path: &PathBuf, - f: impl FnOnce(&mut LockedFile) -> Result, -) -> Result { +pub fn with_exclusive_lock(file_path: &PathBuf, f: F) -> std::result::Result +where + F: FnOnce(&mut LockedFile) -> std::result::Result, + E: From, +{ with_open_file(file_path, LockedFile::open_exclusive, f) } -pub fn with_exclusive_read_lock( - file_path: &PathBuf, - f: impl FnOnce(&mut LockedFile) -> Result, -) -> Result { +pub fn with_exclusive_read_lock(file_path: &PathBuf, f: F) -> std::result::Result +where + F: FnOnce(&mut LockedFile) -> std::result::Result, + E: From, +{ 
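The `with_exclusive_lock` / `with_exclusive_read_lock` hunks above generalize the closure's error type from the crate's `Result` to any `E: From<io::Error>`, so the same helpers serve callers that return plain `io::Error` as well as richer error types. A reduced sketch of that signature shape (helper names are illustrative):

```rust
use std::fs::File;
use std::io;
use std::path::Path;

// The caller's closure picks the error type; the only requirement is that an
// I/O error from opening the file can be converted into it.
fn with_file<T, E, F>(path: &Path, f: F) -> Result<T, E>
where
    F: FnOnce(&mut File) -> Result<T, E>,
    E: From<io::Error>,
{
    let mut file = File::open(path)?; // `?` converts io::Error into E via From
    f(&mut file)
}

// Usage with a plain io::Error caller:
fn size_of(path: &Path) -> io::Result<u64> {
    with_file(path, |file| Ok(file.metadata()?.len()))
}
```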
with_open_file(file_path, LockedFile::open_exclusive_read, f) } -pub fn with_open_file<'a, T>( +pub fn with_open_file<'a, T, E, F, G>( file_path: &'a PathBuf, - open_file: impl FnOnce(&'a PathBuf) -> io::Result, - f: impl FnOnce(&mut LockedFile) -> Result, -) -> Result { + open_file: G, + f: F, +) -> std::result::Result +where + F: FnOnce(&mut LockedFile) -> std::result::Result, + G: FnOnce(&'a PathBuf) -> io::Result, + E: From, +{ ensure_parent(&file_path)?; f(&mut open_file(&file_path)?) } diff --git a/storage-proofs/porep/src/stacked/vanilla/cache.rs b/storage-proofs/porep/src/stacked/vanilla/cache.rs index 80da4d6e3..fac5a3b01 100644 --- a/storage-proofs/porep/src/stacked/vanilla/cache.rs +++ b/storage-proofs/porep/src/stacked/vanilla/cache.rs @@ -157,9 +157,21 @@ impl ParentCache { { let path = cache_path(cache_entries, graph); if path.exists() { - Self::open(len, cache_entries, graph, path) + Self::open(len, cache_entries, graph, &path) } else { - Self::generate(len, cache_entries, graph, path) + match Self::generate(len, cache_entries, graph, &path) { + Ok(c) => Ok(c), + Err(err) => { + match err.downcast::() { + Ok(error) if error.kind() == std::io::ErrorKind::AlreadyExists => { + // cache was written from another process, just read it + Self::open(len, cache_entries, graph, &path) + } + Ok(error) => Err(error.into()), + Err(error) => Err(error), + } + } + } } } @@ -171,7 +183,7 @@ impl ParentCache { len: u32, cache_entries: u32, graph: &StackedGraph, - path: PathBuf, + path: &PathBuf, ) -> Result where H: Hasher, @@ -247,8 +259,10 @@ impl ParentCache { "[!!!] Parent cache digest mismatch detected. Regenerating {}", path.display() ); + // delete invalid cache + std::fs::remove_file(path)?; ensure!( - Self::generate(len, graph.size() as u32, graph, path.clone()).is_ok(), + Self::generate(len, graph.size() as u32, graph, path).is_ok(), "Failed to generate parent cache" ); @@ -260,7 +274,7 @@ impl ParentCache { Ok(ParentCache { cache: CacheData::open(0, len, &path)?, - path, + path: path.clone(), num_cache_entries: cache_entries, sector_size: graph.size() * NODE_SIZE, digest: digest_hex, @@ -272,7 +286,7 @@ impl ParentCache { len: u32, cache_entries: u32, graph: &StackedGraph, - path: PathBuf, + path: &PathBuf, ) -> Result where H: Hasher, @@ -343,7 +357,7 @@ impl ParentCache { Ok(ParentCache { cache: CacheData::open(0, len, &path)?, - path, + path: path.clone(), num_cache_entries: cache_entries, sector_size, digest: digest_hex, diff --git a/storage-proofs/porep/src/stacked/vanilla/proof.rs b/storage-proofs/porep/src/stacked/vanilla/proof.rs index 67475652b..3d4c833c0 100644 --- a/storage-proofs/porep/src/stacked/vanilla/proof.rs +++ b/storage-proofs/porep/src/stacked/vanilla/proof.rs @@ -443,11 +443,12 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr // This channel will receive batches of columns and add them to the ColumnTreeBuilder. let (builder_tx, builder_rx) = mpsc::sync_channel(0); - // This channel will receive the finished tree data to be written to disk. - let (writer_tx, writer_rx) = mpsc::sync_channel::<(Vec, Vec)>(0); let config_count = configs.len(); // Don't move config into closure below. rayon::scope(|s| { + // This channel will receive the finished tree data to be written to disk. 
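The `cache.rs` hunk above makes `ParentCache` tolerant of a concurrent generator: if generation fails because the cache file now exists (another process won the race), the error is downcast back to an `io::Error` and the cache is simply opened, and a digest mismatch now deletes the stale file before regenerating. A reduced sketch of the open-or-generate part (function names are hypothetical and the data is a stand-in):

```rust
use std::io::Write;
use std::path::Path;

fn open_or_generate(path: &Path) -> anyhow::Result<Vec<u8>> {
    if path.exists() {
        return open(path);
    }
    match generate(path) {
        Ok(cache) => Ok(cache),
        Err(err) => match err.downcast::<std::io::Error>() {
            // the cache was written by another process in the meantime; read it
            Ok(e) if e.kind() == std::io::ErrorKind::AlreadyExists => open(path),
            Ok(e) => Err(e.into()),
            Err(e) => Err(e),
        },
    }
}

fn open(path: &Path) -> anyhow::Result<Vec<u8>> {
    Ok(std::fs::read(path)?)
}

fn generate(path: &Path) -> anyhow::Result<Vec<u8>> {
    let data = vec![0u8; 32]; // stand-in for the generated parent cache
    let mut file = std::fs::OpenOptions::new()
        .write(true)
        .create_new(true)
        .open(path)?;
    file.write_all(&data)?;
    Ok(data)
}
```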
+ let (writer_tx, writer_rx) = mpsc::sync_channel::<(Vec, Vec)>(0); + s.spawn(move |_| { for i in 0..config_count { let mut node_index = 0; @@ -560,64 +561,68 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr .expect("failed to send base_data, tree_data"); } }); - }); - for config in &configs { - let (base_data, tree_data) = writer_rx.recv()?; - let tree_len = base_data.len() + tree_data.len(); - - assert_eq!(base_data.len(), nodes_count); - assert_eq!(tree_len, config.size.expect("config size failure")); - - // Persist the base and tree data to disk based using the current store config. - let tree_c_store = DiskStore::<::Domain>::new_with_config( - tree_len, - Tree::Arity::to_usize(), - config.clone(), - ) - .expect("failed to create DiskStore for base tree data"); - - let store = Arc::new(RwLock::new(tree_c_store)); - let batch_size = std::cmp::min(base_data.len(), column_write_batch_size); - let flatten_and_write_store = |data: &Vec, offset| { - data.into_par_iter() - .chunks(column_write_batch_size) - .enumerate() - .try_for_each(|(index, fr_elements)| { - let mut buf = Vec::with_capacity(batch_size * NODE_SIZE); - - for fr in fr_elements { - buf.extend(fr_into_bytes(&fr)); - } - store - .write() - .expect("failed to access store for write") - .copy_from_slice(&buf[..], offset + (batch_size * index)) - }) - }; + for config in &configs { + let (base_data, tree_data) = writer_rx + .recv() + .expect("failed to receive base_data, tree_data for tree_c"); + let tree_len = base_data.len() + tree_data.len(); - trace!( - "flattening tree_c base data of {} nodes using batch size {}", - base_data.len(), - batch_size - ); - flatten_and_write_store(&base_data, 0).expect("failed to flatten and write store"); - trace!("done flattening tree_c base data"); - - let base_offset = base_data.len(); - trace!("flattening tree_c tree data of {} nodes using batch size {} and base offset {}", tree_data.len(), batch_size, base_offset); - flatten_and_write_store(&tree_data, base_offset) - .expect("failed to flatten and write store"); - trace!("done flattening tree_c tree data"); - - trace!("writing tree_c store data"); - store - .write() - .expect("failed to access store for sync") - .sync() - .expect("store sync failure"); - trace!("done writing tree_c store data"); - } + assert_eq!(base_data.len(), nodes_count); + assert_eq!(tree_len, config.size.expect("config size failure")); + + // Persist the base and tree data to disk based using the current store config. 
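In the `proof.rs` hunk above, the rendezvous channel for finished tree data and the loop that drains it both move inside `rayon::scope`, so the spawned builder and the writer run concurrently and neither side outlives the scope. A minimal sketch of that producer/consumer shape, with byte batches standing in for tree data:

```rust
use std::sync::mpsc;

fn build_and_persist(batches: usize) {
    rayon::scope(|s| {
        // Rendezvous channel: every send blocks until the writer receives it.
        let (tx, rx) = mpsc::sync_channel::<Vec<u8>>(0);

        // Producer: stands in for the column/tree builder.
        s.spawn(move |_| {
            for i in 0..batches {
                tx.send(vec![i as u8; 32]).expect("failed to send batch");
            }
        });

        // Consumer: stands in for flattening and writing each batch to disk.
        for _ in 0..batches {
            let batch = rx.recv().expect("failed to receive batch");
            assert_eq!(batch.len(), 32);
        }
    });
}
```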
+ let tree_c_store = + DiskStore::<::Domain>::new_with_config( + tree_len, + Tree::Arity::to_usize(), + config.clone(), + ) + .expect("failed to create DiskStore for base tree data"); + + let store = Arc::new(RwLock::new(tree_c_store)); + let batch_size = std::cmp::min(base_data.len(), column_write_batch_size); + let flatten_and_write_store = |data: &Vec, offset| { + data.into_par_iter() + .chunks(column_write_batch_size) + .enumerate() + .try_for_each(|(index, fr_elements)| { + let mut buf = Vec::with_capacity(batch_size * NODE_SIZE); + + for fr in fr_elements { + buf.extend(fr_into_bytes(&fr)); + } + store + .write() + .expect("failed to access store for write") + .copy_from_slice(&buf[..], offset + (batch_size * index)) + }) + }; + + trace!( + "flattening tree_c base data of {} nodes using batch size {}", + base_data.len(), + batch_size + ); + flatten_and_write_store(&base_data, 0) + .expect("failed to flatten and write store"); + trace!("done flattening tree_c base data"); + + let base_offset = base_data.len(); + trace!("flattening tree_c tree data of {} nodes using batch size {} and base offset {}", tree_data.len(), batch_size, base_offset); + flatten_and_write_store(&tree_data, base_offset) + .expect("failed to flatten and write store"); + trace!("done flattening tree_c tree data"); + + trace!("writing tree_c store data"); + store + .write() + .expect("failed to access store for sync") + .sync() + .expect("store sync failure"); + trace!("done writing tree_c store data"); + } + }); create_disk_tree::< DiskTree, @@ -721,12 +726,13 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr // This channel will receive batches of leaf nodes and add them to the TreeBuilder. let (builder_tx, builder_rx) = mpsc::sync_channel::<(Vec, bool)>(0); - // This channel will receive the finished tree data to be written to disk. - let (writer_tx, writer_rx) = mpsc::sync_channel::>(0); let config_count = configs.len(); // Don't move config into closure below. let configs = &configs; let tree_r_last_config = &tree_r_last_config; rayon::scope(|s| { + // This channel will receive the finished tree data to be written to disk. + let (writer_tx, writer_rx) = mpsc::sync_channel::>(0); + s.spawn(move |_| { for i in 0..config_count { let mut node_index = 0; @@ -822,45 +828,47 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr writer_tx.send(tree_data).expect("failed to send tree_data"); } }); - }); - for config in configs.iter() { - let tree_data = writer_rx.recv()?; + for config in configs.iter() { + let tree_data = writer_rx + .recv() + .expect("failed to receive tree_data for tree_r_last"); - let tree_data_len = tree_data.len(); - let cache_size = get_merkle_tree_cache_size( - get_merkle_tree_leafs( - config.size.expect("config size failure"), + let tree_data_len = tree_data.len(); + let cache_size = get_merkle_tree_cache_size( + get_merkle_tree_leafs( + config.size.expect("config size failure"), + Tree::Arity::to_usize(), + ) + .expect("failed to get merkle tree leaves"), Tree::Arity::to_usize(), + config.rows_to_discard, ) - .expect("failed to get merkle tree leaves"), - Tree::Arity::to_usize(), - config.rows_to_discard, - ) - .expect("failed to get merkle tree cache size"); - assert_eq!(tree_data_len, cache_size); - - let flat_tree_data: Vec<_> = tree_data - .into_par_iter() - .flat_map(|el| fr_into_bytes(&el)) - .collect(); - - // Persist the data to the store based on the current config. 
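The `flatten_and_write_store` closure above converts the received field elements to bytes in parallel, `column_write_batch_size` elements at a time, and writes each batch at an offset derived from its batch index. A standalone sketch of the batching arithmetic, using `u64` as a stand-in for `Fr` and returning the buffers instead of writing them:

```rust
use rayon::prelude::*;

const NODE_SIZE: usize = 8; // stand-in: bytes per element

// Flatten `elements` into byte buffers, `batch_size` elements at a time,
// pairing each buffer with the element offset it should be written at.
fn flatten_in_batches(elements: &[u64], batch_size: usize) -> Vec<(usize, Vec<u8>)> {
    elements
        .par_iter()
        .chunks(batch_size)
        .enumerate()
        .map(|(index, batch)| {
            let mut buf = Vec::with_capacity(batch.len() * NODE_SIZE);
            for el in batch {
                buf.extend_from_slice(&el.to_le_bytes());
            }
            (batch_size * index, buf)
        })
        .collect()
}
```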
- let tree_r_last_path = StoreConfig::data_path(&config.path, &config.id); - trace!( - "persisting tree r of len {} with {} rows to discard at path {:?}", - tree_data_len, - config.rows_to_discard, - tree_r_last_path - ); - let mut f = OpenOptions::new() - .create(true) - .write(true) - .open(&tree_r_last_path) - .expect("failed to open file for tree_r_last"); - f.write_all(&flat_tree_data) - .expect("failed to wrote tree_r_last data"); - } + .expect("failed to get merkle tree cache size"); + assert_eq!(tree_data_len, cache_size); + + let flat_tree_data: Vec<_> = tree_data + .into_par_iter() + .flat_map(|el| fr_into_bytes(&el)) + .collect(); + + // Persist the data to the store based on the current config. + let tree_r_last_path = StoreConfig::data_path(&config.path, &config.id); + trace!( + "persisting tree r of len {} with {} rows to discard at path {:?}", + tree_data_len, + config.rows_to_discard, + tree_r_last_path + ); + let mut f = OpenOptions::new() + .create(true) + .write(true) + .open(&tree_r_last_path) + .expect("failed to open file for tree_r_last"); + f.write_all(&flat_tree_data) + .expect("failed to wrote tree_r_last data"); + } + }); } else { info!("generating tree r last using the CPU"); let size = Store::len(last_layer_labels);