diff --git a/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs b/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs index b5af652d87..8783c787f9 100644 --- a/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs +++ b/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs @@ -30,7 +30,7 @@ use crate::{ proto::base_node::SyncBlocksRequest, tari_utilities::{hex::Hex, Hashable}, transactions::aggregated_body::AggregateBody, - validation::CandidateBlockBodyValidation, + validation::BlockSyncBodyValidation, }; use futures::StreamExt; use log::*; @@ -55,7 +55,7 @@ pub struct BlockSynchronizer { db: AsyncBlockchainDb, connectivity: ConnectivityRequester, sync_peer: Option, - block_validator: Arc>, + block_validator: Arc>, hooks: Hooks, } @@ -65,7 +65,7 @@ impl BlockSynchronizer { db: AsyncBlockchainDb, connectivity: ConnectivityRequester, sync_peer: Option, - block_validator: Arc>, + block_validator: Arc>, ) -> Self { Self { config, diff --git a/base_layer/core/src/base_node/sync/validators.rs b/base_layer/core/src/base_node/sync/validators.rs index e5282cc604..1e982e3de4 100644 --- a/base_layer/core/src/base_node/sync/validators.rs +++ b/base_layer/core/src/base_node/sync/validators.rs @@ -28,7 +28,7 @@ use crate::{ transactions::CryptoFactories, validation::{ block_validators::BlockValidator, - CandidateBlockBodyValidation, + BlockSyncBodyValidation, ChainBalanceValidator, FinalHorizonStateValidation, }, @@ -36,14 +36,14 @@ use crate::{ #[derive(Clone)] pub struct SyncValidators { - pub block_body: Arc>, + pub block_body: Arc>, pub final_horizon_state: Arc>, } impl SyncValidators { pub fn new(block_body: TBody, final_state: TFinal) -> Self where - TBody: CandidateBlockBodyValidation + 'static, + TBody: BlockSyncBodyValidation + 'static, TFinal: FinalHorizonStateValidation + 'static, { Self { diff --git a/base_layer/core/src/blocks/block.rs b/base_layer/core/src/blocks/block.rs index 04cded3ada..abd0a316b0 100644 --- a/base_layer/core/src/blocks/block.rs +++ b/base_layer/core/src/blocks/block.rs @@ -55,6 +55,8 @@ pub enum BlockValidationError { TransactionError(#[from] TransactionError), #[error("Invalid input in block")] InvalidInput, + #[error("Contains kernels or inputs that are not yet spendable")] + MaturityError, #[error("Mismatched MMR roots")] MismatchedMmrRoots, #[error("MMR size for {mmr_tree} does not match. 
Expected: {expected}, received: {actual}")] @@ -109,9 +111,12 @@ impl Block { Ok(()) } - /// Checks that all STXO rules (maturity etc) are followed - pub fn check_stxo_rules(&self) -> Result<(), BlockValidationError> { + /// Checks that all STXO rules (maturity etc) and kernel heights are followed + pub fn check_spend_rules(&self) -> Result<(), BlockValidationError> { self.body.check_stxo_rules(self.header.height)?; + if self.body.max_kernel_timelock() > self.header.height { + return Err(BlockValidationError::MaturityError); + } Ok(()) } diff --git a/base_layer/core/src/transactions/aggregated_body.rs b/base_layer/core/src/transactions/aggregated_body.rs index ac44b04b4d..67af0bd042 100644 --- a/base_layer/core/src/transactions/aggregated_body.rs +++ b/base_layer/core/src/transactions/aggregated_body.rs @@ -1,14 +1,3 @@ -use std::fmt::{Display, Error, Formatter}; - -use log::*; -use serde::{Deserialize, Serialize}; -use tari_crypto::{ - commitment::HomomorphicCommitmentFactory, - keys::PublicKey as PublicKeyTrait, - ristretto::pedersen::PedersenCommitment, - tari_utilities::hex::Hex, -}; - // Copyright 2019, The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the @@ -31,6 +20,12 @@ use tari_crypto::{ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use crate::transactions::{crypto_factories::CryptoFactories, fee::Fee, tari_amount::*, transaction::*}; +use log::*; +use serde::{Deserialize, Serialize}; +use std::{ + cmp::max, + fmt::{Display, Error, Formatter}, +}; use tari_common_types::types::{ BlindingFactor, Commitment, @@ -39,6 +34,12 @@ use tari_common_types::types::{ PublicKey, RangeProofService, }; +use tari_crypto::{ + commitment::HomomorphicCommitmentFactory, + keys::PublicKey as PublicKeyTrait, + ristretto::pedersen::PedersenCommitment, + tari_utilities::hex::Hex, +}; pub const LOG_TARGET: &str = "c::tx::aggregated_body"; @@ -450,6 +451,12 @@ impl AggregateBody { self.kernels.len() ) } + + pub fn max_kernel_timelock(&self) -> u64 { + self.kernels() + .iter() + .fold(0, |max_timelock, kernel| max(max_timelock, kernel.lock_height)) + } } /// This will strip away the offset of the transaction returning a pure aggregate body diff --git a/base_layer/core/src/transactions/transaction.rs b/base_layer/core/src/transactions/transaction.rs index ce3e2c8aec..cb1f1acb16 100644 --- a/base_layer/core/src/transactions/transaction.rs +++ b/base_layer/core/src/transactions/transaction.rs @@ -1156,10 +1156,7 @@ impl Transaction { /// Returns the maximum time lock of the kernels inside of the transaction pub fn max_kernel_timelock(&self) -> u64 { - self.body - .kernels() - .iter() - .fold(0, |max_timelock, kernel| max(max_timelock, kernel.lock_height)) + self.body.max_kernel_timelock() } /// Returns the height of the minimum height where the transaction is spendable. 
This is calculated from the diff --git a/base_layer/core/src/validation/block_validators.rs b/base_layer/core/src/validation/block_validators.rs index 3908c28a5f..d85fcb0e5c 100644 --- a/base_layer/core/src/validation/block_validators.rs +++ b/base_layer/core/src/validation/block_validators.rs @@ -1,13 +1,3 @@ -use std::marker::PhantomData; - -use log::*; -use tari_crypto::{ - commitment::HomomorphicCommitmentFactory, - tari_utilities::{hash::Hashable, hex::Hex}, -}; - -use tari_common_types::chain_metadata::ChainMetadata; - // Copyright 2019. The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the @@ -30,23 +20,32 @@ use tari_common_types::chain_metadata::ChainMetadata; // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use crate::{ - blocks::{Block, BlockValidationError}, - chain_storage, - chain_storage::{BlockchainBackend, ChainBlock, MmrTree}, + blocks::Block, + chain_storage::{BlockchainBackend, ChainBlock}, consensus::ConsensusManager, - transactions::{ - aggregated_body::AggregateBody, - transaction::{KernelFeatures, OutputFlags, TransactionError}, - CryptoFactories, - }, + transactions::CryptoFactories, validation::{ - helpers::{check_accounting_balance, check_block_weight, check_coinbase_output, is_all_unique_and_sorted}, + helpers::{ + check_accounting_balance, + check_block_weight, + check_coinbase_output, + check_inputs_are_utxos, + check_mmr_roots, + check_not_duplicate_txos, + check_sorting_and_duplicates, + }, traits::PostOrphanBodyValidation, - CandidateBlockBodyValidation, + BlockSyncBodyValidation, OrphanValidation, ValidationError, }, }; +use std::marker::PhantomData; + +use log::*; +use tari_crypto::tari_utilities::{hash::Hashable, hex::Hex}; + +use tari_common_types::chain_metadata::ChainMetadata; pub const LOG_TARGET: &str = "c::val::block_validators"; @@ -73,7 +72,6 @@ impl OrphanValidation for OrphanBlockValidator { /// 1. Is the block weight of the block under the prescribed limit? /// 1. Does it contain only unique inputs and outputs? /// 1. Where all the rules for the spent outputs followed? - /// 1. Was cut through applied in the block? /// 1. Is there precisely one Coinbase output and is it correctly defined with the correct amount? /// 1. Is the accounting correct? fn validate(&self, block: &Block) -> Result<(), ValidationError> { @@ -105,7 +103,7 @@ impl OrphanValidation for OrphanBlockValidator { ); // Check that the inputs are are allowed to be spent - block.check_stxo_rules()?; + block.check_spend_rules()?; trace!(target: LOG_TARGET, "SV - Output constraints are ok for {} ", &block_id); check_coinbase_output(block, &self.rules, &self.factories)?; trace!(target: LOG_TARGET, "SV - Coinbase output is ok for {} ", &block_id); @@ -158,8 +156,8 @@ impl PostOrphanBodyValidation for BodyOnlyValidator { } let block_id = format!("block #{} ({})", block.header().height, block.hash().to_hex()); - check_inputs_are_utxos(block.block(), backend)?; - check_not_duplicate_txos(block.block(), backend)?; + check_inputs_are_utxos(&block.block().body, backend)?; + check_not_duplicate_txos(&block.block().body, backend)?; trace!( target: LOG_TARGET, "Block validation: All inputs and outputs are valid for {}", @@ -176,149 +174,6 @@ impl PostOrphanBodyValidation for BodyOnlyValidator { } } -// This function checks for duplicate inputs and outputs. 
There should be no duplicate inputs or outputs in a block -fn check_sorting_and_duplicates(body: &AggregateBody) -> Result<(), ValidationError> { - if !is_all_unique_and_sorted(body.inputs()) { - return Err(ValidationError::UnsortedOrDuplicateInput); - } - if !is_all_unique_and_sorted(body.outputs()) { - return Err(ValidationError::UnsortedOrDuplicateOutput); - } - - Ok(()) -} - -/// This function checks that all inputs in the blocks are valid UTXO's to be spent -fn check_inputs_are_utxos(block: &Block, db: &B) -> Result<(), ValidationError> { - for input in block.body.inputs() { - if let Some(utxo_hash) = db.fetch_unspent_output_hash_by_commitment(&input.commitment)? { - // We know that the commitment exists in the UTXO set. Check that the output hash matches i.e. all fields - // (output features etc.) match - if utxo_hash == input.output_hash() { - continue; - } - - warn!( - target: LOG_TARGET, - "The input spends an unspent output but does not produce the same hash as the output it spends. {}", - input - ); - return Err(ValidationError::BlockError(BlockValidationError::InvalidInput)); - } - - // The input was not found in the UTXO/STXO set, lets check if the input spends an output in the current block - let output_hash = input.output_hash(); - if block.body.outputs().iter().any(|output| output.hash() == output_hash) { - continue; - } - - // The input does not spend a known UTXO - warn!( - target: LOG_TARGET, - "Block validation failed due an input that does not spend a known UTXO: {}", input - ); - return Err(ValidationError::BlockError(BlockValidationError::InvalidInput)); - } - - Ok(()) -} - -/// This function checks that the outputs do not already exist in the UTxO set. -fn check_not_duplicate_txos(block: &Block, db: &B) -> Result<(), ValidationError> { - for output in block.body.outputs() { - if let Some(index) = db.fetch_mmr_leaf_index(MmrTree::Utxo, &output.hash())? { - warn!( - target: LOG_TARGET, - "Block validation failed due to previously spent output: {} (MMR index = {})", output, index - ); - return Err(ValidationError::ContainsTxO); - } - if db - .fetch_unspent_output_hash_by_commitment(&output.commitment)? - .is_some() - { - warn!( - target: LOG_TARGET, - "Duplicate UTXO set commitment found for output: {}", output - ); - return Err(ValidationError::ContainsDuplicateUtxoCommitment); - } - } - Ok(()) -} - -fn check_mmr_roots(block: &Block, db: &B) -> Result<(), ValidationError> { - let mmr_roots = chain_storage::calculate_mmr_roots(db, &block)?; - let header = &block.header; - if header.input_mr != mmr_roots.input_mr { - warn!( - target: LOG_TARGET, - "Block header input merkle root in {} do not match calculated root. Expected: {}, Actual:{}", - block.hash().to_hex(), - header.input_mr.to_hex(), - mmr_roots.input_mr.to_hex() - ); - return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); - } - if header.kernel_mr != mmr_roots.kernel_mr { - warn!( - target: LOG_TARGET, - "Block header kernel MMR roots in {} do not match calculated roots. Expected: {}, Actual:{}", - block.hash().to_hex(), - header.kernel_mr.to_hex(), - mmr_roots.kernel_mr.to_hex() - ); - return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); - }; - if header.kernel_mmr_size != mmr_roots.kernel_mmr_size { - warn!( - target: LOG_TARGET, - "Block header kernel MMR size in {} does not match. 
Expected: {}, Actual:{}", - block.hash().to_hex(), - header.kernel_mmr_size, - mmr_roots.kernel_mmr_size - ); - return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrSize { - mmr_tree: MmrTree::Kernel, - expected: mmr_roots.kernel_mmr_size, - actual: header.kernel_mmr_size, - })); - } - if header.output_mr != mmr_roots.output_mr { - warn!( - target: LOG_TARGET, - "Block header output MMR roots in {} do not match calculated roots. Expected: {}, Actual:{}", - block.hash().to_hex(), - header.output_mr.to_hex(), - mmr_roots.output_mr.to_hex() - ); - return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); - }; - if header.witness_mr != mmr_roots.witness_mr { - warn!( - target: LOG_TARGET, - "Block header witness MMR roots in {} do not match calculated roots", - block.hash().to_hex() - ); - return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); - }; - if header.output_mmr_size != mmr_roots.output_mmr_size { - warn!( - target: LOG_TARGET, - "Block header output MMR size in {} does not match. Expected: {}, Actual:{}", - block.hash().to_hex(), - header.output_mmr_size, - mmr_roots.output_mmr_size - ); - return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrSize { - mmr_tree: MmrTree::Utxo, - expected: mmr_roots.output_mmr_size, - actual: header.output_mmr_size, - })); - } - Ok(()) -} - /// This validator checks whether a block satisfies consensus rules. /// It implements two validators: one for the `BlockHeader` and one for `Block`. The `Block` validator ONLY validates /// the block body using the header. It is assumed that the `BlockHeader` has already been validated. @@ -338,97 +193,9 @@ impl BlockValidator { phantom_data: Default::default(), } } - - /// This function checks that all inputs in the blocks are valid UTXO's to be spend - fn check_inputs(&self, block: &Block) -> Result<(), ValidationError> { - let inputs = block.body.inputs(); - for (i, input) in inputs.iter().enumerate() { - // Check for duplicates and/or incorrect sorting - if i > 0 && input <= &inputs[i - 1] { - return Err(ValidationError::UnsortedOrDuplicateInput); - } - - // Check maturity - if input.features.maturity > block.header.height { - warn!( - target: LOG_TARGET, - "Input found that has not yet matured to spending height: {}", input - ); - return Err(TransactionError::InputMaturity.into()); - } - } - Ok(()) - } - - fn check_outputs(&self, block: &Block) -> Result<(), ValidationError> { - let outputs = block.body.outputs(); - let mut coinbase_output = None; - for (j, output) in outputs.iter().enumerate() { - if output.features.flags.contains(OutputFlags::COINBASE_OUTPUT) { - if coinbase_output.is_some() { - return Err(ValidationError::TransactionError(TransactionError::MoreThanOneCoinbase)); - } - coinbase_output = Some(output); - } - - if j > 0 && output <= &outputs[j - 1] { - return Err(ValidationError::UnsortedOrDuplicateOutput); - } - } - - let coinbase_output = match coinbase_output { - Some(output) => output, - // No coinbase found - None => { - warn!( - target: LOG_TARGET, - "Block #{} failed to validate: no coinbase UTXO", block.header.height - ); - return Err(ValidationError::TransactionError(TransactionError::NoCoinbase)); - }, - }; - - let mut coinbase_kernel = None; - for kernel in block.body.kernels() { - if kernel.features.contains(KernelFeatures::COINBASE_KERNEL) { - if coinbase_kernel.is_some() { - return Err(ValidationError::TransactionError(TransactionError::MoreThanOneCoinbase)); - } - coinbase_kernel = 
Some(kernel); - } - } - - let coinbase_kernel = match coinbase_kernel { - Some(kernel) => kernel, - // No coinbase found - None => { - warn!( - target: LOG_TARGET, - "Block #{} failed to validate: no coinbase kernel", block.header.height - ); - return Err(ValidationError::TransactionError(TransactionError::NoCoinbase)); - }, - }; - - let reward = self.rules.calculate_coinbase_and_fees(block); - let rhs = &coinbase_kernel.excess + - &self - .factories - .commitment - .commit_value(&Default::default(), reward.into()); - if rhs != coinbase_output.commitment { - warn!( - target: LOG_TARGET, - "Coinbase {} amount validation failed", coinbase_output - ); - return Err(ValidationError::TransactionError(TransactionError::InvalidCoinbase)); - } - - Ok(()) - } } -impl CandidateBlockBodyValidation for BlockValidator { +impl BlockSyncBodyValidation for BlockValidator { /// The following consensus checks are done: /// 1. Does the block satisfy the stateless checks? /// 1. Are the block header MMR roots valid? @@ -439,9 +206,19 @@ impl CandidateBlockBodyValidation for BlockValidator let constants = self.rules.consensus_constants(block.header.height); check_block_weight(block, &constants)?; trace!(target: LOG_TARGET, "SV - Block weight is ok for {} ", &block_id); + // Check that the inputs are allowed to be spent + block.check_spend_rules()?; + trace!(target: LOG_TARGET, "SV - Output constraints are ok for {} ", &block_id); - self.check_inputs(block)?; - self.check_outputs(block)?; + check_sorting_and_duplicates(&block.body)?; + check_inputs_are_utxos(&block.body, backend)?; + check_not_duplicate_txos(&block.body, backend)?; + check_coinbase_output(block, &self.rules, &self.factories)?; + trace!( + target: LOG_TARGET, + "Block validation: All inputs and outputs are valid for {}", + block_id + ); check_accounting_balance( block, diff --git a/base_layer/core/src/validation/helpers.rs b/base_layer/core/src/validation/helpers.rs index f0d1947b1e..0b2a067eb0 100644 --- a/base_layer/core/src/validation/helpers.rs +++ b/base_layer/core/src/validation/helpers.rs @@ -20,6 +20,7 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +use crate::transactions::aggregated_body::AggregateBody; use log::*; use tari_crypto::tari_utilities::{epoch_time::EpochTime, hash::Hashable, hex::Hex}; @@ -29,7 +30,8 @@ use crate::{ Block, BlockValidationError, }, - chain_storage::BlockchainBackend, + chain_storage, + chain_storage::{BlockchainBackend, MmrTree}, consensus::{ConsensusConstants, ConsensusManager}, proof_of_work::{ monero_difficulty, @@ -222,7 +224,7 @@ pub fn check_accounting_balance( .map_err(|err| { warn!( target: LOG_TARGET, - "Internal validation failed on block:{}:{}", + "Validation failed on block:{}:{}", block.hash().to_hex(), err ); @@ -245,7 +247,7 @@ pub fn check_coinbase_output( .map_err(ValidationError::from) } -pub fn is_all_unique_and_sorted, T: PartialOrd>(items: I) -> bool { +fn is_all_unique_and_sorted, T: PartialOrd>(items: I) -> bool { let items = items.as_ref(); if items.is_empty() { return true; } @@ -262,76 +264,219 @@ pub fn is_all_unique_and_sorted, T: PartialOrd>(items: I) -> bool true } -#[cfg(test)] -mod test { - use super::*; +// This function checks for duplicate inputs and outputs.
There should be no duplicate inputs or outputs in a block +pub fn check_sorting_and_duplicates(body: &AggregateBody) -> Result<(), ValidationError> { + if !is_all_unique_and_sorted(body.inputs()) { + return Err(ValidationError::UnsortedOrDuplicateInput); + } + if !is_all_unique_and_sorted(body.outputs()) { + return Err(ValidationError::UnsortedOrDuplicateOutput); + } - #[cfg(test)] - mod is_all_unique_and_sorted { - use super::*; + Ok(()) +} - #[test] - fn it_returns_true_when_nothing_to_compare() { - assert!(is_all_unique_and_sorted::<_, usize>(&[])); - assert!(is_all_unique_and_sorted(&[1])); +/// This function checks that all inputs in the blocks are valid UTXO's to be spent +pub fn check_inputs_are_utxos(body: &AggregateBody, db: &B) -> Result<(), ValidationError> { + let mut not_found_input = Vec::new(); + for input in body.inputs() { + let output_hash = input.output_hash(); + if let Some(utxo_hash) = db.fetch_unspent_output_hash_by_commitment(&input.commitment)? { + // We know that the commitment exists in the UTXO set. Check that the output hash matches (i.e. all fields + // like output features match) + if utxo_hash == output_hash { + continue; + } + + warn!( + target: LOG_TARGET, + "Input spends a UTXO but does not produce the same hash as the output it spends: + {}", + input + ); + return Err(ValidationError::BlockError(BlockValidationError::InvalidInput)); } - #[test] - fn it_returns_true_when_unique_and_sorted() { - let v = [1, 2, 3, 4, 5]; - assert!(is_all_unique_and_sorted(&v)); + + // Wallet needs to know if a transaction has already been mined and uses this error variant to do so. + if db.fetch_output(&output_hash)?.is_some() { + warn!( + target: LOG_TARGET, + "Validation failed due to already spent input: {}", input + ); + // We know that the output here must be spent because `fetch_unspent_output_hash_by_commitment` would have + // been Some + return Err(ValidationError::ContainsSTxO); } - #[test] - fn it_returns_false_when_unsorted() { - let v = [2, 1, 3, 4, 5]; - assert!(!is_all_unique_and_sorted(&v)); + if body.outputs().iter().any(|output| output.hash() == output_hash) { + continue; } - #[test] - fn it_returns_false_when_duplicate() { - let v = [1, 2, 3, 4, 4]; - assert!(!is_all_unique_and_sorted(&v)); + + warn!( + target: LOG_TARGET, + "Validation failed due to input: {} which does not exist yet", input + ); + not_found_input.push(output_hash); + } + if !not_found_input.is_empty() { + return Err(ValidationError::UnknownInputs(not_found_input)); + } + + Ok(()) +} + +/// This function checks that the outputs do not already exist in the UTxO set. +pub fn check_not_duplicate_txos(body: &AggregateBody, db: &B) -> Result<(), ValidationError> { + for output in body.outputs() { + if let Some(index) = db.fetch_mmr_leaf_index(MmrTree::Utxo, &output.hash())? { + warn!( + target: LOG_TARGET, + "Validation failed due to previously spent output: {} (MMR index = {})", output, index + ); + return Err(ValidationError::ContainsTxO); } - #[test] - fn it_returns_false_when_duplicate_and_unsorted() { - let v = [4, 2, 3, 0, 4]; - assert!(!is_all_unique_and_sorted(&v)); + if db + .fetch_unspent_output_hash_by_commitment(&output.commitment)? 
+ .is_some() + { + warn!( + target: LOG_TARGET, + "Duplicate UTXO set commitment found for output: {}", output + ); + return Err(ValidationError::ContainsDuplicateUtxoCommitment); } } + Ok(()) +} + +pub fn check_mmr_roots(block: &Block, db: &B) -> Result<(), ValidationError> { + let mmr_roots = chain_storage::calculate_mmr_roots(db, &block)?; + let header = &block.header; + if header.input_mr != mmr_roots.input_mr { + warn!( + target: LOG_TARGET, + "Block header input merkle root in {} do not match calculated root. Expected: {}, Actual:{}", + block.hash().to_hex(), + header.input_mr.to_hex(), + mmr_roots.input_mr.to_hex() + ); + return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); + } + if header.kernel_mr != mmr_roots.kernel_mr { + warn!( + target: LOG_TARGET, + "Block header kernel MMR roots in {} do not match calculated roots. Expected: {}, Actual:{}", + block.hash().to_hex(), + header.kernel_mr.to_hex(), + mmr_roots.kernel_mr.to_hex() + ); + return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); + }; + if header.kernel_mmr_size != mmr_roots.kernel_mmr_size { + warn!( + target: LOG_TARGET, + "Block header kernel MMR size in {} does not match. Expected: {}, Actual:{}", + block.hash().to_hex(), + header.kernel_mmr_size, + mmr_roots.kernel_mmr_size + ); + return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrSize { + mmr_tree: MmrTree::Kernel, + expected: mmr_roots.kernel_mmr_size, + actual: header.kernel_mmr_size, + })); + } + if header.output_mr != mmr_roots.output_mr { + warn!( + target: LOG_TARGET, + "Block header output MMR roots in {} do not match calculated roots. Expected: {}, Actual:{}", + block.hash().to_hex(), + header.output_mr.to_hex(), + mmr_roots.output_mr.to_hex() + ); + return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); + }; + if header.witness_mr != mmr_roots.witness_mr { + warn!( + target: LOG_TARGET, + "Block header witness MMR roots in {} do not match calculated roots", + block.hash().to_hex() + ); + return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots)); + }; + if header.output_mmr_size != mmr_roots.output_mmr_size { + warn!( + target: LOG_TARGET, + "Block header output MMR size in {} does not match. 
Expected: {}, Actual:{}", + block.hash().to_hex(), + header.output_mmr_size, + mmr_roots.output_mmr_size + ); + return Err(ValidationError::BlockError(BlockValidationError::MismatchedMmrSize { + mmr_tree: MmrTree::Utxo, + expected: mmr_roots.output_mmr_size, + actual: header.output_mmr_size, + })); + } + Ok(()) +} + +#[cfg(test)] +mod test { + use super::*; - #[cfg(test)] - #[allow(clippy::module_inception)] - mod test { - use super::*; + #[test] + fn it_returns_true_when_nothing_to_compare() { + assert!(is_all_unique_and_sorted::<_, usize>(&[])); + assert!(is_all_unique_and_sorted(&[1])); + } + #[test] + fn it_returns_true_when_unique_and_sorted() { + let v = [1, 2, 3, 4, 5]; + assert!(is_all_unique_and_sorted(&v)); + } - mod check_median_timestamp { - use super::*; + #[test] + fn it_returns_false_when_unsorted() { + let v = [2, 1, 3, 4, 5]; + assert!(!is_all_unique_and_sorted(&v)); + } + #[test] + fn it_returns_false_when_duplicate() { + let v = [1, 2, 3, 4, 4]; + assert!(!is_all_unique_and_sorted(&v)); + } + #[test] + fn it_returns_false_when_duplicate_and_unsorted() { + let v = [4, 2, 3, 0, 4]; + assert!(!is_all_unique_and_sorted(&v)); + } - #[test] - #[should_panic] - fn it_panics_if_empty() { - calc_median_timestamp(&[]); - } + // #[allow(clippy::module_inception)] + #[test] + #[should_panic] + fn it_panics_if_empty() { + calc_median_timestamp(&[]); + } - #[test] - fn it_calculates_the_correct_median_timestamp() { - let median_timestamp = calc_median_timestamp(&[0.into()]); - assert_eq!(median_timestamp, 0.into()); + #[test] + fn it_calculates_the_correct_median_timestamp() { + let median_timestamp = calc_median_timestamp(&[0.into()]); + assert_eq!(median_timestamp, 0.into()); - let median_timestamp = calc_median_timestamp(&[123.into()]); - assert_eq!(median_timestamp, 123.into()); + let median_timestamp = calc_median_timestamp(&[123.into()]); + assert_eq!(median_timestamp, 123.into()); - let median_timestamp = calc_median_timestamp(&[2.into(), 4.into()]); - assert_eq!(median_timestamp, 3.into()); + let median_timestamp = calc_median_timestamp(&[2.into(), 4.into()]); + assert_eq!(median_timestamp, 3.into()); - let median_timestamp = calc_median_timestamp(&[0.into(), 100.into(), 0.into()]); - assert_eq!(median_timestamp, 100.into()); + let median_timestamp = calc_median_timestamp(&[0.into(), 100.into(), 0.into()]); + assert_eq!(median_timestamp, 100.into()); - let median_timestamp = calc_median_timestamp(&[1.into(), 2.into(), 3.into(), 4.into()]); - assert_eq!(median_timestamp, 2.into()); + let median_timestamp = calc_median_timestamp(&[1.into(), 2.into(), 3.into(), 4.into()]); + assert_eq!(median_timestamp, 2.into()); - let median_timestamp = calc_median_timestamp(&[1.into(), 2.into(), 3.into(), 4.into(), 5.into()]); - assert_eq!(median_timestamp, 3.into()); - } - } + let median_timestamp = calc_median_timestamp(&[1.into(), 2.into(), 3.into(), 4.into(), 5.into()]); + assert_eq!(median_timestamp, 3.into()); } } diff --git a/base_layer/core/src/validation/mocks.rs b/base_layer/core/src/validation/mocks.rs index c2b3ffe5cf..a1060736c6 100644 --- a/base_layer/core/src/validation/mocks.rs +++ b/base_layer/core/src/validation/mocks.rs @@ -27,7 +27,7 @@ use crate::{ transactions::transaction::Transaction, validation::{ error::ValidationError, - CandidateBlockBodyValidation, + BlockSyncBodyValidation, DifficultyCalculator, FinalHorizonStateValidation, HeaderValidation, @@ -67,7 +67,7 @@ impl MockValidator { } } -impl CandidateBlockBodyValidation for MockValidator { +impl 
BlockSyncBodyValidation for MockValidator { fn validate_body(&self, _item: &Block, _db: &B) -> Result<(), ValidationError> { if self.is_valid.load(Ordering::SeqCst) { Ok(()) diff --git a/base_layer/core/src/validation/mod.rs b/base_layer/core/src/validation/mod.rs index dbd7eb7d8e..528709eb10 100644 --- a/base_layer/core/src/validation/mod.rs +++ b/base_layer/core/src/validation/mod.rs @@ -34,7 +34,7 @@ pub(crate) mod helpers; mod traits; pub use traits::{ - CandidateBlockBodyValidation, + BlockSyncBodyValidation, FinalHorizonStateValidation, HeaderValidation, MempoolTransactionValidation, diff --git a/base_layer/core/src/validation/traits.rs b/base_layer/core/src/validation/traits.rs index cc8c287b0a..7eecfbc99a 100644 --- a/base_layer/core/src/validation/traits.rs +++ b/base_layer/core/src/validation/traits.rs @@ -31,7 +31,7 @@ use tari_common_types::{chain_metadata::ChainMetadata, types::Commitment}; /// A validator that determines if a block body is valid, assuming that the header has already been /// validated -pub trait CandidateBlockBodyValidation: Send + Sync { +pub trait BlockSyncBodyValidation: Send + Sync { fn validate_body(&self, block: &Block, backend: &B) -> Result<(), ValidationError>; } diff --git a/base_layer/core/src/validation/transaction_validators.rs b/base_layer/core/src/validation/transaction_validators.rs index 4f136aeea5..fe324bc0fa 100644 --- a/base_layer/core/src/validation/transaction_validators.rs +++ b/base_layer/core/src/validation/transaction_validators.rs @@ -23,11 +23,13 @@ use log::*; use crate::{ - blocks::BlockValidationError, - chain_storage::{BlockchainBackend, BlockchainDatabase, MmrTree}, - crypto::tari_utilities::Hashable, + chain_storage::{BlockchainBackend, BlockchainDatabase}, transactions::{transaction::Transaction, CryptoFactories}, - validation::{MempoolTransactionValidation, ValidationError}, + validation::{ + helpers::{check_inputs_are_utxos, check_not_duplicate_txos}, + MempoolTransactionValidation, + ValidationError, + }, }; pub const LOG_TARGET: &str = "c::val::transaction_validators"; @@ -103,8 +105,8 @@ impl TxInputAndMaturityValidator { impl MempoolTransactionValidation for TxInputAndMaturityValidator { fn validate(&self, tx: &Transaction) -> Result<(), ValidationError> { let db = self.db.db_read_access()?; - verify_inputs_are_spendable(tx, &*db)?; - check_not_duplicate_txos(tx, &*db)?; + check_inputs_are_utxos(tx.get_body(), &*db)?; + check_not_duplicate_txos(tx.get_body(), &*db)?; let tip_height = db.fetch_chain_metadata()?.height_of_longest_chain(); verify_timelocks(tx, tip_height)?; @@ -117,70 +119,11 @@ impl MempoolTransactionValidation for TxInputAndMaturityVa // input maturities fn verify_timelocks(tx: &Transaction, current_height: u64) -> Result<(), ValidationError> { if tx.min_spendable_height() > current_height + 1 { - return Err(ValidationError::MaturityError); - } - Ok(()) -} - -/// This function checks that the inputs exists in the UTXO set but do not exist in the STXO set. -fn verify_inputs_are_spendable(tx: &Transaction, db: &B) -> Result<(), ValidationError> { - let mut not_found_input = Vec::new(); - for input in tx.body.inputs() { - let output_hash = input.output_hash(); - if let Some(utxo_hash) = db.fetch_unspent_output_hash_by_commitment(&input.commitment)? { - // We know that the commitment exists in the UTXO set. Check that the output hash matches (i.e. 
all fields - // like output features match) - if utxo_hash == output_hash { - continue; - } - - warn!( - target: LOG_TARGET, - "Input spends a UTXO but does not produce the same hash as the output it spends: - {}", - input - ); - return Err(ValidationError::BlockError(BlockValidationError::InvalidInput)); - } - - // Wallet needs to know if a transaction has already been mined and uses this error variant to do so. - if db.fetch_output(&output_hash)?.is_some() { - warn!( - target: LOG_TARGET, - "Transaction validation failed due to already spent input: {}", input - ); - // We know that the output here must be spent because `fetch_unspent_output_hash_by_commitment` would have - // been Some - return Err(ValidationError::ContainsSTxO); - } - - if tx.body.outputs().iter().any(|output| output.hash() == output_hash) { - continue; - } - warn!( target: LOG_TARGET, - "Transaction uses input: {} which does not exist yet", input + "Transaction has a min spend height higher than the current tip" ); - not_found_input.push(output_hash); - } - if !not_found_input.is_empty() { - return Err(ValidationError::UnknownInputs(not_found_input)); - } - - Ok(()) -} - -/// This function checks that the outputs do not exist in the TxO set. -fn check_not_duplicate_txos(transaction: &Transaction, db: &B) -> Result<(), ValidationError> { - for output in transaction.body.outputs() { - if db.fetch_mmr_leaf_index(MmrTree::Utxo, &output.hash())?.is_some() { - warn!( - target: LOG_TARGET, - "Transaction validation failed due to previously spent output: {}", output - ); - return Err(ValidationError::ContainsTxO); - } + return Err(ValidationError::MaturityError); } Ok(()) } diff --git a/base_layer/core/tests/block_validation.rs b/base_layer/core/tests/block_validation.rs index 1a91df6012..d2add135de 100644 --- a/base_layer/core/tests/block_validation.rs +++ b/base_layer/core/tests/block_validation.rs @@ -20,26 +20,50 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-use std::sync::Arc; - +use crate::helpers::{ + block_builders::{ + chain_block_with_coinbase, + chain_block_with_new_coinbase, + create_coinbase, + create_genesis_block_with_utxos, + find_header_with_achieved_difficulty, + }, + test_blockchain::TestBlockchain, +}; use monero::blockdata::block::Block as MoneroBlock; -use tari_crypto::inputs; - +use rand::{rngs::OsRng, RngCore}; +use std::sync::Arc; use tari_common::configuration::Network; use tari_core::{ blocks::{Block, BlockHeaderValidationError, BlockValidationError}, - chain_storage::{BlockchainDatabase, BlockchainDatabaseConfig, ChainStorageError, Validators}, - consensus::{consensus_constants::PowAlgorithmConstants, ConsensusConstantsBuilder, ConsensusManagerBuilder}, + chain_storage::{ + BlockHeaderAccumulatedData, + BlockchainDatabase, + BlockchainDatabaseConfig, + ChainBlock, + ChainStorageError, + Validators, + }, + consensus::{ + consensus_constants::PowAlgorithmConstants, + ConsensusConstantsBuilder, + ConsensusManager, + ConsensusManagerBuilder, + }, crypto::tari_utilities::hex::Hex, proof_of_work::{ monero_rx, monero_rx::{FixedByteArray, MoneroPowData}, + randomx_factory::RandomXFactory, PowAlgorithm, }, + tari_utilities::Hashable, test_helpers::blockchain::{create_store_with_consensus_and_validators, create_test_db}, transactions::{ - helpers::{schema_to_transaction, TestParams, UtxoTestParams}, - tari_amount::T, + aggregated_body::AggregateBody, + helpers::{create_unblinded_output, schema_to_transaction, spend_utxos, TestParams, UtxoTestParams}, + tari_amount::{uT, T}, + transaction::OutputFeatures, CryptoFactories, }, txn_schema, @@ -47,13 +71,15 @@ use tari_core::{ block_validators::{BlockValidator, BodyOnlyValidator, OrphanBlockValidator}, header_validator::HeaderValidator, mocks::MockValidator, - CandidateBlockBodyValidation, + BlockSyncBodyValidation, DifficultyCalculator, + HeaderValidation, + OrphanValidation, + PostOrphanBodyValidation, ValidationError, }, }; - -use crate::helpers::{block_builders::chain_block_with_new_coinbase, test_blockchain::TestBlockchain}; +use tari_crypto::{inputs, script}; mod helpers; @@ -230,3 +256,585 @@ fn inputs_are_not_malleable() { ValidationError::BlockError(BlockValidationError::MismatchedMmrRoots) )); } + +#[test] +fn test_orphan_validator() { + let factories = CryptoFactories::default(); + let network = Network::Weatherwax; + let consensus_constants = ConsensusConstantsBuilder::new(network) + .with_max_block_transaction_weight(80) + .build(); + let (genesis, outputs) = create_genesis_block_with_utxos(&factories, &[T, T, T], &consensus_constants); + let network = Network::LocalNet; + let rules = ConsensusManager::builder(network) + .with_consensus_constants(consensus_constants) + .with_block(genesis.clone()) + .build(); + let backend = create_test_db(); + let orphan_validator = OrphanBlockValidator::new(rules.clone(), false, factories.clone()); + let validators = Validators::new( + BodyOnlyValidator::default(), + HeaderValidator::new(rules.clone()), + orphan_validator.clone(), + ); + let db = BlockchainDatabase::new( + backend, + rules.clone(), + validators, + BlockchainDatabaseConfig::default(), + DifficultyCalculator::new(rules.clone(), Default::default()), + false, + ) + .unwrap(); + // we have created the blockchain, lets create a second valid block + + let (tx01, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[1].clone()], to: vec![20_000 * uT], fee: 10*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx02, _, _) = spend_utxos( + txn_schema!(from: 
vec![outputs[2].clone()], to: vec![40_000 * uT], fee: 20*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx03, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[3].clone()], to: vec![40_000 * uT], fee: 20*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx04, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[3].clone()], to: vec![50_000 * uT], fee: 20*uT, lock: 2, features: OutputFeatures::default()), + ); + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + // this block should be okay + assert!(orphan_validator.validate(&new_block).is_ok()); + + // lets break the block weight + let (template, _) = + chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone(), tx03], &rules, &factories); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(orphan_validator.validate(&new_block).is_err()); + + // lets break the sorting + let (mut template, _) = + chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let outputs = vec![template.body.outputs()[1].clone(), template.body.outputs()[2].clone()]; + template.body = AggregateBody::new(template.body.inputs().clone(), outputs, template.body.kernels().clone()); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(orphan_validator.validate(&new_block).is_err()); + + // lets break spend rules + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx04.clone()], &rules, &factories); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(orphan_validator.validate(&new_block).is_err()); + + // lets break the coinbase value + let (coinbase_utxo, coinbase_kernel, _) = create_coinbase( + &factories, + 10000000.into(), + 1 + rules.consensus_constants(0).coinbase_lock_height(), + ); + let template = chain_block_with_coinbase( + &genesis, + vec![tx01.clone(), tx02.clone()], + coinbase_utxo, + coinbase_kernel, + &rules, + ); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(orphan_validator.validate(&new_block).is_err()); + + // lets break the coinbase lock height + let (coinbase_utxo, coinbase_kernel, _) = create_coinbase( + &factories, + rules.get_block_reward_at(1) + tx01.body.get_total_fee() + tx02.body.get_total_fee(), + 1, + ); + let template = chain_block_with_coinbase( + &genesis, + vec![tx01.clone(), tx02.clone()], + coinbase_utxo, + coinbase_kernel, + &rules, + ); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(orphan_validator.validate(&new_block).is_err()); + + // lets break accounting + let (mut template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01, tx02], &rules, &factories); + let outputs = vec![template.body.outputs()[1].clone(), tx04.body.outputs()[1].clone()]; + template.body = AggregateBody::new(template.body.inputs().clone(), outputs, template.body.kernels().clone()); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(orphan_validator.validate(&new_block).is_err()); +} + +#[test] +fn test_orphan_body_validation() { + let factories = CryptoFactories::default(); + let network = Network::Weatherwax; + // we don't want localnet's difficulty of 1 or Weatherwax's full mined difficulty, but we do want some difficulty. 
+ let sha3_constants = PowAlgorithmConstants { + max_target_time: 1800, + min_difficulty: 10.into(), + max_difficulty: u64::MAX.into(), + target_time: 300, + }; + let consensus_constants = ConsensusConstantsBuilder::new(network) + .clear_proof_of_work() + .add_proof_of_work(PowAlgorithm::Sha3, sha3_constants) + .build(); + let (genesis, outputs) = create_genesis_block_with_utxos(&factories, &[T, T, T], &consensus_constants); + let network = Network::LocalNet; + let rules = ConsensusManager::builder(network) + .with_consensus_constants(consensus_constants) + .with_block(genesis.clone()) + .build(); + let backend = create_test_db(); + let body_only_validator = BodyOnlyValidator::default(); + let header_validator = HeaderValidator::new(rules.clone()); + let validators = Validators::new( + BodyOnlyValidator::default(), + HeaderValidator::new(rules.clone()), + OrphanBlockValidator::new(rules.clone(), false, factories.clone()), + ); + let db = BlockchainDatabase::new( + backend, + rules.clone(), + validators, + BlockchainDatabaseConfig::default(), + DifficultyCalculator::new(rules.clone(), Default::default()), + false, + ) + .unwrap(); + // we have created the blockchain, lets create a second valid block + + let (tx01, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[1].clone()], to: vec![20_000 * uT], fee: 10*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx02, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[2].clone()], to: vec![40_000 * uT], fee: 20*uT, lock: 0, features: OutputFeatures::default()), + ); + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01, tx02], &rules, &factories); + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + new_block.header.nonce = OsRng.next_u64(); + + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + let difficulty_calculator = DifficultyCalculator::new(rules.clone(), RandomXFactory::default()); + let achieved_target_diff = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .unwrap(); + let accumulated_data = BlockHeaderAccumulatedData::builder(genesis.accumulated_data()) + .with_hash(new_block.hash()) + .with_achieved_target_difficulty(achieved_target_diff) + .with_total_kernel_offset(new_block.header.total_kernel_offset.clone()) + .build() + .unwrap(); + + let chain_block = ChainBlock::try_construct(Arc::new(new_block), accumulated_data).unwrap(); + let metadata = db.get_chain_metadata().unwrap(); + // this block should be okay + assert!(body_only_validator + .validate_body_for_valid_orphan(&chain_block, &*db.db_read_access().unwrap(), &metadata) + .is_ok()); + + // lets break the chain sequence + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + new_block.header.nonce = OsRng.next_u64(); + new_block.header.height = 3; + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + let achieved_target_diff = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .unwrap(); + let accumulated_data = BlockHeaderAccumulatedData::builder(genesis.accumulated_data()) + .with_hash(new_block.hash()) + .with_achieved_target_difficulty(achieved_target_diff) + .with_total_kernel_offset(new_block.header.total_kernel_offset.clone()) + .build() + .unwrap(); + + let chain_block = ChainBlock::try_construct(Arc::new(new_block), accumulated_data).unwrap(); + let metadata = db.get_chain_metadata().unwrap(); + 
assert!(body_only_validator + .validate_body_for_valid_orphan(&chain_block, &*db.db_read_access().unwrap(), &metadata) + .is_err()); + + // lets have unknown inputs; + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + let test_params1 = TestParams::new(); + let test_params2 = TestParams::new(); + // We dont need proper utxo's with signatures as the post_orphan validator does not check accounting balance + + // signatures. + let unblinded_utxo = + create_unblinded_output(script!(Nop), OutputFeatures::default(), test_params1, outputs[1].value); + let unblinded_utxo2 = + create_unblinded_output(script!(Nop), OutputFeatures::default(), test_params2, outputs[2].value); + let inputs = vec![ + unblinded_utxo.as_transaction_input(&factories.commitment).unwrap(), + unblinded_utxo2.as_transaction_input(&factories.commitment).unwrap(), + ]; + new_block.body = AggregateBody::new(inputs, template.body.outputs().clone(), template.body.kernels().clone()); + new_block.body.sort(); + new_block.header.nonce = OsRng.next_u64(); + + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + let difficulty_calculator = DifficultyCalculator::new(rules.clone(), RandomXFactory::default()); + let achieved_target_diff = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .unwrap(); + let accumulated_data = BlockHeaderAccumulatedData::builder(genesis.accumulated_data()) + .with_hash(new_block.hash()) + .with_achieved_target_difficulty(achieved_target_diff) + .with_total_kernel_offset(new_block.header.total_kernel_offset.clone()) + .build() + .unwrap(); + + let chain_block = ChainBlock::try_construct(Arc::new(new_block), accumulated_data).unwrap(); + let metadata = db.get_chain_metadata().unwrap(); + assert!(body_only_validator + .validate_body_for_valid_orphan(&chain_block, &*db.db_read_access().unwrap(), &metadata) + .is_err()); + + // lets check duplicate txos + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + // We dont need proper utxo's with signatures as the post_orphan validator does not check accounting balance + + // signatures. 
+ let inputs = vec![new_block.body.inputs()[0].clone(), new_block.body.inputs()[0].clone()]; + new_block.body = AggregateBody::new(inputs, template.body.outputs().clone(), template.body.kernels().clone()); + new_block.body.sort(); + new_block.header.nonce = OsRng.next_u64(); + + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + let difficulty_calculator = DifficultyCalculator::new(rules.clone(), RandomXFactory::default()); + let achieved_target_diff = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .unwrap(); + let accumulated_data = BlockHeaderAccumulatedData::builder(genesis.accumulated_data()) + .with_hash(new_block.hash()) + .with_achieved_target_difficulty(achieved_target_diff) + .with_total_kernel_offset(new_block.header.total_kernel_offset.clone()) + .build() + .unwrap(); + + let chain_block = ChainBlock::try_construct(Arc::new(new_block), accumulated_data).unwrap(); + let metadata = db.get_chain_metadata().unwrap(); + assert!(body_only_validator + .validate_body_for_valid_orphan(&chain_block, &*db.db_read_access().unwrap(), &metadata) + .is_err()); + + // check mmr roots + let mut new_block = db.prepare_block_merkle_roots(template).unwrap(); + new_block.header.output_mr = Vec::new(); + new_block.header.nonce = OsRng.next_u64(); + + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + let difficulty_calculator = DifficultyCalculator::new(rules, RandomXFactory::default()); + let achieved_target_diff = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .unwrap(); + let accumulated_data = BlockHeaderAccumulatedData::builder(genesis.accumulated_data()) + .with_hash(new_block.hash()) + .with_achieved_target_difficulty(achieved_target_diff) + .with_total_kernel_offset(new_block.header.total_kernel_offset.clone()) + .build() + .unwrap(); + + let chain_block = ChainBlock::try_construct(Arc::new(new_block), accumulated_data).unwrap(); + let metadata = db.get_chain_metadata().unwrap(); + assert!(body_only_validator + .validate_body_for_valid_orphan(&chain_block, &*db.db_read_access().unwrap(), &metadata) + .is_err()); +} + +#[test] +fn test_header_validation() { + let factories = CryptoFactories::default(); + let network = Network::Weatherwax; + // we dont want localnet's 1 difficulty or the full mined difficulty of weather wax but we want some. 
+ let sha3_constants = PowAlgorithmConstants { + max_target_time: 1800, + min_difficulty: 20.into(), + max_difficulty: u64::MAX.into(), + target_time: 300, + }; + let consensus_constants = ConsensusConstantsBuilder::new(network) + .clear_proof_of_work() + .add_proof_of_work(PowAlgorithm::Sha3, sha3_constants) + .build(); + let (genesis, outputs) = create_genesis_block_with_utxos(&factories, &[T, T, T], &consensus_constants); + let network = Network::LocalNet; + let rules = ConsensusManager::builder(network) + .with_consensus_constants(consensus_constants) + .with_block(genesis.clone()) + .build(); + let backend = create_test_db(); + let header_validator = HeaderValidator::new(rules.clone()); + let validators = Validators::new( + BodyOnlyValidator::default(), + HeaderValidator::new(rules.clone()), + OrphanBlockValidator::new(rules.clone(), false, factories.clone()), + ); + let db = BlockchainDatabase::new( + backend, + rules.clone(), + validators, + BlockchainDatabaseConfig::default(), + DifficultyCalculator::new(rules.clone(), Default::default()), + false, + ) + .unwrap(); + // we have created the blockchain, lets create a second valid block + + let (tx01, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[1].clone()], to: vec![20_000 * uT], fee: 10*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx02, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[2].clone()], to: vec![40_000 * uT], fee: 20*uT, lock: 0, features: OutputFeatures::default()), + ); + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01, tx02], &rules, &factories); + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + new_block.header.nonce = OsRng.next_u64(); + + find_header_with_achieved_difficulty(&mut new_block.header, 20.into()); + let difficulty_calculator = DifficultyCalculator::new(rules.clone(), RandomXFactory::default()); + assert!(header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .is_ok()); + + // lets break the ftl rules + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + new_block.header.nonce = OsRng.next_u64(); + // set the timestamp to the max ftl plus 10 seconds; even after mining it should still exceed the ftl when checked + new_block.header.timestamp = rules.consensus_constants(0).ftl().increase(10); + find_header_with_achieved_difficulty(&mut new_block.header, 20.into()); + assert!(header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .is_err()); + + // lets break the median rules + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + new_block.header.nonce = OsRng.next_u64(); + // set the timestamp 100 seconds before the genesis timestamp so it falls below the median of the previous timestamps + new_block.header.timestamp = genesis.header().timestamp.checked_sub(100.into()).unwrap(); + find_header_with_achieved_difficulty(&mut new_block.header, 20.into()); + assert!(header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .is_err()); + + // lets break difficulty + let mut new_block = db.prepare_block_merkle_roots(template).unwrap(); + new_block.header.nonce = OsRng.next_u64(); + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + let mut result = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .is_err(); + 
new_block.header.nonce = OsRng.next_u64(); + let mut counter = 0; + while counter < 10 && !result { + counter += 1; + new_block.header.nonce = OsRng.next_u64(); + find_header_with_achieved_difficulty(&mut new_block.header, 10.into()); + result = header_validator + .validate( + &*db.db_read_access().unwrap(), + &new_block.header, + &difficulty_calculator, + ) + .is_err(); + } + assert!(result); +} + +#[test] +fn test_block_sync_body_validator() { + let factories = CryptoFactories::default(); + let network = Network::Weatherwax; + let consensus_constants = ConsensusConstantsBuilder::new(network) + .with_max_block_transaction_weight(80) + .build(); + let (genesis, outputs) = create_genesis_block_with_utxos(&factories, &[T, T, T], &consensus_constants); + let network = Network::LocalNet; + let rules = ConsensusManager::builder(network) + .with_consensus_constants(consensus_constants) + .with_block(genesis.clone()) + .build(); + let backend = create_test_db(); + let validator = BlockValidator::new(rules.clone(), false, factories.clone()); + let validators = Validators::new( + BodyOnlyValidator::default(), + HeaderValidator::new(rules.clone()), + OrphanBlockValidator::new(rules.clone(), false, factories.clone()), + ); + let db = BlockchainDatabase::new( + backend, + rules.clone(), + validators, + BlockchainDatabaseConfig::default(), + DifficultyCalculator::new(rules.clone(), Default::default()), + false, + ) + .unwrap(); + // we have created the blockchain, lets create a second valid block + + let (tx01, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[1].clone()], to: vec![20_000 * uT], fee: 10*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx02, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[2].clone()], to: vec![40_000 * uT], fee: 20*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx03, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[3].clone()], to: vec![40_000 * uT], fee: 20*uT, lock: 0, features: OutputFeatures::default()), + ); + let (tx04, _, _) = spend_utxos( + txn_schema!(from: vec![outputs[3].clone()], to: vec![50_000 * uT], fee: 20*uT, lock: 2, features: OutputFeatures::default()), + ); + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + // this block should be okay + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_ok()); + + // lets break the block weight + let (template, _) = + chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone(), tx03], &rules, &factories); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets break spend rules + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx04.clone()], &rules, &factories); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets break the sorting + let (mut template, _) = + chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let output = vec![template.body.outputs()[1].clone(), template.body.outputs()[2].clone()]; + template.body = AggregateBody::new(template.body.inputs().clone(), output, template.body.kernels().clone()); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + 
assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets have unknown inputs + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + let test_params1 = TestParams::new(); + let test_params2 = TestParams::new(); + // We don't need proper UTXOs with signatures as the post_orphan validator does not check accounting balance + + // signatures. + let unblinded_utxo = + create_unblinded_output(script!(Nop), OutputFeatures::default(), test_params1, outputs[1].value); + let unblinded_utxo2 = + create_unblinded_output(script!(Nop), OutputFeatures::default(), test_params2, outputs[2].value); + let inputs = vec![ + unblinded_utxo.as_transaction_input(&factories.commitment).unwrap(), + unblinded_utxo2.as_transaction_input(&factories.commitment).unwrap(), + ]; + new_block.body = AggregateBody::new(inputs, template.body.outputs().clone(), template.body.kernels().clone()); + new_block.body.sort(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets check duplicate txos + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let mut new_block = db.prepare_block_merkle_roots(template.clone()).unwrap(); + // We don't need proper UTXOs with signatures as the post_orphan validator does not check accounting balance + + // signatures. + let inputs = vec![new_block.body.inputs()[0].clone(), new_block.body.inputs()[0].clone()]; + new_block.body = AggregateBody::new(inputs, template.body.outputs().clone(), template.body.kernels().clone()); + new_block.body.sort(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets break the coinbase value + let (coinbase_utxo, coinbase_kernel, _) = create_coinbase( + &factories, + 10000000.into(), + 1 + rules.consensus_constants(0).coinbase_lock_height(), + ); + let template = chain_block_with_coinbase( + &genesis, + vec![tx01.clone(), tx02.clone()], + coinbase_utxo, + coinbase_kernel, + &rules, + ); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets break the coinbase lock height + let (coinbase_utxo, coinbase_kernel, _) = create_coinbase( + &factories, + rules.get_block_reward_at(1) + tx01.body.get_total_fee() + tx02.body.get_total_fee(), + 1, + ); + let template = chain_block_with_coinbase( + &genesis, + vec![tx01.clone(), tx02.clone()], + coinbase_utxo, + coinbase_kernel, + &rules, + ); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets break accounting + let (mut template, _) = + chain_block_with_new_coinbase(&genesis, vec![tx01.clone(), tx02.clone()], &rules, &factories); + let outputs = vec![template.body.outputs()[1].clone(), tx04.body.outputs()[1].clone()]; + template.body = AggregateBody::new(template.body.inputs().clone(), outputs, template.body.kernels().clone()); + let new_block = db.prepare_block_merkle_roots(template).unwrap(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); + + // lets break the mmr root + let (template, _) = chain_block_with_new_coinbase(&genesis, vec![tx01, tx02], &rules, &factories); + let mut new_block = 
db.prepare_block_merkle_roots(template).unwrap(); + new_block.header.output_mr = Vec::new(); + assert!(validator + .validate_body(&new_block, &*db.db_read_access().unwrap()) + .is_err()); +} diff --git a/integration_tests/helpers/transactionBuilder.js b/integration_tests/helpers/transactionBuilder.js index a9fef132ca..6025c82dda 100644 --- a/integration_tests/helpers/transactionBuilder.js +++ b/integration_tests/helpers/transactionBuilder.js @@ -359,7 +359,7 @@ class TransactionBuilder { const excess = tari_crypto.commit(privateKey, BigInt(0)); this.kv.new_key("nonce"); const public_nonce = this.kv.public_key("nonce"); - const challenge = this.buildChallenge(public_nonce, 0, lockHeight); + const challenge = this.buildChallenge(public_nonce, 0, 0); const private_nonce = this.kv.private_key("nonce"); const sig = tari_crypto.sign_challenge_with_nonce( privateKey, @@ -420,7 +420,7 @@ class TransactionBuilder { { features: 1, fee: 0, - lock_height: lockHeight, + lock_height: 0, excess: Buffer.from(excess.commitment, "hex"), excess_sig: { public_nonce: Buffer.from(sig.public_nonce, "hex"),