This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Add prospective parachains subsystem tests (#6454)
* Add prospective parachains subsystem test

* Add `should_do_no_work_if_async_backing_disabled_for_leaf` test

* Implement `activate_leaf` helper, up to getting ancestry

* Finish implementing `activate_leaf`

* Small refactor in `activate_leaf`

* Get `CandidateSeconded` working

* Finish `send_candidate_and_check_if_found` test

* Refactor; send more leaves & candidates

* Refactor test

* Implement `check_candidate_parent_leaving_view` test

* Start work on `check_candidate_on_multiple_forks` test

* Don’t associate specific parachains with leaf

* Finish `correctly_updates_leaves` test

* Fix cycle due to reused head data

* Fix `check_backable_query` test

* Fix `check_candidate_on_multiple_forks` test

* Add `check_depth_and_pvd_queries` test

* Address review comments

* Remove TODO

* add a new index for output head data to candidate storage

* Resolve test TODOs

* Fix compile errors

* test candidate storage pruning, make sure new index is cleaned up

---------

Co-authored-by: Robert Habermeier <rphmeier@gmail.com>
mrcnski and rphmeier committed Jan 30, 2023
1 parent d2e6081 commit 11556a6
Showing 10 changed files with 1,407 additions and 28 deletions.
7 changes: 7 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion node/core/backing/src/tests/prospective_parachains.rs
@@ -317,7 +317,6 @@ fn seconding_sanity_check_allowed() {
const LEAF_A_ANCESTRY_LEN: BlockNumber = 3;
let para_id = test_state.chain_ids[0];

let leaf_b_hash = Hash::from_low_u64_be(128);
// `a` is grandparent of `b`.
let leaf_a_hash = Hash::from_low_u64_be(130);
let leaf_a_parent = get_parent_hash(leaf_a_hash);
@@ -333,6 +332,7 @@ fn seconding_sanity_check_allowed() {
const LEAF_B_BLOCK_NUMBER: BlockNumber = LEAF_A_BLOCK_NUMBER + 2;
const LEAF_B_ANCESTRY_LEN: BlockNumber = 4;

let leaf_b_hash = Hash::from_low_u64_be(128);
let activated = ActivatedLeaf {
hash: leaf_b_hash,
number: LEAF_B_BLOCK_NUMBER,
3 changes: 1 addition & 2 deletions node/core/dispute-coordinator/src/scraping/tests.rs
@@ -135,8 +135,7 @@ fn make_candidate_receipt(relay_parent: Hash) -> CandidateReceipt {
para_head: zeros,
validation_code_hash: zeros.into(),
};
let candidate = CandidateReceipt { descriptor, commitments_hash: zeros };
candidate
CandidateReceipt { descriptor, commitments_hash: zeros }
}

/// Get a dummy `ActivatedLeaf` for a given block number.
7 changes: 7 additions & 0 deletions node/core/prospective-parachains/Cargo.toml
@@ -19,7 +19,14 @@ polkadot-node-subsystem-util = { path = "../../subsystem-util" }

[dev-dependencies]
assert_matches = "1"
polkadot-node-subsystem-test-helpers = { path = "../../subsystem-test-helpers" }
polkadot-node-subsystem-types = { path = "../../subsystem-types" }
polkadot-primitives-test-helpers = { path = "../../../primitives/test-helpers" }
sp-core = { git = "https://github.com/paritytech/substrate", branch = "master" }
sc-keystore = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-application-crypto = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-keyring = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-keystore = { git = "https://github.com/paritytech/substrate", branch = "master" }

[features]
# If not enabled, the dispute coordinator will do nothing.
70 changes: 53 additions & 17 deletions node/core/prospective-parachains/src/fragment_tree.rs
@@ -83,17 +83,24 @@ pub enum CandidateStorageInsertionError {
}

pub(crate) struct CandidateStorage {
// Index from parent head hash to candidate hashes.
// Index from head data hash to candidate hashes with that head data as a parent.
by_parent_head: HashMap<Hash, HashSet<CandidateHash>>,

// Index from head data hash to candidate hashes outputting that head data.
by_output_head: HashMap<Hash, HashSet<CandidateHash>>,

// Index from candidate hash to fragment node.
by_candidate_hash: HashMap<CandidateHash, CandidateEntry>,
}

impl CandidateStorage {
/// Create a new `CandidateStorage`.
pub fn new() -> Self {
CandidateStorage { by_parent_head: HashMap::new(), by_candidate_hash: HashMap::new() }
CandidateStorage {
by_parent_head: HashMap::new(),
by_output_head: HashMap::new(),
by_candidate_hash: HashMap::new(),
}
}

/// Introduce a new candidate.
@@ -113,7 +120,7 @@ impl CandidateStorage {
}

let parent_head_hash = persisted_validation_data.parent_head.hash();

let output_head_hash = candidate.commitments.head_data.hash();
let entry = CandidateEntry {
candidate_hash,
relay_parent: candidate.descriptor.relay_parent,
@@ -129,6 +136,7 @@ impl CandidateStorage {
};

self.by_parent_head.entry(parent_head_hash).or_default().insert(candidate_hash);
self.by_output_head.entry(output_head_hash).or_default().insert(candidate_hash);
// sanity-checked already.
self.by_candidate_hash.insert(candidate_hash, entry);

@@ -184,18 +192,32 @@ impl CandidateStorage {
self.by_parent_head.retain(|_parent, children| {
children.retain(|h| pred(h));
!children.is_empty()
})
});
self.by_output_head.retain(|_output, candidates| {
candidates.retain(|h| pred(h));
!candidates.is_empty()
});
}

/// Get head-data by hash.
pub(crate) fn head_data_by_hash(&self, hash: &Hash) -> Option<&HeadData> {
// Get some candidate which has a parent-head with the same hash as requested.
let a_candidate_hash = self.by_parent_head.get(hash).and_then(|m| m.iter().next())?;

// Extract the full parent head from that candidate's `PersistedValidationData`.
self.by_candidate_hash
.get(a_candidate_hash)
.map(|e| &e.candidate.persisted_validation_data.parent_head)
// First, search for candidates outputting this head data and extract the head data
// from their commitments if they exist.
//
// Otherwise, search for candidates building upon this head data and extract the head data
// from their persisted validation data if they exist.
self.by_output_head
.get(hash)
.and_then(|m| m.iter().next())
.and_then(|a_candidate| self.by_candidate_hash.get(a_candidate))
.map(|e| &e.candidate.commitments.head_data)
.or_else(|| {
self.by_parent_head
.get(hash)
.and_then(|m| m.iter().next())
.and_then(|a_candidate| self.by_candidate_hash.get(a_candidate))
.map(|e| &e.candidate.persisted_validation_data.parent_head)
})
}

fn iter_para_children<'a>(
@@ -213,6 +235,11 @@ impl CandidateStorage {
fn get(&'_ self, candidate_hash: &CandidateHash) -> Option<&'_ CandidateEntry> {
self.by_candidate_hash.get(candidate_hash)
}

#[cfg(test)]
pub fn len(&self) -> (usize, usize) {
(self.by_parent_head.len(), self.by_candidate_hash.len())
}
}

/// The state of a candidate.
@@ -230,6 +257,7 @@ enum CandidateState {
Backed,
}

#[derive(Debug)]
struct CandidateEntry {
candidate_hash: CandidateHash,
relay_parent: Hash,
@@ -251,7 +279,12 @@ pub(crate) struct Scope {
/// An error variant indicating that ancestors provided to a scope
/// had unexpected order.
#[derive(Debug)]
pub struct UnexpectedAncestor;
pub struct UnexpectedAncestor {
/// The block number that this error occurred at.
pub number: BlockNumber,
/// The previous seen block number, which did not match `number`.
pub prev: BlockNumber,
}

impl Scope {
/// Define a new [`Scope`].
@@ -283,9 +316,9 @@ impl Scope {
let mut prev = relay_parent.number;
for ancestor in ancestors {
if prev == 0 {
return Err(UnexpectedAncestor)
return Err(UnexpectedAncestor { number: ancestor.number, prev })
} else if ancestor.number != prev - 1 {
return Err(UnexpectedAncestor)
return Err(UnexpectedAncestor { number: ancestor.number, prev })
} else if prev == base_constraints.min_relay_parent_number {
break
} else {
@@ -889,8 +922,8 @@ mod tests {
let base_constraints = make_constraints(8, vec![8, 9], vec![1, 2, 3].into());

assert_matches!(
Scope::with_ancestors(para_id, relay_parent, base_constraints, max_depth, ancestors,),
Err(UnexpectedAncestor)
Scope::with_ancestors(para_id, relay_parent, base_constraints, max_depth, ancestors),
Err(UnexpectedAncestor { number: 8, prev: 10 })
);
}

@@ -914,7 +947,7 @@ mod tests {

assert_matches!(
Scope::with_ancestors(para_id, relay_parent, base_constraints, max_depth, ancestors,),
Err(UnexpectedAncestor)
Err(UnexpectedAncestor { number: 99999, prev: 0 })
);
}

@@ -991,16 +1024,19 @@ mod tests {
);

let candidate_hash = candidate.hash();
let output_head_hash = candidate.commitments.head_data.hash();
let parent_head_hash = pvd.parent_head.hash();

storage.add_candidate(candidate, pvd).unwrap();
storage.retain(|_| true);
assert!(storage.contains(&candidate_hash));
assert_eq!(storage.iter_para_children(&parent_head_hash).count(), 1);
assert!(storage.head_data_by_hash(&output_head_hash).is_some());

storage.retain(|_| false);
assert!(!storage.contains(&candidate_hash));
assert_eq!(storage.iter_para_children(&parent_head_hash).count(), 0);
assert!(storage.head_data_by_hash(&output_head_hash).is_none());
}

#[test]
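For orientation only (not part of the commit), below is a minimal, self-contained sketch of the indexing scheme the `fragment_tree.rs` changes introduce: a secondary map keyed by output head-data hash kept alongside the existing parent-head map, a lookup that prefers the output index and falls back to the parent index, and a `retain` that prunes both maps together so no dangling entries survive. All names and types here are simplified stand-ins, not the real `CandidateStorage` API.

// --- illustrative sketch (not from the diff) ---
use std::collections::{HashMap, HashSet};

// Simplified stand-ins; the real code keys by `Hash`/`CandidateHash` and stores full entries.
type HeadHash = u64;
type CandidateId = u64;

struct Candidate {
    id: CandidateId,
    parent_head: HeadHash, // hash of the head data this candidate builds on
    output_head: HeadHash, // hash of the head data this candidate produces
}

#[derive(Default)]
struct Storage {
    // Primary index: parent head-data hash -> candidates building on that head data.
    by_parent_head: HashMap<HeadHash, HashSet<CandidateId>>,
    // Secondary index (the new one): output head-data hash -> candidates producing it.
    by_output_head: HashMap<HeadHash, HashSet<CandidateId>>,
    by_id: HashMap<CandidateId, Candidate>,
}

impl Storage {
    fn add(&mut self, c: Candidate) {
        self.by_parent_head.entry(c.parent_head).or_default().insert(c.id);
        self.by_output_head.entry(c.output_head).or_default().insert(c.id);
        self.by_id.insert(c.id, c);
    }

    // Lookup order mirrors `head_data_by_hash`: prefer a candidate that outputs this
    // head data, otherwise fall back to one that builds on it.
    fn knows_head_data(&self, head: HeadHash) -> bool {
        self.by_output_head.contains_key(&head) || self.by_parent_head.contains_key(&head)
    }

    // Both indexes must be pruned together with the main map, otherwise lookups would
    // report head data for candidates that no longer exist.
    fn retain(&mut self, pred: impl Fn(&CandidateId) -> bool) {
        self.by_id.retain(|id, _| pred(id));
        self.by_parent_head.retain(|_, set| {
            set.retain(|id| pred(id));
            !set.is_empty()
        });
        self.by_output_head.retain(|_, set| {
            set.retain(|id| pred(id));
            !set.is_empty()
        });
    }
}

fn main() {
    let mut storage = Storage::default();
    storage.add(Candidate { id: 1, parent_head: 10, output_head: 20 });
    assert!(storage.knows_head_data(20)); // found via the new output-head index
    assert!(storage.knows_head_data(10)); // found via the parent-head index
    storage.retain(|_| false);
    assert!(!storage.knows_head_data(20)); // pruned from both indexes
    assert!(!storage.knows_head_data(10));
}
// --- end of sketch ---

This is the same shape exercised by the new pruning test above: after `retain(|_| false)`, both `iter_para_children` and `head_data_by_hash` must come up empty.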
4 changes: 3 additions & 1 deletion node/core/prospective-parachains/src/lib.rs
@@ -58,6 +58,8 @@ use crate::{

mod error;
mod fragment_tree;
#[cfg(test)]
mod tests;

const LOG_TARGET: &str = "parachain::prospective-parachains";

@@ -684,7 +686,7 @@ async fn fetch_ancestry<Context>(
let hashes = rx.map_err(JfyiError::ChainApiRequestCanceled).await??;
let mut block_info = Vec::with_capacity(hashes.len());
for hash in hashes {
match fetch_block_info(ctx, relay_hash).await? {
match fetch_block_info(ctx, hash).await? {
None => {
gum::warn!(
target: LOG_TARGET,
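The one-line `lib.rs` fix above replaces `fetch_block_info(ctx, relay_hash)` with `fetch_block_info(ctx, hash)`, so block info is fetched for each ancestor rather than repeatedly for the tip. Below is a minimal sketch of the corrected loop shape, using hypothetical synchronous stand-ins in place of the subsystem's async ChainApi plumbing.

// --- illustrative sketch (not from the diff) ---
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct BlockHash(u64);

struct BlockInfo {
    hash: BlockHash,
    number: u32,
}

// Hypothetical stand-in: the real code awaits a ChainApi `fetch_block_info(ctx, hash)`
// request, which may return `None` for unknown blocks.
fn fetch_block_info(hash: BlockHash) -> Option<BlockInfo> {
    Some(BlockInfo { hash, number: hash.0 as u32 })
}

// Collect block info for each ancestor hash returned by the chain API.
fn fetch_ancestry_info(ancestor_hashes: &[BlockHash]) -> Vec<BlockInfo> {
    let mut block_info = Vec::with_capacity(ancestor_hashes.len());
    for hash in ancestor_hashes {
        // The fix: query by the loop's `hash`, not by the tip's `relay_hash`,
        // otherwise every entry would describe the same block.
        match fetch_block_info(*hash) {
            None => continue, // the real code logs a warning and keeps going
            Some(info) => block_info.push(info),
        }
    }
    block_info
}

fn main() {
    let ancestors = [BlockHash(3), BlockHash(2), BlockHash(1)];
    let infos = fetch_ancestry_info(&ancestors);
    assert_eq!(infos.len(), 3);
    assert_eq!(infos[0].hash, BlockHash(3));
    assert_eq!(infos[2].number, 1);
}
// --- end of sketch ---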
(Diff for the remaining changed files not shown.)
