
Commit

test: cucumber forced sync
Cifko committed Aug 25, 2021
1 parent db4c0b9 commit 19ef588
Showing 7 changed files with 69 additions and 16 deletions.
@@ -480,8 +480,8 @@ impl<'a, B: BlockchainBackend + 'static> HeaderSynchronizer<'a, B> {
) -> Result<(), BlockHeaderSyncError> {
const COMMIT_EVERY_N_HEADERS: usize = 1000;

// Peer returned less than the max headers. This indicates that there are no further headers to request.
if self.header_validator.valid_headers().len() < NUM_INITIAL_HEADERS_TO_REQUEST as usize {
// Peer returned no more than the max headers. This indicates that there are no further headers to request.
if self.header_validator.valid_headers().len() <= NUM_INITIAL_HEADERS_TO_REQUEST as usize {
debug!(target: LOG_TARGET, "No further headers to download");
if !self.pending_chain_has_higher_pow(&split_info.local_tip_header)? {
return Err(BlockHeaderSyncError::WeakerChain);
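The comparison change above flips a boundary case: a response containing exactly NUM_INITIAL_HEADERS_TO_REQUEST headers is now also treated as the end of the peer's headers, not only a shorter-than-requested response. A minimal sketch of that predicate alone (the constant's value here is an assumption for illustration, not taken from this diff):

```rust
// Illustrative sketch, not the real HeaderSynchronizer code.
const NUM_INITIAL_HEADERS_TO_REQUEST: u64 = 1000; // assumed value for the example

fn no_further_headers(valid_header_count: usize) -> bool {
    // `<=` also stops on a maximally sized response;
    // the previous `<` only stopped on a short (partial) response.
    valid_header_count <= NUM_INITIAL_HEADERS_TO_REQUEST as usize
}

#[test]
fn max_sized_response_ends_the_request_loop() {
    assert!(no_further_headers(999)); // short response: end of headers before and after the change
    assert!(no_further_headers(1000)); // exactly the max: newly treated as the end
    assert!(!no_further_headers(1001)); // more than the max still means further headers to request
}
```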
12 changes: 7 additions & 5 deletions common/src/configuration/global.rs
@@ -401,11 +401,13 @@ fn convert_node_config(

// block sync
let key = config_string("base_node", &net_str, "force_sync_peers");
let force_sync_peers = optional(
cfg.get_array(&key)
.map(|values| values.into_iter().map(|v| v.into_str().unwrap()).collect()),
)?
.unwrap_or_default();
let force_sync_peers = match cfg.get_array(&key) {
Ok(peers) => peers.into_iter().map(|v| v.into_str().unwrap()).collect(),
Err(..) => match cfg.get_str(&key) {
Ok(s) => s.split(',').map(|v| v.to_string()).collect(),
Err(..) => vec![],
},
};

// Liveness auto ping interval
let key = config_string("base_node", &net_str, "auto_ping_interval");
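The rewritten force_sync_peers handling accepts either a TOML array or a single comma-separated string, and falls back to an empty list when the key is absent. A self-contained sketch of the same fallback, assuming the `config` crate used above (the helper name is invented for illustration):

```rust
use config::Config;

/// Hypothetical helper mirroring the fallback above: prefer an array value,
/// fall back to a comma-separated string, default to an empty list.
fn read_force_sync_peers(cfg: &Config, key: &str) -> Vec<String> {
    match cfg.get_array(key) {
        Ok(peers) => peers.into_iter().map(|v| v.into_str().unwrap()).collect(),
        Err(..) => match cfg.get_str(key) {
            Ok(s) => s.split(',').map(|v| v.to_string()).collect(),
            Err(..) => vec![],
        },
    }
}
```

The string branch appears to be what lets the integration tests below supply the whole peer list as one comma-separated TARI_BASE_NODE__LOCALNET__FORCE_SYNC_PEERS environment variable.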
29 changes: 25 additions & 4 deletions integration_tests/features/Sync.feature
@@ -25,7 +25,7 @@ Feature: Block Sync
# All nodes should sync to tip
Then all nodes are at height 20

@critical
@critical
Scenario: Pruned mode simple sync
Given I have 1 seed nodes
Given I have a SHA3 miner NODE1 connected to all seed nodes
@@ -36,7 +36,7 @@
Given I have a pruned node PNODE1 connected to node NODE1 with pruning horizon set to 5
Then all nodes are at height 20

@critical
@critical
Scenario: When a new node joins the network, it should receive all peers
Given I have 10 seed nodes
And I have a base node NODE1 connected to all seed nodes
@@ -103,7 +103,7 @@ Feature: Block Sync
When I mine 15 blocks on PNODE2
Then all nodes are at height 23

Scenario: Node should not sync from pruned node
Scenario: Node should not sync from pruned node
Given I have a base node NODE1 connected to all seed nodes
Given I have a pruned node PNODE1 connected to node NODE1 with pruning horizon set to 5
When I mine 40 blocks on NODE1
@@ -152,7 +152,7 @@
| X1 | Y1 | SYNC_TIME |
| 1000 | 50 | 60 |

Scenario: Pruned mode network only
Scenario: Pruned mode network only
Given I have a base node NODE1 connected to all seed nodes
Given I have a pruned node PNODE1 connected to node NODE1 with pruning horizon set to 5
Given I have a pruned node PNODE2 connected to node PNODE1 with pruning horizon set to 5
@@ -167,3 +167,24 @@ Scenario: Pruned mode network only
Then node PNODE2 is at height 20
Given I have a pruned node PNODE3 connected to node PNODE1 with pruning horizon set to 5
Then node PNODE3 is at height 20

Scenario Outline: Force sync many nodes against one peer
Given I have a base node BASE
And I have a SHA3 miner MINER connected to node BASE
And mining node MINER mines <BLOCKS> blocks
And I have <NODES> base nodes with pruning horizon <PRUNE_HORIZON> force syncing on node BASE
When I wait <SYNC_TIME> seconds
Then all nodes are at height <BLOCKS>

@critical @long-running
Examples:
| NODES | BLOCKS | PRUNE_HORIZON | SYNC_TIME |
| 5 | 100 | 0 | 30 |
| 10 | 100 | 0 | 30 |
| 20 | 100 | 0 | 30 |
| 5 | 1001 | 0 | 60 |
| 10 | 1001 | 0 | 60 |
| 20 | 1001 | 0 | 60 |
| 5 | 1001 | 100 | 90 |
| 10 | 1001 | 100 | 90 |
| 20 | 1001 | 100 | 90 |
20 changes: 20 additions & 0 deletions integration_tests/features/support/steps.js
@@ -3609,3 +3609,23 @@ Then(
wallet.clearCallbackCounters();
}
);

When(
"I have {int} base nodes with pruning horizon {int} force syncing on node {word}",
{ timeout: 190 * 1000 },
async function (nodes_count, horizon, force_sync_to) {
const promises = [];
const force_sync_address = this.getNode(force_sync_to).peerAddress();
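// Start every node concurrently; each one uses the same address as both its
// only peer seed and its forced sync peer.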
for (let i = 0; i < nodes_count; i++) {
const base_node = this.createNode(`BaseNode${i}`, {
pruningHorizon: horizon,
});
base_node.setPeerSeeds([force_sync_address]);
base_node.setForceSyncPeers([force_sync_address]);
promises.push(
base_node.startNew().then(() => this.addNode(`BaseNode${i}`, base_node))
);
}
await Promise.all(promises);
}
);
2 changes: 1 addition & 1 deletion integration_tests/features/support/world.js
@@ -280,7 +280,7 @@ class CustomWorld {
console.error(err);
failed += 1;
if (failed > canFail)
reject(`Too many failed. Expected less than ${canFail} failures`);
reject(`Too many failed. Expected at most ${canFail} failures`);
});
}
});
8 changes: 7 additions & 1 deletion integration_tests/helpers/baseNodeProcess.js
@@ -85,6 +85,10 @@ class BaseNodeProcess {
this.peerSeeds = addresses.join(",");
}

setForceSyncPeers(addresses) {
this.forceSyncPeers = addresses.join(",");
}

getGrpcAddress() {
const address = "127.0.0.1:" + this.grpcPort;
// console.log("Base Node GRPC Address:",address);
@@ -113,7 +117,9 @@
"127.0.0.1:8080",
"127.0.0.1:8085",
this.options,
this.peerSeeds
this.peerSeeds,
"DirectAndStoreAndForward",
this.forceSyncPeers
);
}

10 changes: 7 additions & 3 deletions integration_tests/helpers/config.js
@@ -57,7 +57,7 @@ function mapEnvs(options) {
return res;
}

function baseEnvs(peerSeeds = []) {
function baseEnvs(peerSeeds = [], forceSyncPeers = []) {
const envs = {
RUST_BACKTRACE: 1,
TARI_BASE_NODE__NETWORK: "localnet",
@@ -101,6 +101,9 @@
TARI_MINING_NODE__VALIDATE_TIP_TIMEOUT_SEC: 2,
TARI_WALLET__SCAN_FOR_UTXO_INTERVAL: 5,
};
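// In these tests forceSyncPeers is the comma-joined string built by setForceSyncPeers;
// the base node's config fallback above splits it on ','.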
if (forceSyncPeers.length != 0) {
envs.TARI_BASE_NODE__LOCALNET__FORCE_SYNC_PEERS = forceSyncPeers;
}
if (peerSeeds.length != 0) {
envs.TARI_BASE_NODE__LOCALNET__PEER_SEEDS = peerSeeds;
} else {
@@ -127,9 +130,10 @@ function createEnv(
transcoderFullAddress = "127.0.0.1:8085",
options,
peerSeeds = [],
_txnSendingMechanism = "DirectAndStoreAndForward"
_txnSendingMechanism = "DirectAndStoreAndForward",
forceSyncPeers = []
) {
const envs = baseEnvs(peerSeeds);
const envs = baseEnvs(peerSeeds, forceSyncPeers);
const network =
options && options.network ? options.network.toUpperCase() : "LOCALNET";

