diff --git a/sync/src/block_sync.rs b/sync/src/block_sync.rs
index 7c3cbf2d72f..ff541114023 100644
--- a/sync/src/block_sync.rs
+++ b/sync/src/block_sync.rs
@@ -34,6 +34,7 @@ const MAX_RECEPITS_TO_REQUEST: usize = 128;
 const SUBCHAIN_SIZE: u64 = 256;
 const MAX_ROUND_PARENTS: usize = 32;
 const MAX_PARALLEL_SUBCHAIN_DOWNLOAD: usize = 5;
+const MAX_REORG_BLOCKS: u64 = 20;
 
 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
 /// Downloader state
@@ -262,7 +263,8 @@ impl BlockDownloader {
 			State::Blocks => {
 				let count = headers.len();
-				// At least one of the heades must advance the subchain. Otherwise they are all useless.
-				if !any_known {
+				// At least one of the headers must advance the subchain. Otherwise they are all useless.
+				if count == 0 || !any_known {
+					trace!(target: "sync", "No useful headers");
 					return Err(BlockDownloaderImportError::Useless);
 				}
 				self.blocks.insert_headers(headers);
@@ -340,14 +342,21 @@ impl BlockDownloader {
 					self.last_imported_hash = p.clone();
 					trace!(target: "sync", "Searching common header from the last round {} ({})", self.last_imported_block, self.last_imported_hash);
 				} else {
-					match io.chain().block_hash(BlockID::Number(self.last_imported_block - 1)) {
-						Some(h) => {
-							self.last_imported_block -= 1;
-							self.last_imported_hash = h;
-							trace!(target: "sync", "Searching common header in the blockchain {} ({})", self.last_imported_block, self.last_imported_hash);
-						}
-						None => {
-							debug!(target: "sync", "Could not revert to previous block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
+					let best = io.chain().chain_info().best_block_number;
+					if best > self.last_imported_block && best - self.last_imported_block > MAX_REORG_BLOCKS {
+						debug!(target: "sync", "Could not revert to previous ancient block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
+						self.reset();
+					} else {
+						match io.chain().block_hash(BlockID::Number(self.last_imported_block - 1)) {
+							Some(h) => {
+								self.last_imported_block -= 1;
+								self.last_imported_hash = h;
+								trace!(target: "sync", "Searching common header in the blockchain {} ({})", self.last_imported_block, self.last_imported_hash);
+							}
+							None => {
+								debug!(target: "sync", "Could not revert to previous block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
+								self.reset();
+							}
 						}
 					}
 				}
@@ -362,7 +371,9 @@ impl BlockDownloader {
 		match self.state {
 			State::Idle => {
 				self.start_sync_round(io);
-				return self.request_blocks(io, num_active_peers);
+				if self.state == State::ChainHead {
+					return self.request_blocks(io, num_active_peers);
+				}
 			},
 			State::ChainHead => {
 				if num_active_peers < MAX_PARALLEL_SUBCHAIN_DOWNLOAD {
diff --git a/sync/src/chain.rs b/sync/src/chain.rs
index d2939fbacf1..ffd89ecdde5 100644
--- a/sync/src/chain.rs
+++ b/sync/src/chain.rs
@@ -1144,6 +1144,7 @@ impl ChainSync {
 		let have_latest = io.chain().block_status(BlockID::Hash(peer_latest)) != BlockStatus::Unknown;
 		if !have_latest && (higher_difficulty || force || self.state == SyncState::NewBlocks) {
 			// check if got new blocks to download
+			trace!(target: "sync", "Syncing with {}, force={}, td={:?}, our td={}, state={:?}", peer_id, force, peer_difficulty, syncing_difficulty, self.state);
 			if let Some(request) = self.new_blocks.request_blocks(io, num_active_peers) {
 				self.request_blocks(io, peer_id, request, BlockSet::NewBlocks);
 				if self.state == SyncState::Idle {
diff --git a/sync/src/tests/chain.rs b/sync/src/tests/chain.rs
index 17c0511625d..5fe34428ebd 100644
--- a/sync/src/tests/chain.rs
+++ b/sync/src/tests/chain.rs
@@ -79,14 +79,14 @@ fn empty_blocks() {
 fn forked() {
 	::env_logger::init().ok();
 	let mut net = TestNet::new(3);
-	net.peer_mut(0).chain.add_blocks(300, EachBlockWith::Uncle);
-	net.peer_mut(1).chain.add_blocks(300, EachBlockWith::Uncle);
-	net.peer_mut(2).chain.add_blocks(300, EachBlockWith::Uncle);
-	net.peer_mut(0).chain.add_blocks(100, EachBlockWith::Nothing); //fork
-	net.peer_mut(1).chain.add_blocks(200, EachBlockWith::Uncle);
-	net.peer_mut(2).chain.add_blocks(200, EachBlockWith::Uncle);
-	net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Uncle); //fork between 1 and 2
-	net.peer_mut(2).chain.add_blocks(10, EachBlockWith::Nothing);
-	// peer 1 has the best chain of 601 blocks
+	net.peer_mut(0).chain.add_blocks(30, EachBlockWith::Uncle);
+	net.peer_mut(1).chain.add_blocks(30, EachBlockWith::Uncle);
+	net.peer_mut(2).chain.add_blocks(30, EachBlockWith::Uncle);
+	net.peer_mut(0).chain.add_blocks(10, EachBlockWith::Nothing); //fork
+	net.peer_mut(1).chain.add_blocks(20, EachBlockWith::Uncle);
+	net.peer_mut(2).chain.add_blocks(20, EachBlockWith::Uncle);
+	net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Uncle); //fork between 1 and 2
+	net.peer_mut(2).chain.add_blocks(1, EachBlockWith::Nothing);
+	// peer 1 has the best chain of 61 blocks
 	let peer1_chain = net.peer(1).chain.numbers.read().clone();
 	net.sync();
@@ -102,12 +102,12 @@ fn forked_with_misbehaving_peer() {
 	let mut net = TestNet::new(3);
 	// peer 0 is on a totally different chain with higher total difficulty
 	net.peer_mut(0).chain = TestBlockChainClient::new_with_extra_data(b"fork".to_vec());
-	net.peer_mut(0).chain.add_blocks(500, EachBlockWith::Nothing);
-	net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Nothing);
-	net.peer_mut(2).chain.add_blocks(100, EachBlockWith::Nothing);
+	net.peer_mut(0).chain.add_blocks(50, EachBlockWith::Nothing);
+	net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Nothing);
+	net.peer_mut(2).chain.add_blocks(10, EachBlockWith::Nothing);
 
-	net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Nothing);
-	net.peer_mut(2).chain.add_blocks(200, EachBlockWith::Uncle);
+	net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Nothing);
+	net.peer_mut(2).chain.add_blocks(20, EachBlockWith::Uncle);
 	// peer 1 should sync to peer 2, others should not change
 	let peer0_chain = net.peer(0).chain.numbers.read().clone();
 	let peer2_chain = net.peer(2).chain.numbers.read().clone();
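For context, the core behavioural change in block_sync.rs is the cap on how far the downloader will walk back while looking for a common ancestor. The following is a minimal, standalone sketch of that retraction cap under simplified assumptions: `Chain`, `Downloader`, and `retract` here are hypothetical stand-ins, not the real `BlockDownloader`/`SyncIo` API, and only illustrate the `MAX_REORG_BLOCKS` check introduced above.

```rust
// Simplified sketch of the reorg cap: while retracting towards a common
// ancestor, give up and reset once the gap to the best block exceeds
// MAX_REORG_BLOCKS instead of stepping back one block at a time forever.
use std::collections::HashMap;

const MAX_REORG_BLOCKS: u64 = 20;

/// Simplified chain view: best block number plus hashes keyed by number.
struct Chain {
    best_block_number: u64,
    hashes: HashMap<u64, [u8; 32]>,
}

struct Downloader {
    last_imported_block: u64,
    last_imported_hash: [u8; 32],
}

impl Downloader {
    /// Step one block back towards a possible common ancestor.
    /// Returns false when the search is abandoned and the downloader resets.
    /// Callers guarantee `last_imported_block > 0`, as in the real code path.
    fn retract(&mut self, chain: &Chain) -> bool {
        let best = chain.best_block_number;
        // Cap the reorg search: being far behind the best block means we are
        // trying to revert an ancient block, so restart the round instead.
        if best > self.last_imported_block
            && best - self.last_imported_block > MAX_REORG_BLOCKS
        {
            self.reset();
            return false;
        }
        match chain.hashes.get(&(self.last_imported_block - 1)) {
            Some(h) => {
                self.last_imported_block -= 1;
                self.last_imported_hash = *h;
                true
            }
            None => {
                // Parent unknown locally: nothing left to retract to.
                self.reset();
                false
            }
        }
    }

    fn reset(&mut self) {
        // In the real downloader this clears queued blocks and returns to Idle.
    }
}

fn main() {
    let mut hashes = HashMap::new();
    hashes.insert(99, [0u8; 32]);
    let chain = Chain { best_block_number: 130, hashes };
    let mut dl = Downloader { last_imported_block: 100, last_imported_hash: [0u8; 32] };
    // 130 - 100 = 30 > 20, so the search is abandoned rather than retracted.
    assert!(!dl.retract(&chain));
}
```

The same bound also explains the shrunken fixtures in tests/chain.rs: the fork depths are reduced so the peers' divergence stays within a range the downloader is still willing to reorganise across.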