diff --git a/beacon_node/network/src/beacon_processor/chain_segment.rs b/beacon_node/network/src/beacon_processor/chain_segment.rs index f402bbb313..a327565e70 100644 --- a/beacon_node/network/src/beacon_processor/chain_segment.rs +++ b/beacon_node/network/src/beacon_processor/chain_segment.rs @@ -40,19 +40,19 @@ pub fn handle_chain_segment( 0 }; - debug!(log, "Processing batch"; "batch_epoch" => epoch, "blocks" => downloaded_blocks.len(), "start_slot" => start_slot, "end_slot" => end_slot); + debug!(log, "Processing batch"; "batch_epoch" => epoch, "blocks" => downloaded_blocks.len(), "first_block_slot" => start_slot, "last_block_slot" => end_slot, "service" => "sync"); let result = match process_blocks(chain, downloaded_blocks.iter(), &log) { (_, Ok(_)) => { - debug!(log, "Batch processed"; "batch_epoch" => epoch , "start_slot" => start_slot, "end_slot" => end_slot); + debug!(log, "Batch processed"; "batch_epoch" => epoch , "first_block_slot" => start_slot, "last_block_slot" => end_slot, "service"=> "sync"); BatchProcessResult::Success } (imported_blocks, Err(e)) if imported_blocks > 0 => { debug!(log, "Batch processing failed but imported some blocks"; - "batch_epoch" => epoch, "error" => e, "imported_blocks"=> imported_blocks); + "batch_epoch" => epoch, "error" => e, "imported_blocks"=> imported_blocks, "service" => "sync"); BatchProcessResult::Partial } (_, Err(e)) => { - debug!(log, "Batch processing failed"; "batch_epoch" => epoch, "error" => e); + debug!(log, "Batch processing failed"; "batch_epoch" => epoch, "error" => e, "service" => "sync"); BatchProcessResult::Failed } }; diff --git a/beacon_node/network/src/sync/range_sync/chain_collection.rs b/beacon_node/network/src/sync/range_sync/chain_collection.rs index 1543710ccf..86d31e636b 100644 --- a/beacon_node/network/src/sync/range_sync/chain_collection.rs +++ b/beacon_node/network/src/sync/range_sync/chain_collection.rs @@ -359,6 +359,22 @@ impl ChainCollection { self.state = head_state; } + /// This is 
called once a head chain has completed syncing. It removes all non-syncing head + chains and re-statuses their peers. + pub fn clear_head_chains(&mut self, network: &mut SyncNetworkContext) { + let log_ref = &self.log; + self.head_chains.retain(|chain| { + if !chain.is_syncing()
 + {
 + debug!(log_ref, "Removing old head chain"; "start_epoch" => chain.start_epoch, "end_slot" => chain.target_head_slot);
 + chain.status_peers(network);
 + false
 + } else {
 + true
 + }
 + });
 + }
 + /// Add a new finalized chain to the collection. pub fn new_finalized_chain( &mut self, diff --git a/beacon_node/network/src/sync/range_sync/range.rs b/beacon_node/network/src/sync/range_sync/range.rs index f0ee38e104..d3c16f3e9a 100644 --- a/beacon_node/network/src/sync/range_sync/range.rs +++ b/beacon_node/network/src/sync/range_sync/range.rs @@ -322,6 +322,10 @@ impl RangeSync { // the chain is complete, re-status it's peers and remove it chain.status_peers(network); + // Remove non-syncing head chains and re-status the peers + // This removes a build-up of potentially duplicate head chains. Any + // legitimate head chains will be re-established + self.chains.clear_head_chains(network); // update the state of the collection self.chains.update(network); // update the global state and log any change