Lenient duplicate checks on HTTP API for block publication (#5574)

* start splitting gossip verification

* WIP

* Gossip verify separate (#7)

* save

* save

* make ProvenancedBlock concrete

* delete into gossip verified block contents

* get rid of IntoBlobSidecar trait

* remove IntoGossipVerified trait

* get tests compiling

* don't check sidecar slashability in publish

* remove second publish closure

* drop blob bool. also prefer using message index over index of position in list

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Fix low-hanging tests

* Fix tests and clean up

* Clean up imports

* more cleanup

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Further refine behaviour and add tests

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Remove empty line

* Fix test (block is not fully imported, just gossip verified)

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Update for unstable & use empty blob list

* Update comment

* Add test for duplicate block case

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Clarify unreachable case

* Fix another publish_block case

* Remove unreachable case in filter chain segment

* Revert unrelated blob optimisation

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Fix merge conflicts

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Fix some compilation issues. Implementation is still broken, though

* Support peerDAS

* Fix tests

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Fix conflict

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

* Address review comments

* Merge remote-tracking branch 'origin/unstable' into gossip-verify-separate

This commit is contained in:
Michael Sproul
2024-09-24 14:52:44 +10:00
committed by GitHub
parent 1447eeb40b
commit 2792705331
21 changed files with 1071 additions and 516 deletions

View File

@@ -49,14 +49,10 @@
#![allow(clippy::result_large_err)]
use crate::beacon_snapshot::PreProcessingSnapshot;
use crate::blob_verification::{GossipBlobError, GossipVerifiedBlob, GossipVerifiedBlobList};
use crate::block_verification_types::{
AsBlock, BlockContentsError, BlockImportData, GossipVerifiedBlockContents, RpcBlock,
};
use crate::blob_verification::GossipBlobError;
use crate::block_verification_types::{AsBlock, BlockImportData, RpcBlock};
use crate::data_availability_checker::{AvailabilityCheckError, MaybeAvailableBlock};
use crate::data_column_verification::{
GossipDataColumnError, GossipVerifiedDataColumn, GossipVerifiedDataColumnList,
};
use crate::data_column_verification::GossipDataColumnError;
use crate::eth1_finalization_cache::Eth1FinalizationData;
use crate::execution_payload::{
is_optimistic_candidate_block, validate_execution_payload_for_gossip, validate_merge_block,
@@ -71,7 +67,7 @@ use crate::{
metrics, BeaconChain, BeaconChainError, BeaconChainTypes,
};
use derivative::Derivative;
use eth2::types::{BlockGossip, EventKind, PublishBlockRequest};
use eth2::types::{BlockGossip, EventKind};
use execution_layer::PayloadStatus;
pub use fork_choice::{AttestationFromBlock, PayloadVerificationStatus};
use lighthouse_metrics::TryExt;
@@ -82,7 +78,6 @@ use slog::{debug, error, warn, Logger};
use slot_clock::SlotClock;
use ssz::Encode;
use ssz_derive::{Decode, Encode};
use ssz_types::VariableList;
use state_processing::per_block_processing::{errors::IntoWithIndex, is_merge_transition_block};
use state_processing::{
block_signature_verifier::{BlockSignatureVerifier, Error as BlockSignatureVerifierError},
@@ -98,14 +93,12 @@ use std::io::Write;
use std::sync::Arc;
use store::{Error as DBError, HotStateSummary, KeyValueStore, StoreOp};
use task_executor::JoinHandle;
use types::data_column_sidecar::DataColumnSidecarError;
use types::{
BeaconBlockRef, BeaconState, BeaconStateError, BlobsList, ChainSpec, DataColumnSubnetId, Epoch,
EthSpec, ExecutionBlockHash, FullPayload, Hash256, InconsistentFork, KzgProofs, PublicKey,
PublicKeyBytes, RelativeEpoch, RuntimeVariableList, SignedBeaconBlock, SignedBeaconBlockHeader,
Slot,
data_column_sidecar::DataColumnSidecarError, BeaconBlockRef, BeaconState, BeaconStateError,
BlobsList, ChainSpec, DataColumnSidecarList, Epoch, EthSpec, ExecPayload, ExecutionBlockHash,
FullPayload, Hash256, InconsistentFork, PublicKey, PublicKeyBytes, RelativeEpoch,
SignedBeaconBlock, SignedBeaconBlockHeader, Slot,
};
use types::{BlobSidecar, ExecPayload};
pub const POS_PANDA_BANNER: &str = r#"
,,, ,,, ,,, ,,,
@@ -187,12 +180,18 @@ pub enum BlockError {
/// It's unclear if this block is valid, but it conflicts with finality and shouldn't be
/// imported.
NotFinalizedDescendant { block_parent_root: Hash256 },
/// Block is already known, no need to re-import.
/// Block is already known and valid, no need to re-import.
///
/// ## Peer scoring
///
/// The block is valid and we have already imported a block with this hash.
BlockIsAlreadyKnown(Hash256),
DuplicateFullyImported(Hash256),
/// Block has already been seen on gossip but has not necessarily finished being imported.
///
/// ## Peer scoring
///
/// The block could be valid, or invalid. We don't know.
DuplicateImportStatusUnknown(Hash256),
/// The block slot exceeds the MAXIMUM_BLOCK_SLOT_NUMBER.
///
/// ## Peer scoring
@@ -704,115 +703,57 @@ pub struct ExecutionPendingBlock<T: BeaconChainTypes> {
pub payload_verification_handle: PayloadVerificationHandle,
}
pub trait IntoGossipVerifiedBlockContents<T: BeaconChainTypes>: Sized {
pub trait IntoGossipVerifiedBlock<T: BeaconChainTypes>: Sized {
fn into_gossip_verified_block(
self,
chain: &BeaconChain<T>,
) -> Result<GossipVerifiedBlockContents<T>, BlockContentsError>;
fn inner_block(&self) -> &SignedBeaconBlock<T::EthSpec>;
) -> Result<GossipVerifiedBlock<T>, BlockError>;
fn inner_block(&self) -> Arc<SignedBeaconBlock<T::EthSpec>>;
}
impl<T: BeaconChainTypes> IntoGossipVerifiedBlockContents<T> for GossipVerifiedBlockContents<T> {
impl<T: BeaconChainTypes> IntoGossipVerifiedBlock<T> for GossipVerifiedBlock<T> {
fn into_gossip_verified_block(
self,
_chain: &BeaconChain<T>,
) -> Result<GossipVerifiedBlockContents<T>, BlockContentsError> {
) -> Result<GossipVerifiedBlock<T>, BlockError> {
Ok(self)
}
fn inner_block(&self) -> &SignedBeaconBlock<T::EthSpec> {
self.0.block.as_block()
fn inner_block(&self) -> Arc<SignedBeaconBlock<T::EthSpec>> {
self.block_cloned()
}
}
impl<T: BeaconChainTypes> IntoGossipVerifiedBlockContents<T> for PublishBlockRequest<T::EthSpec> {
impl<T: BeaconChainTypes> IntoGossipVerifiedBlock<T> for Arc<SignedBeaconBlock<T::EthSpec>> {
fn into_gossip_verified_block(
self,
chain: &BeaconChain<T>,
) -> Result<GossipVerifiedBlockContents<T>, BlockContentsError> {
let (block, blobs) = self.deconstruct();
let peer_das_enabled = chain.spec.is_peer_das_enabled_for_epoch(block.epoch());
let (gossip_verified_blobs, gossip_verified_data_columns) = if peer_das_enabled {
let gossip_verified_data_columns =
build_gossip_verified_data_columns(chain, &block, blobs.map(|(_, blobs)| blobs))?;
(None, gossip_verified_data_columns)
} else {
let gossip_verified_blobs = build_gossip_verified_blobs(chain, &block, blobs)?;
(gossip_verified_blobs, None)
};
let gossip_verified_block = GossipVerifiedBlock::new(block, chain)?;
Ok((
gossip_verified_block,
gossip_verified_blobs,
gossip_verified_data_columns,
))
) -> Result<GossipVerifiedBlock<T>, BlockError> {
GossipVerifiedBlock::new(self, chain)
}
fn inner_block(&self) -> &SignedBeaconBlock<T::EthSpec> {
self.signed_block()
fn inner_block(&self) -> Arc<SignedBeaconBlock<T::EthSpec>> {
self.clone()
}
}
#[allow(clippy::type_complexity)]
fn build_gossip_verified_blobs<T: BeaconChainTypes>(
chain: &BeaconChain<T>,
block: &Arc<SignedBeaconBlock<T::EthSpec, FullPayload<T::EthSpec>>>,
blobs: Option<(KzgProofs<T::EthSpec>, BlobsList<T::EthSpec>)>,
) -> Result<Option<GossipVerifiedBlobList<T>>, BlockContentsError> {
blobs
.map(|(kzg_proofs, blobs)| {
let mut gossip_verified_blobs = vec![];
for (i, (kzg_proof, blob)) in kzg_proofs.iter().zip(blobs).enumerate() {
let _timer =
metrics::start_timer(&metrics::BLOB_SIDECAR_INCLUSION_PROOF_COMPUTATION);
let blob = BlobSidecar::new(i, blob, block, *kzg_proof)
.map_err(BlockContentsError::BlobSidecarError)?;
drop(_timer);
let gossip_verified_blob =
GossipVerifiedBlob::new(Arc::new(blob), i as u64, chain)?;
gossip_verified_blobs.push(gossip_verified_blob);
}
let gossip_verified_blobs = VariableList::from(gossip_verified_blobs);
Ok::<_, BlockContentsError>(gossip_verified_blobs)
})
.transpose()
}
fn build_gossip_verified_data_columns<T: BeaconChainTypes>(
pub fn build_blob_data_column_sidecars<T: BeaconChainTypes>(
chain: &BeaconChain<T>,
block: &SignedBeaconBlock<T::EthSpec, FullPayload<T::EthSpec>>,
blobs: Option<BlobsList<T::EthSpec>>,
) -> Result<Option<GossipVerifiedDataColumnList<T>>, BlockContentsError> {
blobs
// Only attempt to build data columns if blobs is non empty to avoid skewing the metrics.
.filter(|b| !b.is_empty())
.map(|blobs| {
let mut timer = metrics::start_timer_vec(
&metrics::DATA_COLUMN_SIDECAR_COMPUTATION,
&[&blobs.len().to_string()],
);
let sidecars = blobs_to_data_column_sidecars(&blobs, block, &chain.kzg, &chain.spec)
.discard_timer_on_break(&mut timer)?;
drop(timer);
let mut gossip_verified_data_columns = vec![];
for sidecar in sidecars {
let subnet = DataColumnSubnetId::from_column_index::<T::EthSpec>(
sidecar.index as usize,
&chain.spec,
);
let column = GossipVerifiedDataColumn::new(sidecar, subnet.into(), chain)?;
gossip_verified_data_columns.push(column);
}
let gossip_verified_data_columns = RuntimeVariableList::new(
gossip_verified_data_columns,
chain.spec.number_of_columns,
)
.map_err(DataColumnSidecarError::SszError)?;
Ok::<_, BlockContentsError>(gossip_verified_data_columns)
})
.transpose()
blobs: BlobsList<T::EthSpec>,
) -> Result<DataColumnSidecarList<T::EthSpec>, DataColumnSidecarError> {
// Only attempt to build data columns if blobs is non empty to avoid skewing the metrics.
if blobs.is_empty() {
return Ok(vec![]);
}
let mut timer = metrics::start_timer_vec(
&metrics::DATA_COLUMN_SIDECAR_COMPUTATION,
&[&blobs.len().to_string()],
);
let sidecars = blobs_to_data_column_sidecars(&blobs, block, &chain.kzg, &chain.spec)
.discard_timer_on_break(&mut timer)?;
drop(timer);
Ok(sidecars)
}
/// Implemented on types that can be converted into a `ExecutionPendingBlock`.
@@ -912,7 +853,7 @@ impl<T: BeaconChainTypes> GossipVerifiedBlock<T> {
// already know this block.
let fork_choice_read_lock = chain.canonical_head.fork_choice_read_lock();
if fork_choice_read_lock.contains_block(&block_root) {
return Err(BlockError::BlockIsAlreadyKnown(block_root));
return Err(BlockError::DuplicateFullyImported(block_root));
}
// Do not process a block that doesn't descend from the finalized root.
@@ -1046,7 +987,9 @@ impl<T: BeaconChainTypes> GossipVerifiedBlock<T> {
SeenBlock::Slashable => {
return Err(BlockError::Slashable);
}
SeenBlock::Duplicate => return Err(BlockError::BlockIsAlreadyKnown(block_root)),
SeenBlock::Duplicate => {
return Err(BlockError::DuplicateImportStatusUnknown(block_root))
}
SeenBlock::UniqueNonSlashable => {}
};
@@ -1894,7 +1837,7 @@ pub fn check_block_relevancy<T: BeaconChainTypes>(
.fork_choice_read_lock()
.contains_block(&block_root)
{
return Err(BlockError::BlockIsAlreadyKnown(block_root));
return Err(BlockError::DuplicateFullyImported(block_root));
}
Ok(block_root)