Merge branch 'unstable' into peerdas-devnet-7

# Conflicts:
#	beacon_node/beacon_chain/src/block_verification_types.rs
#	beacon_node/beacon_chain/src/data_availability_checker.rs
#	beacon_node/beacon_chain/src/test_utils.rs
#	beacon_node/beacon_chain/tests/block_verification.rs
#	beacon_node/network/src/sync/block_sidecar_coupling.rs
#	beacon_node/network/src/sync/network_context.rs
#	beacon_node/network/src/sync/tests/range.rs
This commit is contained in:
Jimmy Chen
2025-06-12 09:21:44 +02:00
46 changed files with 1367 additions and 388 deletions

View File

@@ -30,8 +30,6 @@ type E = MainnetEthSpec;
const VALIDATOR_COUNT: usize = 24;
const CHAIN_SEGMENT_LENGTH: usize = 64 * 5;
const BLOCK_INDICES: &[usize] = &[0, 1, 32, 64, 68 + 1, 129, CHAIN_SEGMENT_LENGTH - 1];
// Default custody group count for tests
const CGC: usize = 8;
/// A cached set of keys.
static KEYPAIRS: LazyLock<Vec<Keypair>> =
@@ -143,15 +141,10 @@ fn build_rpc_block(
Some(DataSidecars::Blobs(blobs)) => {
RpcBlock::new(None, block, Some(blobs.clone())).unwrap()
}
Some(DataSidecars::DataColumns(columns)) => RpcBlock::new_with_custody_columns(
None,
block,
columns.clone(),
columns.iter().map(|d| d.index()).collect(),
spec,
)
.unwrap(),
None => RpcBlock::new_without_blobs(None, block, 0),
Some(DataSidecars::DataColumns(columns)) => {
RpcBlock::new_with_custody_columns(None, block, columns.clone(), spec).unwrap()
}
None => RpcBlock::new_without_blobs(None, block),
}
}
@@ -374,7 +367,6 @@ async fn chain_segment_non_linear_parent_roots() {
blocks[3] = RpcBlock::new_without_blobs(
None,
Arc::new(SignedBeaconBlock::from_block(block, signature)),
harness.sampling_column_count,
);
assert!(
@@ -412,7 +404,6 @@ async fn chain_segment_non_linear_slots() {
blocks[3] = RpcBlock::new_without_blobs(
None,
Arc::new(SignedBeaconBlock::from_block(block, signature)),
harness.sampling_column_count,
);
assert!(
@@ -440,7 +431,6 @@ async fn chain_segment_non_linear_slots() {
blocks[3] = RpcBlock::new_without_blobs(
None,
Arc::new(SignedBeaconBlock::from_block(block, signature)),
harness.sampling_column_count,
);
assert!(
@@ -582,11 +572,7 @@ async fn invalid_signature_gossip_block() {
.into_block_error()
.expect("should import all blocks prior to the one being tested");
let signed_block = SignedBeaconBlock::from_block(block, junk_signature());
let rpc_block = RpcBlock::new_without_blobs(
None,
Arc::new(signed_block),
harness.sampling_column_count,
);
let rpc_block = RpcBlock::new_without_blobs(None, Arc::new(signed_block));
let process_res = harness
.chain
.process_block(
@@ -1006,7 +992,6 @@ async fn block_gossip_verification() {
let (chain_segment, chain_segment_blobs) = get_chain_segment().await;
let block_index = CHAIN_SEGMENT_LENGTH - 2;
let cgc = harness.chain.spec.custody_requirement as usize;
harness
.chain
@@ -1020,7 +1005,7 @@ async fn block_gossip_verification() {
{
let gossip_verified = harness
.chain
.verify_block_for_gossip(snapshot.beacon_block.clone(), get_cgc(&blobs_opt))
.verify_block_for_gossip(snapshot.beacon_block.clone())
.await
.expect("should obtain gossip verified block");
@@ -1062,7 +1047,7 @@ async fn block_gossip_verification() {
*block.slot_mut() = expected_block_slot;
assert!(
matches!(
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature)), cgc).await),
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature))).await),
BlockError::FutureSlot {
present_slot,
block_slot,
@@ -1096,7 +1081,7 @@ async fn block_gossip_verification() {
*block.slot_mut() = expected_finalized_slot;
assert!(
matches!(
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature)), cgc).await),
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature))).await),
BlockError::WouldRevertFinalizedSlot {
block_slot,
finalized_slot,
@@ -1126,10 +1111,10 @@ async fn block_gossip_verification() {
unwrap_err(
harness
.chain
.verify_block_for_gossip(
Arc::new(SignedBeaconBlock::from_block(block, junk_signature())),
cgc
)
.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(
block,
junk_signature()
)),)
.await
),
BlockError::InvalidSignature(InvalidSignature::ProposerSignature)
@@ -1154,7 +1139,7 @@ async fn block_gossip_verification() {
*block.parent_root_mut() = parent_root;
assert!(
matches!(
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature)), cgc).await),
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature))).await),
BlockError::ParentUnknown {parent_root: p}
if p == parent_root
),
@@ -1180,7 +1165,7 @@ async fn block_gossip_verification() {
*block.parent_root_mut() = parent_root;
assert!(
matches!(
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature)), cgc).await),
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature))).await),
BlockError::NotFinalizedDescendant { block_parent_root }
if block_parent_root == parent_root
),
@@ -1217,7 +1202,7 @@ async fn block_gossip_verification() {
);
assert!(
matches!(
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(block.clone()), cgc).await),
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(block.clone())).await),
BlockError::IncorrectBlockProposer {
block,
local_shuffling,
@@ -1229,7 +1214,7 @@ async fn block_gossip_verification() {
// Check to ensure that we registered this is a valid block from this proposer.
assert!(
matches!(
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(block.clone()), cgc).await),
unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(block.clone())).await),
BlockError::DuplicateImportStatusUnknown(_),
),
"should register any valid signature against the proposer, even if the block failed later verification"
@@ -1237,11 +1222,7 @@ async fn block_gossip_verification() {
let block = chain_segment[block_index].beacon_block.clone();
assert!(
harness
.chain
.verify_block_for_gossip(block, cgc)
.await
.is_ok(),
harness.chain.verify_block_for_gossip(block).await.is_ok(),
"the valid block should be processed"
);
@@ -1259,7 +1240,7 @@ async fn block_gossip_verification() {
matches!(
harness
.chain
.verify_block_for_gossip(block.clone(), cgc)
.verify_block_for_gossip(block.clone())
.await
.expect_err("should error when processing known block"),
BlockError::DuplicateImportStatusUnknown(_)
@@ -1335,17 +1316,8 @@ async fn verify_block_for_gossip_slashing_detection() {
let state = harness.get_current_state();
let ((block1, blobs1), _) = harness.make_block(state.clone(), Slot::new(1)).await;
let ((block2, _blobs2), _) = harness.make_block(state, Slot::new(1)).await;
let cgc = if block1.fork_name_unchecked().fulu_enabled() {
harness.get_sampling_column_count()
} else {
0
};
let verified_block = harness
.chain
.verify_block_for_gossip(block1, cgc)
.await
.unwrap();
let verified_block = harness.chain.verify_block_for_gossip(block1).await.unwrap();
if let Some((kzg_proofs, blobs)) = blobs1 {
harness
@@ -1368,7 +1340,7 @@ async fn verify_block_for_gossip_slashing_detection() {
)
.await
.unwrap();
unwrap_err(harness.chain.verify_block_for_gossip(block2, CGC).await);
unwrap_err(harness.chain.verify_block_for_gossip(block2).await);
// Slasher should have been handed the two conflicting blocks and crafted a slashing.
slasher.process_queued(Epoch::new(0)).unwrap();
@@ -1392,11 +1364,7 @@ async fn verify_block_for_gossip_doppelganger_detection() {
.attestations()
.map(|att| att.clone_as_attestation())
.collect::<Vec<_>>();
let verified_block = harness
.chain
.verify_block_for_gossip(block, CGC)
.await
.unwrap();
let verified_block = harness.chain.verify_block_for_gossip(block).await.unwrap();
harness
.chain
.process_block(
@@ -1543,7 +1511,7 @@ async fn add_base_block_to_altair_chain() {
assert!(matches!(
harness
.chain
.verify_block_for_gossip(Arc::new(base_block.clone()), CGC)
.verify_block_for_gossip(Arc::new(base_block.clone()))
.await
.expect_err("should error when processing base block"),
BlockError::InconsistentFork(InconsistentFork {
@@ -1553,7 +1521,7 @@ async fn add_base_block_to_altair_chain() {
));
// Ensure that it would be impossible to import via `BeaconChain::process_block`.
let base_rpc_block = RpcBlock::new_without_blobs(None, Arc::new(base_block.clone()), 0);
let base_rpc_block = RpcBlock::new_without_blobs(None, Arc::new(base_block.clone()));
assert!(matches!(
harness
.chain
@@ -1577,7 +1545,7 @@ async fn add_base_block_to_altair_chain() {
harness
.chain
.process_chain_segment(
vec![RpcBlock::new_without_blobs(None, Arc::new(base_block), 0)],
vec![RpcBlock::new_without_blobs(None, Arc::new(base_block))],
NotifyExecutionLayer::Yes,
)
.await,
@@ -1680,7 +1648,7 @@ async fn add_altair_block_to_base_chain() {
assert!(matches!(
harness
.chain
.verify_block_for_gossip(Arc::new(altair_block.clone()), CGC)
.verify_block_for_gossip(Arc::new(altair_block.clone()))
.await
.expect_err("should error when processing altair block"),
BlockError::InconsistentFork(InconsistentFork {
@@ -1690,7 +1658,7 @@ async fn add_altair_block_to_base_chain() {
));
// Ensure that it would be impossible to import via `BeaconChain::process_block`.
let altair_rpc_block = RpcBlock::new_without_blobs(None, Arc::new(altair_block.clone()), 0);
let altair_rpc_block = RpcBlock::new_without_blobs(None, Arc::new(altair_block.clone()));
assert!(matches!(
harness
.chain
@@ -1714,7 +1682,7 @@ async fn add_altair_block_to_base_chain() {
harness
.chain
.process_chain_segment(
vec![RpcBlock::new_without_blobs(None, Arc::new(altair_block), 0)],
vec![RpcBlock::new_without_blobs(None, Arc::new(altair_block))],
NotifyExecutionLayer::Yes
)
.await,
@@ -1775,11 +1743,7 @@ async fn import_duplicate_block_unrealized_justification() {
// Create two verified variants of the block, representing the same block being processed in
// parallel.
let notify_execution_layer = NotifyExecutionLayer::Yes;
let rpc_block = RpcBlock::new_without_blobs(
Some(block_root),
block.clone(),
harness.sampling_column_count,
);
let rpc_block = RpcBlock::new_without_blobs(Some(block_root), block.clone());
let verified_block1 = rpc_block
.clone()
.into_execution_pending_block(block_root, chain, notify_execution_layer)
@@ -1850,14 +1814,3 @@ async fn import_execution_pending_block<T: BeaconChainTypes>(
}
}
}
/// Derives the custody group count to use for gossip verification from the
/// optional data sidecars attached to a block.
///
/// Data columns imply a CGC equal to the number of columns; blobs (pre-PeerDAS)
/// and the absence of any sidecars both imply a CGC of zero.
fn get_cgc<E: EthSpec>(blobs_opt: &Option<DataSidecars<E>>) -> usize {
    match blobs_opt {
        // Only data columns carry a meaningful custody group count.
        Some(DataSidecars::DataColumns(columns)) => columns.len(),
        // Blob sidecars and "no sidecars" both map to zero.
        Some(DataSidecars::Blobs(_)) | None => 0,
    }
}

View File

@@ -1,11 +1,15 @@
use beacon_chain::blob_verification::GossipVerifiedBlob;
use beacon_chain::test_utils::BeaconChainHarness;
use eth2::types::{EventKind, SseBlobSidecar};
use beacon_chain::data_column_verification::GossipVerifiedDataColumn;
use beacon_chain::test_utils::{BeaconChainHarness, TEST_DATA_COLUMN_SIDECARS_SSZ};
use eth2::types::{EventKind, SseBlobSidecar, SseDataColumnSidecar};
use rand::rngs::StdRng;
use rand::SeedableRng;
use std::sync::Arc;
use types::blob_sidecar::FixedBlobSidecarList;
use types::{BlobSidecar, EthSpec, ForkName, MinimalEthSpec};
use types::test_utils::TestRandom;
use types::{
BlobSidecar, DataColumnSidecar, EthSpec, ForkName, MinimalEthSpec, RuntimeVariableList,
};
type E = MinimalEthSpec;
@@ -43,6 +47,42 @@ async fn blob_sidecar_event_on_process_gossip_blob() {
assert_eq!(sidecar_event, EventKind::BlobSidecar(expected_sse_blobs));
}
/// Verifies that a data column event is emitted when a gossip verified data column is received via gossip or the publish block API.
#[tokio::test]
async fn data_column_sidecar_event_on_process_gossip_data_column() {
// Build a Fulu-from-genesis harness so data column sidecars are active.
let spec = Arc::new(ForkName::Fulu.make_genesis_spec(E::default_spec()));
let harness = BeaconChainHarness::builder(E::default())
.spec(spec)
.deterministic_keypairs(8)
.fresh_ephemeral_store()
.mock_execution_layer()
.build();
// Subscribe to data column sidecar events via the chain's event handler.
let event_handler = harness.chain.event_handler.as_ref().unwrap();
let mut data_column_event_receiver = event_handler.subscribe_data_column_sidecar();
// Build and process a gossip verified data column. A fixed RNG seed keeps the
// generated sidecar deterministic across test runs.
let mut rng = StdRng::seed_from_u64(0xDEADBEEF0BAD5EEDu64);
let sidecar = Arc::new(DataColumnSidecar::random_for_test(&mut rng));
// Bypass real gossip verification; this test only exercises event emission.
let gossip_verified_data_column = GossipVerifiedDataColumn::__new_for_testing(sidecar);
// Capture the SSE payload we expect the event handler to emit for this sidecar.
let expected_sse_data_column = SseDataColumnSidecar::from_data_column_sidecar(
gossip_verified_data_column.as_data_column(),
);
// Import result is irrelevant here — only the emitted event is asserted below.
let _ = harness
.chain
.process_gossip_data_columns(vec![gossip_verified_data_column], || Ok(()))
.await
.unwrap();
// The event should already be queued; `try_recv` fails the test if it is missing.
let sidecar_event = data_column_event_receiver.try_recv().unwrap();
assert_eq!(
sidecar_event,
EventKind::DataColumnSidecar(expected_sse_data_column)
);
}
/// Verifies that a blob event is emitted when blobs are received via RPC.
#[tokio::test]
async fn blob_sidecar_event_on_process_rpc_blobs() {
@@ -95,3 +135,41 @@ async fn blob_sidecar_event_on_process_rpc_blobs() {
}
assert_eq!(sse_blobs, expected_sse_blobs);
}
/// Verifies that a data column event is emitted when custody columns are received via RPC.
#[tokio::test]
async fn data_column_sidecar_event_on_process_rpc_columns() {
// Build a Fulu-from-genesis harness so data column sidecars are active.
let spec = Arc::new(ForkName::Fulu.make_genesis_spec(E::default_spec()));
let harness = BeaconChainHarness::builder(E::default())
.spec(spec.clone())
.deterministic_keypairs(8)
.fresh_ephemeral_store()
.mock_execution_layer()
.build();
// Subscribe to data column sidecar events via the chain's event handler.
let event_handler = harness.chain.event_handler.as_ref().unwrap();
let mut data_column_event_receiver = event_handler.subscribe_data_column_sidecar();
// load the precomputed column sidecar to avoid computing them for every block in the tests.
let mut sidecar = RuntimeVariableList::<DataColumnSidecar<E>>::from_ssz_bytes(
TEST_DATA_COLUMN_SIDECARS_SSZ,
spec.number_of_columns as usize,
)
.unwrap()[0]
.clone();
// Re-parent the precomputed sidecar onto the current head so it is processable
// against this harness's chain.
let parent_root = harness.chain.head().head_block_root();
sidecar.signed_block_header.message.parent_root = parent_root;
// Capture the SSE payload we expect the event handler to emit for this sidecar.
let expected_sse_data_column = SseDataColumnSidecar::from_data_column_sidecar(&sidecar);
// Import result is irrelevant here — only the emitted event is asserted below.
let _ = harness
.chain
.process_rpc_custody_columns(vec![Arc::new(sidecar)])
.await
.unwrap();
// The event should already be queued; `try_recv` fails the test if it is missing.
let sidecar_event = data_column_event_receiver.try_recv().unwrap();
assert_eq!(
sidecar_event,
EventKind::DataColumnSidecar(expected_sse_data_column)
);
}

View File

@@ -22,7 +22,6 @@ use task_executor::ShutdownReason;
use types::*;
const VALIDATOR_COUNT: usize = 32;
const CGC: usize = 8;
type E = MainnetEthSpec;
@@ -686,8 +685,7 @@ async fn invalidates_all_descendants() {
assert_eq!(fork_parent_state.slot(), fork_parent_slot);
let ((fork_block, _), _fork_post_state) =
rig.harness.make_block(fork_parent_state, fork_slot).await;
let fork_rpc_block =
RpcBlock::new_without_blobs(None, fork_block.clone(), rig.harness.sampling_column_count);
let fork_rpc_block = RpcBlock::new_without_blobs(None, fork_block.clone());
let fork_block_root = rig
.harness
.chain
@@ -789,8 +787,7 @@ async fn switches_heads() {
let ((fork_block, _), _fork_post_state) =
rig.harness.make_block(fork_parent_state, fork_slot).await;
let fork_parent_root = fork_block.parent_root();
let fork_rpc_block =
RpcBlock::new_without_blobs(None, fork_block.clone(), rig.harness.sampling_column_count);
let fork_rpc_block = RpcBlock::new_without_blobs(None, fork_block.clone());
let fork_block_root = rig
.harness
.chain
@@ -1054,14 +1051,13 @@ async fn invalid_parent() {
// Ensure the block built atop an invalid payload is invalid for gossip.
assert!(matches!(
rig.harness.chain.clone().verify_block_for_gossip(block.clone(), CGC).await,
rig.harness.chain.clone().verify_block_for_gossip(block.clone()).await,
Err(BlockError::ParentExecutionPayloadInvalid { parent_root: invalid_root })
if invalid_root == parent_root
));
// Ensure the block built atop an invalid payload is invalid for import.
let rpc_block =
RpcBlock::new_without_blobs(None, block.clone(), rig.harness.sampling_column_count);
let rpc_block = RpcBlock::new_without_blobs(None, block.clone());
assert!(matches!(
rig.harness.chain.process_block(rpc_block.block_root(), rpc_block, NotifyExecutionLayer::Yes, BlockImportSource::Lookup,
|| Ok(()),
@@ -1385,8 +1381,7 @@ async fn recover_from_invalid_head_by_importing_blocks() {
} = InvalidHeadSetup::new().await;
// Import the fork block, it should become the head.
let fork_rpc_block =
RpcBlock::new_without_blobs(None, fork_block.clone(), rig.harness.sampling_column_count);
let fork_rpc_block = RpcBlock::new_without_blobs(None, fork_block.clone());
rig.harness
.chain
.process_block(

View File

@@ -2791,11 +2791,7 @@ async fn process_blocks_and_attestations_for_unaligned_checkpoint() {
assert_eq!(split.block_root, valid_fork_block.parent_root());
assert_ne!(split.state_root, unadvanced_split_state_root);
let invalid_fork_rpc_block = RpcBlock::new_without_blobs(
None,
invalid_fork_block.clone(),
harness.sampling_column_count,
);
let invalid_fork_rpc_block = RpcBlock::new_without_blobs(None, invalid_fork_block.clone());
// Applying the invalid block should fail.
let err = harness
.chain
@@ -2811,11 +2807,7 @@ async fn process_blocks_and_attestations_for_unaligned_checkpoint() {
assert!(matches!(err, BlockError::WouldRevertFinalizedSlot { .. }));
// Applying the valid block should succeed, but it should not become head.
let valid_fork_rpc_block = RpcBlock::new_without_blobs(
None,
valid_fork_block.clone(),
harness.sampling_column_count,
);
let valid_fork_rpc_block = RpcBlock::new_without_blobs(None, valid_fork_block.clone());
harness
.chain
.process_block(