Merge remote-tracking branch 'upstream/unstable' into gloas-containers

This commit is contained in:
Mark Mackey
2025-10-22 16:05:06 -05:00
78 changed files with 5569 additions and 615 deletions

View File

@@ -0,0 +1,120 @@
#![cfg(not(debug_assertions))]
use beacon_chain::test_utils::{
AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType, test_spec,
};
use beacon_chain::{
AvailabilityProcessingStatus, BlockError, ChainConfig, InvalidSignature, NotifyExecutionLayer,
block_verification_types::AsBlock,
};
use logging::create_test_tracing_subscriber;
use std::sync::{Arc, LazyLock};
use types::{blob_sidecar::FixedBlobSidecarList, *};
type E = MainnetEthSpec;
// Should ideally be divisible by 3.
const VALIDATOR_COUNT: usize = 24;
/// A cached set of keys.
static KEYPAIRS: LazyLock<Vec<Keypair>> =
LazyLock::new(|| types::test_utils::generate_deterministic_keypairs(VALIDATOR_COUNT));
/// Build an ephemeral test harness with `validator_count` validators drawn
/// from the cached deterministic keypairs, using the given `spec`, a mock
/// execution layer, and historic state reconstruction enabled. The slot
/// clock is advanced one slot before the harness is returned.
fn get_harness(
    validator_count: usize,
    spec: Arc<ChainSpec>,
) -> BeaconChainHarness<EphemeralHarnessType<E>> {
    create_test_tracing_subscriber();
    let chain_config = ChainConfig {
        reconstruct_historic_states: true,
        ..ChainConfig::default()
    };
    let harness = BeaconChainHarness::builder(MainnetEthSpec)
        .spec(spec)
        .chain_config(chain_config)
        .keypairs(KEYPAIRS[..validator_count].to_vec())
        .fresh_ephemeral_store()
        .mock_execution_layer()
        .build();
    harness.advance_slot();
    harness
}
// Regression test for https://github.com/sigp/lighthouse/issues/7650
//
// Imports a block without its blobs (leaving it pending in
// `MissingComponents`), then feeds in RPC blob sidecars whose signed block
// header carries an invalid proposer signature, and asserts the blobs are
// rejected with `InvalidSignature::ProposerSignature` rather than making the
// block available.
#[tokio::test]
async fn rpc_blobs_with_invalid_header_signature() {
    let spec = Arc::new(test_spec::<E>());
    // Only run this test if blobs are enabled and columns are disabled.
    if spec.deneb_fork_epoch.is_none() || spec.is_fulu_scheduled() {
        return;
    }
    let harness = get_harness(VALIDATOR_COUNT, spec);
    let num_blocks = E::slots_per_epoch() as usize;
    // Add some chain depth.
    harness
        .extend_chain(
            num_blocks,
            BlockStrategy::OnCanonicalHead,
            AttestationStrategy::AllValidators,
        )
        .await;
    // Produce a block with blobs (force the mock EL to include at least one).
    harness.execution_block_generator().set_min_blob_count(1);
    let head_state = harness.get_current_state();
    let slot = head_state.slot() + 1;
    let ((signed_block, opt_blobs), _) = harness.make_block(head_state, slot).await;
    let (kzg_proofs, blobs) = opt_blobs.unwrap();
    assert!(!blobs.is_empty());
    let block_root = signed_block.canonical_root();
    // Process the block without blobs so that it doesn't become available.
    harness.advance_slot();
    let rpc_block = harness
        .build_rpc_block_from_blobs(block_root, signed_block.clone(), None)
        .unwrap();
    let availability = harness
        .chain
        .process_block(
            block_root,
            rpc_block,
            NotifyExecutionLayer::Yes,
            BlockImportSource::RangeSync,
            || Ok(()),
        )
        .await
        .unwrap();
    // The block must now be waiting on its missing blobs.
    assert_eq!(
        availability,
        AvailabilityProcessingStatus::MissingComponents(slot, block_root)
    );
    // Build blob sidecars with invalid signatures in the block header, by
    // replacing the proposer signature on a copy of the block.
    let mut corrupt_block = (*signed_block).clone();
    *corrupt_block.signature_mut() = Signature::infinity().unwrap();
    let max_len = harness
        .chain
        .spec
        .max_blobs_per_block(slot.epoch(E::slots_per_epoch())) as usize;
    let mut blob_sidecars = FixedBlobSidecarList::new(vec![None; max_len]);
    for (i, (kzg_proof, blob)) in kzg_proofs.into_iter().zip(blobs).enumerate() {
        let blob_sidecar = BlobSidecar::new(i, blob, &corrupt_block, kzg_proof).unwrap();
        blob_sidecars[i] = Some(Arc::new(blob_sidecar));
    }
    // The invalid header signature must cause the RPC blobs to be rejected.
    let err = harness
        .chain
        .process_rpc_blobs(slot, block_root, blob_sidecars)
        .await
        .unwrap_err();
    assert!(matches!(
        err,
        BlockError::InvalidSignature(InvalidSignature::ProposerSignature)
    ));
}

View File

@@ -4,6 +4,7 @@ use beacon_chain::block_verification_types::{AsBlock, ExecutedBlock, RpcBlock};
use beacon_chain::data_column_verification::CustodyDataColumn;
use beacon_chain::{
AvailabilityProcessingStatus, BeaconChain, BeaconChainTypes, ExecutionPendingBlock,
custody_context::NodeCustodyType,
test_utils::{
AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType, test_spec,
},
@@ -45,7 +46,7 @@ async fn get_chain_segment() -> (Vec<BeaconSnapshot<E>>, Vec<Option<DataSidecars
// The assumption that you can re-import a block based on what you have in your DB
// is no longer true, as fullnodes store less than what they sample.
// We use a supernode here to build a chain segment.
let harness = get_harness(VALIDATOR_COUNT, true);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Supernode);
harness
.extend_chain(
@@ -106,7 +107,7 @@ async fn get_chain_segment() -> (Vec<BeaconSnapshot<E>>, Vec<Option<DataSidecars
fn get_harness(
validator_count: usize,
supernode: bool,
node_custody_type: NodeCustodyType,
) -> BeaconChainHarness<EphemeralHarnessType<E>> {
let harness = BeaconChainHarness::builder(MainnetEthSpec)
.default_spec()
@@ -115,7 +116,7 @@ fn get_harness(
..ChainConfig::default()
})
.keypairs(KEYPAIRS[0..validator_count].to_vec())
.import_all_data_columns(supernode)
.node_custody_type(node_custody_type)
.fresh_ephemeral_store()
.mock_execution_layer()
.build();
@@ -259,7 +260,7 @@ fn update_data_column_signed_header<E: EthSpec>(
#[tokio::test]
async fn chain_segment_full_segment() {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
let (chain_segment, chain_segment_blobs) = get_chain_segment().await;
let blocks: Vec<RpcBlock<E>> = chain_segment_blocks(&chain_segment, &chain_segment_blobs)
.into_iter()
@@ -297,7 +298,7 @@ async fn chain_segment_full_segment() {
#[tokio::test]
async fn chain_segment_varying_chunk_size() {
for chunk_size in &[1, 2, 3, 5, 31, 32, 33, 42] {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
let (chain_segment, chain_segment_blobs) = get_chain_segment().await;
let blocks: Vec<RpcBlock<E>> = chain_segment_blocks(&chain_segment, &chain_segment_blobs)
.into_iter()
@@ -329,7 +330,7 @@ async fn chain_segment_varying_chunk_size() {
#[tokio::test]
async fn chain_segment_non_linear_parent_roots() {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
let (chain_segment, chain_segment_blobs) = get_chain_segment().await;
harness
@@ -386,7 +387,7 @@ async fn chain_segment_non_linear_parent_roots() {
#[tokio::test]
async fn chain_segment_non_linear_slots() {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
let (chain_segment, chain_segment_blobs) = get_chain_segment().await;
harness
.chain
@@ -528,7 +529,7 @@ async fn assert_invalid_signature(
async fn get_invalid_sigs_harness(
chain_segment: &[BeaconSnapshot<E>],
) -> BeaconChainHarness<EphemeralHarnessType<E>> {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
harness
.chain
.slot_clock
@@ -986,7 +987,7 @@ fn unwrap_err<T, U>(result: Result<T, U>) -> U {
#[tokio::test]
async fn block_gossip_verification() {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
let (chain_segment, chain_segment_blobs) = get_chain_segment().await;
let block_index = CHAIN_SEGMENT_LENGTH - 2;
@@ -1389,7 +1390,7 @@ async fn verify_block_for_gossip_slashing_detection() {
#[tokio::test]
async fn verify_block_for_gossip_doppelganger_detection() {
let harness = get_harness(VALIDATOR_COUNT, false);
let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode);
let state = harness.get_current_state();
let ((block, _), _) = harness.make_block(state.clone(), Slot::new(1)).await;

View File

@@ -0,0 +1,117 @@
#![cfg(not(debug_assertions))]
use beacon_chain::custody_context::NodeCustodyType;
use beacon_chain::test_utils::{
AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType,
generate_data_column_sidecars_from_block, test_spec,
};
use beacon_chain::{
AvailabilityProcessingStatus, BlockError, ChainConfig, InvalidSignature, NotifyExecutionLayer,
block_verification_types::AsBlock,
};
use logging::create_test_tracing_subscriber;
use std::sync::{Arc, LazyLock};
use types::*;
type E = MainnetEthSpec;
// Should ideally be divisible by 3.
const VALIDATOR_COUNT: usize = 24;
/// A cached set of keys.
static KEYPAIRS: LazyLock<Vec<Keypair>> =
LazyLock::new(|| types::test_utils::generate_deterministic_keypairs(VALIDATOR_COUNT));
/// Build an ephemeral test harness with `validator_count` validators drawn
/// from the cached deterministic keypairs, using the given `spec` and node
/// custody type, a mock execution layer, and historic state reconstruction
/// enabled. The slot clock is advanced one slot before the harness is
/// returned.
fn get_harness(
    validator_count: usize,
    spec: Arc<ChainSpec>,
    node_custody_type: NodeCustodyType,
) -> BeaconChainHarness<EphemeralHarnessType<E>> {
    create_test_tracing_subscriber();
    let chain_config = ChainConfig {
        reconstruct_historic_states: true,
        ..ChainConfig::default()
    };
    let harness = BeaconChainHarness::builder(MainnetEthSpec)
        .spec(spec)
        .chain_config(chain_config)
        .keypairs(KEYPAIRS[..validator_count].to_vec())
        .node_custody_type(node_custody_type)
        .fresh_ephemeral_store()
        .mock_execution_layer()
        .build();
    harness.advance_slot();
    harness
}
// Regression test for https://github.com/sigp/lighthouse/issues/7650
//
// Data-column variant of the blob test: imports a block without its data
// columns (leaving it pending in `MissingComponents`), then feeds in RPC
// custody columns built from a copy of the block with a corrupted header
// signature, and asserts they are rejected with
// `InvalidSignature::ProposerSignature`.
#[tokio::test]
async fn rpc_columns_with_invalid_header_signature() {
    let spec = Arc::new(test_spec::<E>());
    // Only run this test if columns are enabled.
    if !spec.is_fulu_scheduled() {
        return;
    }
    // Run as a supernode so the custody columns are processed locally.
    let harness = get_harness(VALIDATOR_COUNT, spec, NodeCustodyType::Supernode);
    let num_blocks = E::slots_per_epoch() as usize;
    // Add some chain depth.
    harness
        .extend_chain(
            num_blocks,
            BlockStrategy::OnCanonicalHead,
            AttestationStrategy::AllValidators,
        )
        .await;
    // Produce a block with blobs (force the mock EL to include at least one).
    harness.execution_block_generator().set_min_blob_count(1);
    let head_state = harness.get_current_state();
    let slot = head_state.slot() + 1;
    let ((signed_block, opt_blobs), _) = harness.make_block(head_state, slot).await;
    let (_, blobs) = opt_blobs.unwrap();
    assert!(!blobs.is_empty());
    let block_root = signed_block.canonical_root();
    // Process the block without its data columns so that it doesn't become
    // available.
    harness.advance_slot();
    let rpc_block = harness
        .build_rpc_block_from_blobs(block_root, signed_block.clone(), None)
        .unwrap();
    let availability = harness
        .chain
        .process_block(
            block_root,
            rpc_block,
            NotifyExecutionLayer::Yes,
            BlockImportSource::RangeSync,
            || Ok(()),
        )
        .await
        .unwrap();
    // The block must now be waiting on its missing data columns.
    assert_eq!(
        availability,
        AvailabilityProcessingStatus::MissingComponents(slot, block_root)
    );
    // Build data column sidecars with an invalid signature in the block
    // header, by replacing the proposer signature on a copy of the block.
    let mut corrupt_block = (*signed_block).clone();
    *corrupt_block.signature_mut() = Signature::infinity().unwrap();
    let data_column_sidecars =
        generate_data_column_sidecars_from_block(&corrupt_block, &harness.chain.spec);
    // The invalid header signature must cause the RPC columns to be rejected.
    let err = harness
        .chain
        .process_rpc_custody_columns(data_column_sidecars)
        .await
        .unwrap_err();
    assert!(matches!(
        err,
        BlockError::InvalidSignature(InvalidSignature::ProposerSignature)
    ));
}

View File

@@ -1,15 +1,13 @@
use beacon_chain::blob_verification::GossipVerifiedBlob;
use beacon_chain::data_column_verification::GossipVerifiedDataColumn;
use beacon_chain::test_utils::{BeaconChainHarness, TEST_DATA_COLUMN_SIDECARS_SSZ};
use beacon_chain::test_utils::{BeaconChainHarness, generate_data_column_sidecars_from_block};
use eth2::types::{EventKind, SseBlobSidecar, SseDataColumnSidecar};
use rand::SeedableRng;
use rand::rngs::StdRng;
use std::sync::Arc;
use types::blob_sidecar::FixedBlobSidecarList;
use types::test_utils::TestRandom;
use types::{
BlobSidecar, DataColumnSidecar, EthSpec, ForkName, MinimalEthSpec, RuntimeVariableList, Slot,
};
use types::{BlobSidecar, DataColumnSidecar, EthSpec, ForkName, MinimalEthSpec, Slot};
type E = MinimalEthSpec;
@@ -108,19 +106,18 @@ async fn blob_sidecar_event_on_process_rpc_blobs() {
let mut blob_event_receiver = event_handler.subscribe_blob_sidecar();
// build and process multiple rpc blobs
let kzg = harness.chain.kzg.as_ref();
let mut rng = StdRng::seed_from_u64(0xDEADBEEF0BAD5EEDu64);
harness.execution_block_generator().set_min_blob_count(2);
let mut blob_1 = BlobSidecar::random_valid(&mut rng, kzg).unwrap();
let mut blob_2 = BlobSidecar {
index: 1,
..BlobSidecar::random_valid(&mut rng, kzg).unwrap()
};
let parent_root = harness.chain.head().head_block_root();
blob_1.signed_block_header.message.parent_root = parent_root;
blob_2.signed_block_header.message.parent_root = parent_root;
let blob_1 = Arc::new(blob_1);
let blob_2 = Arc::new(blob_2);
let head_state = harness.get_current_state();
let slot = head_state.slot() + 1;
let ((signed_block, opt_blobs), _) = harness.make_block(head_state, slot).await;
let (kzg_proofs, blobs) = opt_blobs.unwrap();
assert!(blobs.len() > 2);
let blob_1 =
Arc::new(BlobSidecar::new(0, blobs[0].clone(), &signed_block, kzg_proofs[0]).unwrap());
let blob_2 =
Arc::new(BlobSidecar::new(1, blobs[1].clone(), &signed_block, kzg_proofs[1]).unwrap());
let blobs = FixedBlobSidecarList::new(vec![Some(blob_1.clone()), Some(blob_2.clone())]);
let expected_sse_blobs = vec![
@@ -130,7 +127,7 @@ async fn blob_sidecar_event_on_process_rpc_blobs() {
let _ = harness
.chain
.process_rpc_blobs(blob_1.slot(), blob_1.block_root(), blobs)
.process_rpc_blobs(slot, blob_1.block_root(), blobs)
.await
.unwrap();
@@ -159,20 +156,24 @@ async fn data_column_sidecar_event_on_process_rpc_columns() {
let event_handler = harness.chain.event_handler.as_ref().unwrap();
let mut data_column_event_receiver = event_handler.subscribe_data_column_sidecar();
// build a valid block
harness.execution_block_generator().set_min_blob_count(1);
let head_state = harness.get_current_state();
let slot = head_state.slot() + 1;
let ((signed_block, opt_blobs), _) = harness.make_block(head_state, slot).await;
let (_, blobs) = opt_blobs.unwrap();
assert!(!blobs.is_empty());
// load the precomputed column sidecar to avoid computing them for every block in the tests.
let mut sidecar = RuntimeVariableList::<DataColumnSidecar<E>>::from_ssz_bytes(
TEST_DATA_COLUMN_SIDECARS_SSZ,
E::number_of_columns(),
)
.unwrap()[0]
.clone();
let parent_root = harness.chain.head().head_block_root();
sidecar.signed_block_header.message.parent_root = parent_root;
let data_column_sidecars =
generate_data_column_sidecars_from_block(&signed_block, &harness.chain.spec);
let sidecar = data_column_sidecars[0].clone();
let expected_sse_data_column = SseDataColumnSidecar::from_data_column_sidecar(&sidecar);
let _ = harness
.chain
.process_rpc_custody_columns(vec![Arc::new(sidecar)])
.process_rpc_custody_columns(vec![sidecar])
.await
.unwrap();

View File

@@ -1,8 +1,10 @@
mod attestation_production;
mod attestation_verification;
mod bellatrix;
mod blob_verification;
mod block_verification;
mod capella;
mod column_verification;
mod events;
mod op_verification;
mod payload_invalidation;

File diff suppressed because it is too large Load Diff