resolve merge conflicts between unstable and release-v7.0.0

This commit is contained in:
Eitan Seri-Levi
2025-03-23 11:09:02 -06:00
63 changed files with 1422 additions and 242 deletions

View File

@@ -36,6 +36,9 @@ pub const VALIDATOR_COUNT: usize = 256;
pub const CAPELLA_FORK_EPOCH: usize = 1;
// When set to true, cache any states fetched from the db.
pub const CACHE_STATE_IN_TESTS: bool = true;
/// A cached set of keys.
static KEYPAIRS: LazyLock<Vec<Keypair>> =
LazyLock::new(|| types::test_utils::generate_deterministic_keypairs(VALIDATOR_COUNT));
@@ -1225,7 +1228,11 @@ async fn attestation_that_skips_epochs() {
let mut state = harness
.chain
.get_state(&earlier_block.state_root(), Some(earlier_slot))
.get_state(
&earlier_block.state_root(),
Some(earlier_slot),
CACHE_STATE_IN_TESTS,
)
.expect("should not error getting state")
.expect("should find state");
@@ -1329,9 +1336,14 @@ async fn attestation_validator_receive_proposer_reward_and_withdrawals() {
.await;
let current_slot = harness.get_current_slot();
let mut state = harness
.chain
.get_state(&earlier_block.state_root(), Some(earlier_slot))
.get_state(
&earlier_block.state_root(),
Some(earlier_slot),
CACHE_STATE_IN_TESTS,
)
.expect("should not error getting state")
.expect("should find state");
@@ -1399,7 +1411,11 @@ async fn attestation_to_finalized_block() {
let mut state = harness
.chain
.get_state(&earlier_block.state_root(), Some(earlier_slot))
.get_state(
&earlier_block.state_root(),
Some(earlier_slot),
CACHE_STATE_IN_TESTS,
)
.expect("should not error getting state")
.expect("should find state");

View File

@@ -18,6 +18,9 @@ use types::{ChainSpec, ForkName, Slot};
pub const VALIDATOR_COUNT: usize = 64;
// When set to true, cache any states fetched from the db.
pub const CACHE_STATE_IN_TESTS: bool = true;
type E = MinimalEthSpec;
static KEYPAIRS: LazyLock<Vec<Keypair>> =
@@ -114,8 +117,13 @@ async fn test_sync_committee_rewards() {
.get_blinded_block(&block.parent_root())
.unwrap()
.unwrap();
let parent_state = chain
.get_state(&parent_block.state_root(), Some(parent_block.slot()))
.get_state(
&parent_block.state_root(),
Some(parent_block.slot()),
CACHE_STATE_IN_TESTS,
)
.unwrap()
.unwrap();

View File

@@ -39,6 +39,9 @@ use types::*;
pub const LOW_VALIDATOR_COUNT: usize = 24;
pub const HIGH_VALIDATOR_COUNT: usize = 64;
// When set to true, cache any states fetched from the db.
pub const CACHE_STATE_IN_TESTS: bool = true;
/// A cached set of keys.
static KEYPAIRS: LazyLock<Vec<Keypair>> =
LazyLock::new(|| types::test_utils::generate_deterministic_keypairs(HIGH_VALIDATOR_COUNT));
@@ -756,6 +759,7 @@ async fn delete_blocks_and_states() {
.get_state(
&faulty_head_block.state_root(),
Some(faulty_head_block.slot()),
CACHE_STATE_IN_TESTS,
)
.expect("no db error")
.expect("faulty head state exists");
@@ -769,7 +773,12 @@ async fn delete_blocks_and_states() {
break;
}
store.delete_state(&state_root, slot).unwrap();
assert_eq!(store.get_state(&state_root, Some(slot)).unwrap(), None);
assert_eq!(
store
.get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS)
.unwrap(),
None
);
}
// Double-deleting should also be OK (deleting non-existent things is fine)
@@ -1053,7 +1062,11 @@ fn get_state_for_block(harness: &TestHarness, block_root: Hash256) -> BeaconStat
.unwrap();
harness
.chain
.get_state(&head_block.state_root(), Some(head_block.slot()))
.get_state(
&head_block.state_root(),
Some(head_block.slot()),
CACHE_STATE_IN_TESTS,
)
.unwrap()
.unwrap()
}
@@ -1890,7 +1903,10 @@ fn check_all_states_exist<'a>(
states: impl Iterator<Item = &'a BeaconStateHash>,
) {
for &state_hash in states {
let state = harness.chain.get_state(&state_hash.into(), None).unwrap();
let state = harness
.chain
.get_state(&state_hash.into(), None, CACHE_STATE_IN_TESTS)
.unwrap();
assert!(
state.is_some(),
"expected state {:?} to be in DB",
@@ -1908,7 +1924,7 @@ fn check_no_states_exist<'a>(
assert!(
harness
.chain
.get_state(&state_root.into(), None)
.get_state(&state_root.into(), None, CACHE_STATE_IN_TESTS)
.unwrap()
.is_none(),
"state {:?} should not be in the DB",
@@ -2342,7 +2358,7 @@ async fn weak_subjectivity_sync_test(slots: Vec<Slot>, checkpoint_slot: Slot) {
.get_or_reconstruct_blobs(&wss_block_root)
.unwrap();
let wss_state = full_store
.get_state(&wss_state_root, Some(checkpoint_slot))
.get_state(&wss_state_root, Some(checkpoint_slot), CACHE_STATE_IN_TESTS)
.unwrap()
.unwrap();
@@ -2454,7 +2470,7 @@ async fn weak_subjectivity_sync_test(slots: Vec<Slot>, checkpoint_slot: Slot) {
// Check that the new block's state can be loaded correctly.
let mut state = beacon_chain
.store
.get_state(&state_root, Some(slot))
.get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS)
.unwrap()
.unwrap();
assert_eq!(state.update_tree_hash_cache().unwrap(), state_root);
@@ -2584,7 +2600,10 @@ async fn weak_subjectivity_sync_test(slots: Vec<Slot>, checkpoint_slot: Slot) {
.unwrap()
.map(Result::unwrap)
{
let mut state = store.get_state(&state_root, Some(slot)).unwrap().unwrap();
let mut state = store
.get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS)
.unwrap()
.unwrap();
assert_eq!(state.slot(), slot);
assert_eq!(state.canonical_root().unwrap(), state_root);
}
@@ -3410,9 +3429,10 @@ async fn prune_historic_states() {
let store = get_store(&db_path);
let harness = get_harness(store.clone(), LOW_VALIDATOR_COUNT);
let genesis_state_root = harness.chain.genesis_state_root;
let genesis_state = harness
.chain
.get_state(&genesis_state_root, None)
.get_state(&genesis_state_root, None, CACHE_STATE_IN_TESTS)
.unwrap()
.unwrap();
@@ -3433,7 +3453,10 @@ async fn prune_historic_states() {
.map(Result::unwrap)
.collect::<Vec<_>>();
for &(state_root, slot) in &first_epoch_state_roots {
assert!(store.get_state(&state_root, Some(slot)).unwrap().is_some());
assert!(store
.get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS)
.unwrap()
.is_some());
}
store
@@ -3448,7 +3471,10 @@ async fn prune_historic_states() {
// Ensure all epoch 0 states other than the genesis have been pruned.
for &(state_root, slot) in &first_epoch_state_roots {
assert_eq!(
store.get_state(&state_root, Some(slot)).unwrap().is_some(),
store
.get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS)
.unwrap()
.is_some(),
slot == 0
);
}
@@ -3574,7 +3600,7 @@ fn check_chain_dump(harness: &TestHarness, expected_len: u64) {
harness
.chain
.store
.get_state(&checkpoint.beacon_state_root(), None)
.get_state(&checkpoint.beacon_state_root(), None, CACHE_STATE_IN_TESTS)
.expect("no error")
.expect("state exists")
.slot(),
@@ -3636,7 +3662,7 @@ fn check_iterators(harness: &TestHarness) {
harness
.chain
.store
.get_state(&state_root, Some(slot))
.get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS)
.unwrap()
.is_some(),
"state {:?} from canonical chain should be in DB",

View File

@@ -21,6 +21,9 @@ pub type E = MainnetEthSpec;
pub const VALIDATOR_COUNT: usize = 256;
// When set to true, cache any states fetched from the db.
pub const CACHE_STATE_IN_TESTS: bool = true;
/// A cached set of keys.
static KEYPAIRS: LazyLock<Vec<Keypair>> =
LazyLock::new(|| types::test_utils::generate_deterministic_keypairs(VALIDATOR_COUNT));
@@ -755,7 +758,10 @@ async fn unaggregated_gossip_verification() {
// Load the block and state for the given root.
let block = chain.get_block(&root).await.unwrap().unwrap();
let mut state = chain.get_state(&block.state_root(), None).unwrap().unwrap();
let mut state = chain
.get_state(&block.state_root(), None, CACHE_STATE_IN_TESTS)
.unwrap()
.unwrap();
// Advance the state to simulate a pre-state for block production.
let slot = valid_sync_committee_message.slot + 1;

View File

@@ -12,10 +12,12 @@ use operation_pool::PersistedOperationPool;
use state_processing::{per_slot_processing, per_slot_processing::Error as SlotProcessingError};
use std::sync::LazyLock;
use types::{
BeaconState, BeaconStateError, BlockImportSource, EthSpec, Hash256, Keypair, MinimalEthSpec,
RelativeEpoch, Slot,
BeaconState, BeaconStateError, BlockImportSource, Checkpoint, EthSpec, Hash256, Keypair,
MinimalEthSpec, RelativeEpoch, Slot,
};
type E = MinimalEthSpec;
// Should ideally be divisible by 3.
pub const VALIDATOR_COUNT: usize = 48;
@@ -24,12 +26,22 @@ static KEYPAIRS: LazyLock<Vec<Keypair>> =
LazyLock::new(|| types::test_utils::generate_deterministic_keypairs(VALIDATOR_COUNT));
fn get_harness(validator_count: usize) -> BeaconChainHarness<EphemeralHarnessType<MinimalEthSpec>> {
get_harness_with_config(
validator_count,
ChainConfig {
reconstruct_historic_states: true,
..Default::default()
},
)
}
fn get_harness_with_config(
validator_count: usize,
chain_config: ChainConfig,
) -> BeaconChainHarness<EphemeralHarnessType<MinimalEthSpec>> {
let harness = BeaconChainHarness::builder(MinimalEthSpec)
.default_spec()
.chain_config(ChainConfig {
reconstruct_historic_states: true,
..ChainConfig::default()
})
.chain_config(chain_config)
.keypairs(KEYPAIRS[0..validator_count].to_vec())
.fresh_ephemeral_store()
.mock_execution_layer()
@@ -869,3 +881,165 @@ async fn block_roots_skip_slot_behaviour() {
"WhenSlotSkipped::Prev should return None on a future slot"
);
}
/// Shared driver for the `pseudo_finalize_*` tests below.
///
/// Builds a harness whose store migrates every `epochs_per_migration` epochs,
/// extends the chain without finalizing, manually ("pseudo") finalizes at the
/// head, then extends again with full participation so true finalization
/// occurs. `expect_true_finalization_migration` states whether the database
/// split point is expected to advance past the pseudo-finalized slot once the
/// chain truly finalizes (it will not when the migration lag keeps the split
/// at the pseudo-finalized slot).
async fn pseudo_finalize_test_generic(
    epochs_per_migration: u64,
    expect_true_finalization_migration: bool,
) {
    // This test ensures that after pseudo finalization, we can still finalize the chain without issues
    let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 5;
    let chain_config = ChainConfig {
        reconstruct_historic_states: true,
        epochs_per_migration,
        ..Default::default()
    };
    let harness = get_harness_with_config(VALIDATOR_COUNT, chain_config);
    // Only a third of validators attest, so the chain cannot justify or
    // finalize on its own during this first extension.
    let one_third = VALIDATOR_COUNT / 3;
    let attesters = (0..one_third).collect();
    // extend the chain, but don't finalize
    harness
        .extend_chain(
            num_blocks_produced as usize,
            BlockStrategy::OnCanonicalHead,
            AttestationStrategy::SomeValidators(attesters),
        )
        .await;
    harness.advance_slot();
    let head = harness.chain.head_snapshot();
    let state = &head.beacon_state;
    let split = harness.chain.store.get_split_info();
    // Sanity checks: nothing justified or finalized yet, and the store's
    // split point has not moved from genesis.
    assert_eq!(
        state.slot(),
        num_blocks_produced,
        "head should be at the current slot"
    );
    assert_eq!(
        state.current_epoch(),
        num_blocks_produced / MinimalEthSpec::slots_per_epoch(),
        "head should be at the expected epoch"
    );
    assert_eq!(
        state.current_justified_checkpoint().epoch,
        0,
        "There should be no justified checkpoint"
    );
    assert_eq!(
        state.finalized_checkpoint().epoch,
        0,
        "There should be no finalized checkpoint"
    );
    assert_eq!(split.slot, 0, "Our split point should be unset");
    // Manually ("pseudo") finalize at the current head.
    let checkpoint = Checkpoint {
        epoch: head.beacon_state.current_epoch(),
        root: head.beacon_block_root,
    };
    // pseudo finalize
    harness
        .chain
        .manually_finalize_state(head.beacon_state_root(), checkpoint)
        .unwrap();
    let split = harness.chain.store.get_split_info();
    let pseudo_finalized_slot = split.slot;
    // Pseudo finalization moves the split point but must not touch the
    // state's justified/finalized checkpoints.
    assert_eq!(
        state.current_justified_checkpoint().epoch,
        0,
        "We pseudo finalized, but our justified checkpoint should still be unset"
    );
    assert_eq!(
        state.finalized_checkpoint().epoch,
        0,
        "We pseudo finalized, but our finalized checkpoint should still be unset"
    );
    assert_eq!(
        split.slot,
        head.beacon_state.slot(),
        "We pseudo finalized, our split point should be at the current head slot"
    );
    // finalize the chain
    harness
        .extend_chain(
            num_blocks_produced as usize,
            BlockStrategy::OnCanonicalHead,
            AttestationStrategy::AllValidators,
        )
        .await;
    harness.advance_slot();
    let head = harness.chain.head_snapshot();
    let state = &head.beacon_state;
    let split = harness.chain.store.get_split_info();
    // With full participation the chain should now be justified (epoch - 1)
    // and finalized (epoch - 2).
    assert_eq!(
        state.slot(),
        num_blocks_produced * 2,
        "head should be at the current slot"
    );
    assert_eq!(
        state.current_epoch(),
        (num_blocks_produced * 2) / MinimalEthSpec::slots_per_epoch(),
        "head should be at the expected epoch"
    );
    assert_eq!(
        state.current_justified_checkpoint().epoch,
        state.current_epoch() - 1,
        "the head should be justified one behind the current epoch"
    );
    let finalized_epoch = state.finalized_checkpoint().epoch;
    assert_eq!(
        finalized_epoch,
        state.current_epoch() - 2,
        "the head should be finalized two behind the current epoch"
    );
    // The split only advances to the finalized epoch if the migration was
    // due, i.e. if at least `epochs_per_migration` epochs elapsed between the
    // pseudo-finalized epoch and the truly finalized epoch; otherwise it
    // stays at the pseudo-finalized slot.
    let expected_split_slot = if pseudo_finalized_slot.epoch(E::slots_per_epoch())
        + epochs_per_migration
        > finalized_epoch
    {
        pseudo_finalized_slot
    } else {
        finalized_epoch.start_slot(E::slots_per_epoch())
    };
    assert_eq!(
        split.slot, expected_split_slot,
        "We finalized, our split point should be updated according to epochs_per_migration"
    );
    // In the case that we did not process the true finalization migration (due to
    // epochs_per_migration), check that the chain finalized *despite* the absence of the split
    // block in fork choice.
    // This is a regression test for https://github.com/sigp/lighthouse/pull/7105
    if !expect_true_finalization_migration {
        assert_eq!(expected_split_slot, pseudo_finalized_slot);
        assert!(!harness
            .chain
            .canonical_head
            .fork_choice_read_lock()
            .contains_block(&split.block_root));
    }
}
#[tokio::test]
async fn pseudo_finalize_basic() {
    // No migration lag (`epochs_per_migration == 0`): the true finalization
    // migration is expected to run once the chain finalizes.
    pseudo_finalize_test_generic(0, true).await;
}
#[tokio::test]
async fn pseudo_finalize_with_lagging_split_update() {
    // Large migration lag (`epochs_per_migration == 10`): the true
    // finalization migration is expected NOT to run, leaving the split point
    // at the pseudo-finalized slot.
    pseudo_finalize_test_generic(10, false).await;
}