Realized unrealized experimentation (#3322)

## Issue Addressed

Add a flag that optionally enables unrealized vote tracking. Would like to test out on testnets and benchmark differences in methods of vote tracking. This PR includes a DB schema upgrade to enable the new vote tracking style.


Co-authored-by: realbigsean <sean@sigmaprime.io>
Co-authored-by: Paul Hauner <paul@paulhauner.com>
Co-authored-by: sean <seananderson33@gmail.com>
Co-authored-by: Mac L <mjladson@pm.me>
This commit is contained in:
realbigsean
2022-07-25 23:53:26 +00:00
parent bb5a6d2cca
commit 20ebf1f3c1
47 changed files with 1254 additions and 338 deletions

View File

@@ -4,6 +4,7 @@ use beacon_chain::test_utils::{
AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType,
};
use beacon_chain::{BeaconSnapshot, BlockError, ChainSegmentResult};
use fork_choice::CountUnrealized;
use lazy_static::lazy_static;
use logging::test_logger;
use slasher::{Config as SlasherConfig, Slasher};
@@ -147,14 +148,14 @@ async fn chain_segment_full_segment() {
// Sneak in a little check to ensure we can process empty chain segments.
harness
.chain
.process_chain_segment(vec![])
.process_chain_segment(vec![], CountUnrealized::True)
.await
.into_block_error()
.expect("should import empty chain segment");
harness
.chain
.process_chain_segment(blocks.clone())
.process_chain_segment(blocks.clone(), CountUnrealized::True)
.await
.into_block_error()
.expect("should import chain segment");
@@ -187,7 +188,7 @@ async fn chain_segment_varying_chunk_size() {
for chunk in blocks.chunks(*chunk_size) {
harness
.chain
.process_chain_segment(chunk.to_vec())
.process_chain_segment(chunk.to_vec(), CountUnrealized::True)
.await
.into_block_error()
.unwrap_or_else(|_| panic!("should import chain segment of len {}", chunk_size));
@@ -227,7 +228,7 @@ async fn chain_segment_non_linear_parent_roots() {
matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::NonLinearParentRoots)
@@ -247,7 +248,7 @@ async fn chain_segment_non_linear_parent_roots() {
matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::NonLinearParentRoots)
@@ -278,7 +279,7 @@ async fn chain_segment_non_linear_slots() {
matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::NonLinearSlots)
@@ -299,7 +300,7 @@ async fn chain_segment_non_linear_slots() {
matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::NonLinearSlots)
@@ -325,7 +326,7 @@ async fn assert_invalid_signature(
matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::InvalidSignature)
@@ -342,12 +343,18 @@ async fn assert_invalid_signature(
.collect();
// We don't care if this fails, we just call this to ensure that all prior blocks have been
// imported prior to this test.
let _ = harness.chain.process_chain_segment(ancestor_blocks).await;
let _ = harness
.chain
.process_chain_segment(ancestor_blocks, CountUnrealized::True)
.await;
assert!(
matches!(
harness
.chain
.process_block(snapshots[block_index].beacon_block.clone())
.process_block(
snapshots[block_index].beacon_block.clone(),
CountUnrealized::True
)
.await,
Err(BlockError::InvalidSignature)
),
@@ -397,7 +404,7 @@ async fn invalid_signature_gossip_block() {
.collect();
harness
.chain
.process_chain_segment(ancestor_blocks)
.process_chain_segment(ancestor_blocks, CountUnrealized::True)
.await
.into_block_error()
.expect("should import all blocks prior to the one being tested");
@@ -405,10 +412,10 @@ async fn invalid_signature_gossip_block() {
matches!(
harness
.chain
.process_block(Arc::new(SignedBeaconBlock::from_block(
block,
junk_signature()
)))
.process_block(
Arc::new(SignedBeaconBlock::from_block(block, junk_signature())),
CountUnrealized::True
)
.await,
Err(BlockError::InvalidSignature)
),
@@ -441,7 +448,7 @@ async fn invalid_signature_block_proposal() {
matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::InvalidSignature)
@@ -639,7 +646,7 @@ async fn invalid_signature_deposit() {
!matches!(
harness
.chain
.process_chain_segment(blocks)
.process_chain_segment(blocks, CountUnrealized::True)
.await
.into_block_error(),
Err(BlockError::InvalidSignature)
@@ -716,11 +723,18 @@ async fn block_gossip_verification() {
harness
.chain
.process_block(gossip_verified)
.process_block(gossip_verified, CountUnrealized::True)
.await
.expect("should import valid gossip verified block");
}
// Recompute the head to ensure we cache the latest view of fork choice.
harness
.chain
.recompute_head_at_current_slot()
.await
.unwrap();
/*
* This test ensures that:
*
@@ -978,7 +992,11 @@ async fn verify_block_for_gossip_slashing_detection() {
.verify_block_for_gossip(Arc::new(block1))
.await
.unwrap();
harness.chain.process_block(verified_block).await.unwrap();
harness
.chain
.process_block(verified_block, CountUnrealized::True)
.await
.unwrap();
unwrap_err(
harness
.chain
@@ -1009,7 +1027,11 @@ async fn verify_block_for_gossip_doppelganger_detection() {
.await
.unwrap();
let attestations = verified_block.block.message().body().attestations().clone();
harness.chain.process_block(verified_block).await.unwrap();
harness
.chain
.process_block(verified_block, CountUnrealized::True)
.await
.unwrap();
for att in attestations.iter() {
let epoch = att.data.target.epoch;
@@ -1148,7 +1170,7 @@ async fn add_base_block_to_altair_chain() {
assert!(matches!(
harness
.chain
.process_block(Arc::new(base_block.clone()))
.process_block(Arc::new(base_block.clone()), CountUnrealized::True)
.await
.err()
.expect("should error when processing base block"),
@@ -1162,7 +1184,7 @@ async fn add_base_block_to_altair_chain() {
assert!(matches!(
harness
.chain
.process_chain_segment(vec![Arc::new(base_block)])
.process_chain_segment(vec![Arc::new(base_block)], CountUnrealized::True)
.await,
ChainSegmentResult::Failed {
imported_blocks: 0,
@@ -1276,7 +1298,7 @@ async fn add_altair_block_to_base_chain() {
assert!(matches!(
harness
.chain
.process_block(Arc::new(altair_block.clone()))
.process_block(Arc::new(altair_block.clone()), CountUnrealized::True)
.await
.err()
.expect("should error when processing altair block"),
@@ -1290,7 +1312,7 @@ async fn add_altair_block_to_base_chain() {
assert!(matches!(
harness
.chain
.process_chain_segment(vec![Arc::new(altair_block)])
.process_chain_segment(vec![Arc::new(altair_block)], CountUnrealized::True)
.await,
ChainSegmentResult::Failed {
imported_blocks: 0,

View File

@@ -9,7 +9,9 @@ use execution_layer::{
json_structures::{JsonForkChoiceStateV1, JsonPayloadAttributesV1},
ExecutionLayer, ForkChoiceState, PayloadAttributes,
};
use fork_choice::{Error as ForkChoiceError, InvalidationOperation, PayloadVerificationStatus};
use fork_choice::{
CountUnrealized, Error as ForkChoiceError, InvalidationOperation, PayloadVerificationStatus,
};
use proto_array::{Error as ProtoArrayError, ExecutionStatus};
use slot_clock::SlotClock;
use std::sync::Arc;
@@ -648,7 +650,7 @@ async fn invalidates_all_descendants() {
let fork_block_root = rig
.harness
.chain
.process_block(Arc::new(fork_block))
.process_block(Arc::new(fork_block), CountUnrealized::True)
.await
.unwrap();
rig.recompute_head().await;
@@ -740,7 +742,7 @@ async fn switches_heads() {
let fork_block_root = rig
.harness
.chain
.process_block(Arc::new(fork_block))
.process_block(Arc::new(fork_block), CountUnrealized::True)
.await
.unwrap();
rig.recompute_head().await;
@@ -984,7 +986,7 @@ async fn invalid_parent() {
// Ensure the block built atop an invalid payload is invalid for import.
assert!(matches!(
rig.harness.chain.process_block(block.clone()).await,
rig.harness.chain.process_block(block.clone(), CountUnrealized::True).await,
Err(BlockError::ParentExecutionPayloadInvalid { parent_root: invalid_root })
if invalid_root == parent_root
));
@@ -998,7 +1000,8 @@ async fn invalid_parent() {
Duration::from_secs(0),
&state,
PayloadVerificationStatus::Optimistic,
&rig.harness.chain.spec
&rig.harness.chain.spec,
CountUnrealized::True,
),
Err(ForkChoiceError::ProtoArrayError(message))
if message.contains(&format!(

View File

@@ -10,6 +10,7 @@ use beacon_chain::{
BeaconChainError, BeaconChainTypes, BeaconSnapshot, ChainConfig, ServerSentEventHandler,
WhenSlotSkipped,
};
use fork_choice::CountUnrealized;
use lazy_static::lazy_static;
use logging::test_logger;
use maplit::hashset;
@@ -2124,7 +2125,7 @@ async fn weak_subjectivity_sync() {
beacon_chain.slot_clock.set_slot(block.slot().as_u64());
beacon_chain
.process_block(Arc::new(full_block))
.process_block(Arc::new(full_block), CountUnrealized::True)
.await
.unwrap();
beacon_chain.recompute_head_at_current_slot().await.unwrap();

View File

@@ -8,6 +8,7 @@ use beacon_chain::{
},
BeaconChain, StateSkipConfig, WhenSlotSkipped,
};
use fork_choice::CountUnrealized;
use lazy_static::lazy_static;
use operation_pool::PersistedOperationPool;
use state_processing::{
@@ -499,7 +500,7 @@ async fn unaggregated_attestations_added_to_fork_choice_some_none() {
// Move forward a slot so all queued attestations can be processed.
harness.advance_slot();
fork_choice
.update_time(harness.chain.slot().unwrap())
.update_time(harness.chain.slot().unwrap(), &harness.chain.spec)
.unwrap();
let validator_slots: Vec<(usize, Slot)> = (0..VALIDATOR_COUNT)
@@ -613,7 +614,7 @@ async fn unaggregated_attestations_added_to_fork_choice_all_updated() {
// Move forward a slot so all queued attestations can be processed.
harness.advance_slot();
fork_choice
.update_time(harness.chain.slot().unwrap())
.update_time(harness.chain.slot().unwrap(), &harness.chain.spec)
.unwrap();
let validators: Vec<usize> = (0..VALIDATOR_COUNT).collect();
@@ -683,7 +684,10 @@ async fn run_skip_slot_test(skip_slots: u64) {
assert_eq!(
harness_b
.chain
.process_block(harness_a.chain.head_snapshot().beacon_block.clone())
.process_block(
harness_a.chain.head_snapshot().beacon_block.clone(),
CountUnrealized::True
)
.await
.unwrap(),
harness_a.chain.head_snapshot().beacon_block_root