Tree hash caching and optimisations for Altair (#2459)

## Proposed Changes

Remove the remaining Altair `FIXME`s from consensus land.

1. Implement tree hash caching for the participation lists. This required some light type manipulation, including removing the `TreeHash` bound from `CachedTreeHash` which was purely descriptive.
2. Plumb the proposer index through Altair attestation processing, to avoid calculating it for _every_ attestation (potentially 128ms on large networks). This duplicates some work from #2431, but with the aim of getting it in sooner, particularly for the Altair devnets.
3. Remove two FIXMEs related to `superstruct` and cloning, which are unlikely to be particularly detrimental and will be tracked here instead: https://github.com/sigp/superstruct/issues/5
This commit is contained in:
Michael Sproul
2021-07-23 00:23:53 +00:00
parent 74aa99c409
commit 84e6d71950
11 changed files with 206 additions and 39 deletions

View File

@@ -3,7 +3,9 @@
#![allow(clippy::indexing_slicing)]
use super::Error;
use crate::{BeaconState, EthSpec, Hash256, Slot, Unsigned, Validator};
use crate::{
BeaconState, EthSpec, Hash256, ParticipationFlags, ParticipationList, Slot, Unsigned, Validator,
};
use cached_tree_hash::{int_log, CacheArena, CachedTreeHash, TreeHashCache};
use rayon::prelude::*;
use ssz_derive::{Decode, Encode};
@@ -139,6 +141,9 @@ pub struct BeaconTreeHashCacheInner<T: EthSpec> {
randao_mixes: TreeHashCache,
slashings: TreeHashCache,
eth1_data_votes: Eth1DataVotesTreeHashCache<T>,
// Participation caches
previous_epoch_participation: ParticipationTreeHashCache,
current_epoch_participation: ParticipationTreeHashCache,
}
impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
@@ -163,6 +168,11 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
let mut slashings_arena = CacheArena::default();
let slashings = state.slashings().new_tree_hash_cache(&mut slashings_arena);
let previous_epoch_participation =
ParticipationTreeHashCache::new(state, BeaconState::previous_epoch_participation);
let current_epoch_participation =
ParticipationTreeHashCache::new(state, BeaconState::current_epoch_participation);
Self {
previous_state: None,
validators,
@@ -176,6 +186,8 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
randao_mixes,
slashings,
eth1_data_votes: Eth1DataVotesTreeHashCache::new(state),
previous_epoch_participation,
current_epoch_participation,
}
}
@@ -264,31 +276,25 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
)?;
// Participation
match state {
BeaconState::Base(state) => {
hasher.write(
state
.previous_epoch_attestations
.tree_hash_root()
.as_bytes(),
)?;
hasher.write(state.current_epoch_attestations.tree_hash_root().as_bytes())?;
}
// FIXME(altair): add a cache to accelerate hashing of these fields
BeaconState::Altair(state) => {
hasher.write(
state
.previous_epoch_participation
.tree_hash_root()
.as_bytes(),
)?;
hasher.write(
state
.current_epoch_participation
.tree_hash_root()
.as_bytes(),
)?;
}
if let BeaconState::Base(state) = state {
hasher.write(
state
.previous_epoch_attestations
.tree_hash_root()
.as_bytes(),
)?;
hasher.write(state.current_epoch_attestations.tree_hash_root().as_bytes())?;
} else {
hasher.write(
self.previous_epoch_participation
.recalculate_tree_hash_root(state.previous_epoch_participation()?)?
.as_bytes(),
)?;
hasher.write(
self.current_epoch_participation
.recalculate_tree_hash_root(state.current_epoch_participation()?)?
.as_bytes(),
)?;
}
hasher.write(state.justification_bits().tree_hash_root().as_bytes())?;
@@ -506,6 +512,60 @@ impl ParallelValidatorTreeHash {
}
}
/// Tree hash cache for a single epoch participation list.
///
/// `inner` may be `None` when the state has no participation list for this field
/// (the constructor swallows the accessor's error); it is populated lazily by
/// `recalculate_tree_hash_root` the first time a list is hashed.
#[derive(Debug, PartialEq, Clone)]
pub struct ParticipationTreeHashCache {
    // `None` until a participation list is first hashed (e.g. before the fork).
    inner: Option<ParticipationTreeHashCacheInner>,
}
/// Backing storage for a participation list's tree hash cache.
#[derive(Debug, PartialEq, Clone)]
pub struct ParticipationTreeHashCacheInner {
    // Memory arena that backs the nodes of `tree_hash_cache`.
    arena: CacheArena,
    // Cached Merkle tree for the participation list.
    tree_hash_cache: TreeHashCache,
}
impl ParticipationTreeHashCache {
    /// Initialize a new cache for the participation list returned by `field` (if any).
    ///
    /// If the accessor returns an error (the state lacks this field), the inner cache
    /// is left unset and will be built lazily by `recalculate_tree_hash_root`.
    fn new<T: EthSpec>(
        state: &BeaconState<T>,
        field: impl FnOnce(
            &BeaconState<T>,
        ) -> Result<
            &VariableList<ParticipationFlags, T::ValidatorRegistryLimit>,
            Error,
        >,
    ) -> Self {
        // An `Err` from the accessor just means the field is absent; treat it as "no cache yet".
        let inner = field(state).ok().map(ParticipationTreeHashCacheInner::new);
        Self { inner }
    }

    /// Compute the tree hash root for the given `epoch_participation`.
    ///
    /// This function will initialize the inner cache if necessary (e.g. when crossing the fork).
    fn recalculate_tree_hash_root<N: Unsigned>(
        &mut self,
        epoch_participation: &VariableList<ParticipationFlags, N>,
    ) -> Result<Hash256, Error> {
        // Lazily build the inner cache the first time a participation list is hashed.
        if self.inner.is_none() {
            self.inner = Some(ParticipationTreeHashCacheInner::new(epoch_participation));
        }
        let cache = self
            .inner
            .as_mut()
            .expect("inner cache was initialized above");
        let root = ParticipationList::new(epoch_participation)
            .recalculate_tree_hash_root(&mut cache.arena, &mut cache.tree_hash_cache)?;
        Ok(root)
    }
}
impl ParticipationTreeHashCacheInner {
    /// Build a fresh arena and tree hash cache sized for `epoch_participation`.
    fn new<N: Unsigned>(epoch_participation: &VariableList<ParticipationFlags, N>) -> Self {
        let mut arena = CacheArena::default();
        let list = ParticipationList::new(epoch_participation);
        let tree_hash_cache = list.new_tree_hash_cache(&mut arena);
        Self {
            arena,
            tree_hash_cache,
        }
    }
}
#[cfg(feature = "arbitrary-fuzz")]
impl<T: EthSpec> arbitrary::Arbitrary for BeaconTreeHashCache<T> {
fn arbitrary(_u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
@@ -516,6 +576,7 @@ impl<T: EthSpec> arbitrary::Arbitrary for BeaconTreeHashCache<T> {
#[cfg(test)]
mod test {
use super::*;
use crate::MainnetEthSpec;
#[test]
fn validator_node_count() {
@@ -524,4 +585,29 @@ mod test {
let _cache = v.new_tree_hash_cache(&mut arena);
assert_eq!(arena.backing_len(), NODES_PER_VALIDATOR);
}
#[test]
fn participation_flags() {
    type N = <MainnetEthSpec as EthSpec>::ValidatorRegistryLimit;

    // 65 flags spill one byte past two 32-byte chunks, exercising the partial-chunk path.
    let len = 65;
    let mut flag = ParticipationFlags::default();
    flag.add_flag(0).unwrap();
    let epoch_participation = VariableList::<_, N>::new(vec![flag; len]).unwrap();

    // Start from an uninitialized cache so the lazy-init path is covered too.
    let mut cache = ParticipationTreeHashCache { inner: None };

    let first_root = cache
        .recalculate_tree_hash_root(&epoch_participation)
        .unwrap();
    let second_root = cache
        .recalculate_tree_hash_root(&epoch_participation)
        .unwrap();
    assert_eq!(first_root, second_root, "recalculated root should match");
    assert_eq!(
        first_root,
        epoch_participation.tree_hash_root(),
        "cached root should match uncached"
    );
}
}

View File

@@ -62,6 +62,7 @@ pub mod voluntary_exit;
pub mod slot_epoch_macros;
pub mod config_and_preset;
pub mod participation_flags;
pub mod participation_list;
pub mod preset;
pub mod slot_epoch;
pub mod subnet_id;
@@ -113,6 +114,7 @@ pub use crate::graffiti::{Graffiti, GRAFFITI_BYTES_LEN};
pub use crate::historical_batch::HistoricalBatch;
pub use crate::indexed_attestation::IndexedAttestation;
pub use crate::participation_flags::ParticipationFlags;
pub use crate::participation_list::ParticipationList;
pub use crate::pending_attestation::PendingAttestation;
pub use crate::preset::{AltairPreset, BasePreset};
pub use crate::proposer_slashing::ProposerSlashing;

View File

@@ -28,6 +28,10 @@ impl ParticipationFlags {
let mask = 1u8.safe_shl(flag_index as u32)?;
Ok(self.bits & mask == mask)
}
/// Consume the flags and return the raw underlying `u8` bit representation.
pub fn into_u8(self) -> u8 {
    self.bits
}
}
/// Decode implementation that transparently behaves like the inner `u8`.

View File

@@ -0,0 +1,55 @@
#![allow(clippy::integer_arithmetic)]
use crate::{Hash256, ParticipationFlags, Unsigned, VariableList};
use cached_tree_hash::{int_log, CacheArena, CachedTreeHash, Error, TreeHashCache};
use tree_hash::{mix_in_length, BYTES_PER_CHUNK};
/// Wrapper type allowing the implementation of `CachedTreeHash`.
///
/// Borrows a participation list so that the `CachedTreeHash` impl below can be
/// provided for it without touching the list type itself.
#[derive(Debug)]
pub struct ParticipationList<'a, N: Unsigned> {
    // Borrowed list of per-validator participation flags.
    pub inner: &'a VariableList<ParticipationFlags, N>,
}
impl<'a, N: Unsigned> ParticipationList<'a, N> {
pub fn new(inner: &'a VariableList<ParticipationFlags, N>) -> Self {
Self { inner }
}
}
impl<'a, N: Unsigned> CachedTreeHash<TreeHashCache> for ParticipationList<'a, N> {
    fn new_tree_hash_cache(&self, arena: &mut CacheArena) -> TreeHashCache {
        // Tree depth for a list of up to `N` one-byte flags packed
        // `BYTES_PER_CHUNK` to a leaf chunk.
        let depth = int_log(N::to_usize() / BYTES_PER_CHUNK);
        TreeHashCache::new(arena, depth, leaf_count(self.inner.len()))
    }

    fn recalculate_tree_hash_root(
        &self,
        arena: &mut CacheArena,
        cache: &mut TreeHashCache,
    ) -> Result<Hash256, Error> {
        let root = cache.recalculate_merkle_root(arena, leaf_iter(self.inner))?;
        // Mix the list's length into the root, as SSZ list hashing requires.
        Ok(mix_in_length(&root, self.inner.len()))
    }
}
/// Number of `BYTES_PER_CHUNK`-byte leaf chunks needed to hold `len` one-byte flags.
pub fn leaf_count(len: usize) -> usize {
    // Ceiling division: a partial trailing chunk still occupies a whole leaf.
    let full_chunks = len / BYTES_PER_CHUNK;
    if len % BYTES_PER_CHUNK == 0 {
        full_chunks
    } else {
        full_chunks + 1
    }
}
/// Pack the participation flags into `BYTES_PER_CHUNK`-byte leaves,
/// zero-padding the final (possibly partial) chunk on the right.
pub fn leaf_iter(
    values: &[ParticipationFlags],
) -> impl Iterator<Item = [u8; BYTES_PER_CHUNK]> + ExactSizeIterator + '_ {
    values.chunks(BYTES_PER_CHUNK).map(|flags| {
        // Start from an all-zero chunk so any unfilled tail bytes stay zero.
        let mut chunk = [0u8; BYTES_PER_CHUNK];
        for (dst, flag) in chunk.iter_mut().zip(flags.iter()) {
            *dst = flag.into_u8();
        }
        chunk
    })
}