Memory usage reduction (#1522)

## Issue Addressed

NA

## Proposed Changes

- Adds a new function to allow getting a state with a bad state root history for attestation verification. This reduces unnecessary tree hashing during attestation processing, which accounted for 23% of memory allocations (by bytes) in a recent `heaptrack` observation.
- Don't clone caches on intermediate epoch-boundary states during block processing.
- Reject blocks that are already known to fork choice earlier during gossip processing, instead of waiting until after the state has been loaded (this only happens in an edge case).
- Avoid multiple re-allocations by creating a "forced" exact size iterator.

## Additional Info

NA
This commit is contained in:
Paul Hauner
2020-08-17 08:05:13 +00:00
parent 3c689a6837
commit 61d5b592cb
4 changed files with 122 additions and 20 deletions

View File

@@ -7,6 +7,7 @@ use rayon::prelude::*;
use ssz_derive::{Decode, Encode};
use ssz_types::VariableList;
use std::cmp::Ordering;
use std::iter::ExactSizeIterator;
use tree_hash::{mix_in_length, MerkleHasher, TreeHash};
/// The number of fields on a beacon state.
@@ -288,17 +289,17 @@ impl ValidatorsListTreeHashCache {
fn recalculate_tree_hash_root(&mut self, validators: &[Validator]) -> Result<Hash256, Error> {
let mut list_arena = std::mem::take(&mut self.list_arena);
let leaves = self
.values
.leaves(validators)?
.into_iter()
.flatten()
.map(|h| h.to_fixed_bytes())
.collect::<Vec<_>>();
let leaves = self.values.leaves(validators)?;
let num_leaves = leaves.iter().map(|arena| arena.len()).sum();
let leaves_iter = ForcedExactSizeIterator {
iter: leaves.into_iter().flatten().map(|h| h.to_fixed_bytes()),
len: num_leaves,
};
let list_root = self
.list_cache
.recalculate_merkle_root(&mut list_arena, leaves.into_iter())?;
.recalculate_merkle_root(&mut list_arena, leaves_iter)?;
self.list_arena = list_arena;
@@ -306,6 +307,29 @@ impl ValidatorsListTreeHashCache {
}
}
/// Provides a wrapper around some `iter` if the number of items in the iterator is known to the
/// programmer but not the compiler. This allows use of `ExactSizeIterator` on some occasions.
///
/// Care should be taken to ensure `len` is accurate: nothing checks it against the number of
/// items `iter` actually yields, and downstream consumers (e.g. pre-allocation in `collect`)
/// will trust it.
struct ForcedExactSizeIterator<I> {
/// The underlying iterator being wrapped.
iter: I,
/// The caller-asserted number of items in `iter`.
len: usize,
}
impl<V, I: Iterator<Item = V>> Iterator for ForcedExactSizeIterator<I> {
    type Item = V;

    /// Yields the next item of the wrapped iterator, keeping `len` in sync so that
    /// `ExactSizeIterator::len` reports the *remaining* count (as its contract requires),
    /// not the starting count.
    fn next(&mut self) -> Option<Self::Item> {
        let item = self.iter.next();
        if item.is_some() {
            // `saturating_sub` guards against underflow if the forced `len` was inaccurate
            // and the inner iterator yields more items than claimed.
            self.len = self.len.saturating_sub(1);
        }
        item
    }

    /// Reports the forced length as an exact bound.
    ///
    /// Without this override the inner iterator's hint would be used; adapters such as
    /// `Flatten`/`Map` typically report a lower bound of 0, which would defeat the
    /// pre-allocation in `collect`/`Vec::extend` (they consult `size_hint`, not
    /// `ExactSizeIterator::len`) — the stated purpose of this wrapper.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }
}
impl<V, I: Iterator<Item = V>> ExactSizeIterator for ForcedExactSizeIterator<I> {
/// Returns the caller-supplied `len` field rather than deriving a length from the
/// underlying iterator.
///
/// NOTE(review): `ExactSizeIterator::len` is specified to return the *remaining* number of
/// items. As written, `len` is never updated by `next`, so this value is only accurate
/// before iteration begins — confirm callers consume the iterator whole (as
/// `recalculate_merkle_root` does) or keep `len` in sync.
fn len(&self) -> usize {
self.len
}
}
/// Provides a cache for each of the `Validator` objects in `state.validators` and computes the
/// roots of these using Rayon parallelization.
#[derive(Debug, PartialEq, Clone, Default, Encode, Decode)]