Tree hash cache arena (#836)

* Start adding interop genesis state to lcli

* Use more efficient method to generate genesis state

* Remove duplicate int_to_bytes32

* Add lcli command to change state genesis time

* Add option to allow VC to start with unsynced BN

* Set VC to do parallel key loading

* Don't default to dummy eth1 backend

* Add endpoint to dump operation pool

* Add metrics for op pool

* Remove state clone for slot notifier

* Add mem size approximation for tree hash cache

* Avoid cloning tree hash when getting head

* Avoid cloning tree hash when getting head

* Add working arena-based cached tree hash

* Add another benchmark

* Add pre-allocation for caches

* Make cache nullable

* Fix bugs in cache tree hash

* Add validator tree hash optimization

* Optimize hash_concat

* Make hash32_concat return fixed-len array

* Fix failing API tests

* Add new beacon state cache struct

* Add validator-specific cache

* Separate list and values arenas

* Add parallel validator registry hashing

* Remove MultiTreeHashCache

* Remove cached tree hash macro

* Fix failing tree hash test

* Address Michael's comments

* Add CachedTreeHash impl for ef tests

* Fix messy merge conflict

* Rename cache struct, add comments

* Rename cache struct, add comments

* Remove unnecessary mutability

* Wrap iter in result

* Tidy cached tree hash

* Address Michael's comments

* Address more comments

* Use ring::Context
This commit is contained in:
Paul Hauner
2020-02-07 12:42:49 +11:00
committed by GitHub
parent f267bf2afe
commit c3182e3c1c
20 changed files with 1341 additions and 378 deletions

View File

@@ -1,5 +1,5 @@
use crate::impls::hash256_iter;
use crate::{CachedTreeHash, Error, Hash256, TreeHashCache};
use crate::{CacheArena, CachedTreeHash, Error, Hash256, TreeHashCache};
use eth2_hashing::ZERO_HASHES;
use quickcheck_macros::quickcheck;
use ssz_types::{
@@ -18,46 +18,49 @@ type Vector16u64 = FixedVector<u64, U16>;
#[test]
fn max_leaves() {
let arena = &mut CacheArena::default();
let depth = 4;
let max_len = 2u64.pow(depth as u32);
let mut cache = TreeHashCache::new(depth);
let mut cache = TreeHashCache::new(arena, depth, 2);
assert!(cache
.recalculate_merkle_root(hash256_iter(&int_hashes(0, max_len - 1)))
.recalculate_merkle_root(arena, hash256_iter(&int_hashes(0, max_len - 1)))
.is_ok());
assert!(cache
.recalculate_merkle_root(hash256_iter(&int_hashes(0, max_len)))
.recalculate_merkle_root(arena, hash256_iter(&int_hashes(0, max_len)))
.is_ok());
assert_eq!(
cache.recalculate_merkle_root(hash256_iter(&int_hashes(0, max_len + 1))),
cache.recalculate_merkle_root(arena, hash256_iter(&int_hashes(0, max_len + 1))),
Err(Error::TooManyLeaves)
);
assert_eq!(
cache.recalculate_merkle_root(hash256_iter(&int_hashes(0, max_len * 2))),
cache.recalculate_merkle_root(arena, hash256_iter(&int_hashes(0, max_len * 2))),
Err(Error::TooManyLeaves)
);
}
#[test]
fn cannot_shrink() {
let arena = &mut CacheArena::default();
let init_len = 12;
let list1 = List16::new(int_hashes(0, init_len)).unwrap();
let list2 = List16::new(int_hashes(0, init_len - 1)).unwrap();
let mut cache = List16::new_tree_hash_cache();
assert!(list1.recalculate_tree_hash_root(&mut cache).is_ok());
let mut cache = list1.new_tree_hash_cache(arena);
assert!(list1.recalculate_tree_hash_root(arena, &mut cache).is_ok());
assert_eq!(
list2.recalculate_tree_hash_root(&mut cache),
list2.recalculate_tree_hash_root(arena, &mut cache),
Err(Error::CannotShrink)
);
}
#[test]
fn empty_leaves() {
let arena = &mut CacheArena::default();
let depth = 20;
let mut cache = TreeHashCache::new(depth);
let mut cache = TreeHashCache::new(arena, depth, 0);
assert_eq!(
cache
.recalculate_merkle_root(vec![].into_iter())
.recalculate_merkle_root(arena, vec![].into_iter())
.unwrap()
.as_bytes(),
&ZERO_HASHES[depth][..]
@@ -66,40 +69,43 @@ fn empty_leaves() {
#[test]
fn fixed_vector_hash256() {
let arena = &mut CacheArena::default();
let len = 16;
let vec = Vector16::new(int_hashes(0, len)).unwrap();
let mut cache = Vector16::new_tree_hash_cache();
let mut cache = vec.new_tree_hash_cache(arena);
assert_eq!(
Hash256::from_slice(&vec.tree_hash_root()),
vec.recalculate_tree_hash_root(&mut cache).unwrap()
vec.recalculate_tree_hash_root(arena, &mut cache).unwrap()
);
}
#[test]
fn fixed_vector_u64() {
let arena = &mut CacheArena::default();
let len = 16;
let vec = Vector16u64::new((0..len).collect()).unwrap();
let mut cache = Vector16u64::new_tree_hash_cache();
let mut cache = vec.new_tree_hash_cache(arena);
assert_eq!(
Hash256::from_slice(&vec.tree_hash_root()),
vec.recalculate_tree_hash_root(&mut cache).unwrap()
vec.recalculate_tree_hash_root(arena, &mut cache).unwrap()
);
}
#[test]
fn variable_list_hash256() {
let arena = &mut CacheArena::default();
let len = 13;
let list = List16::new(int_hashes(0, len)).unwrap();
let mut cache = List16::new_tree_hash_cache();
let mut cache = list.new_tree_hash_cache(arena);
assert_eq!(
Hash256::from_slice(&list.tree_hash_root()),
list.recalculate_tree_hash_root(&mut cache).unwrap()
list.recalculate_tree_hash_root(arena, &mut cache).unwrap()
);
}
@@ -119,6 +125,7 @@ fn quickcheck_variable_list_h256_257(leaves_and_skips: Vec<(u64, bool)>) -> bool
}
fn variable_list_h256_test<Len: Unsigned>(leaves_and_skips: Vec<(u64, bool)>) -> bool {
let arena = &mut CacheArena::default();
let leaves: Vec<_> = leaves_and_skips
.iter()
.map(|(l, _)| Hash256::from_low_u64_be(*l))
@@ -126,14 +133,15 @@ fn variable_list_h256_test<Len: Unsigned>(leaves_and_skips: Vec<(u64, bool)>) ->
.collect();
let mut list: VariableList<Hash256, Len>;
let mut cache = VariableList::<Hash256, Len>::new_tree_hash_cache();
let init: VariableList<Hash256, Len> = VariableList::new(vec![]).unwrap();
let mut cache = init.new_tree_hash_cache(arena);
for (end, (_, update_cache)) in leaves_and_skips.into_iter().enumerate() {
list = VariableList::new(leaves[..end].to_vec()).unwrap();
if update_cache
&& list
.recalculate_tree_hash_root(&mut cache)
.recalculate_tree_hash_root(arena, &mut cache)
.unwrap()
.as_bytes()
!= &list.tree_hash_root()[..]