Merge branch 'gloas-containers' into gloas-envelope-processing

This commit is contained in:
Mark Mackey
2025-11-28 14:40:55 -06:00
101 changed files with 3693 additions and 3689 deletions

View File

@@ -1,17 +0,0 @@
# Crate providing the `ContextDeserialize` trait: serde-style deserialization
# that threads an extra caller-supplied context value through decoding.
[package]
name = "context_deserialize"
version = "0.1.0"
edition = "2021"

[features]
# The derive/attribute macro is enabled by default.
default = ["derive"]
# Pulls in the companion proc-macro crate for `#[context_deserialize(...)]`.
derive = ["dep:context_deserialize_derive"]
# Impls for `milhouse` persistent collections (List, Vector).
milhouse = ["dep:milhouse"]
# Impls for `ssz_types` containers (FixedVector, Bitfield).
ssz = ["dep:ssz_types"]
# Convenience feature enabling all of the above.
all = ["derive", "milhouse", "ssz"]

[dependencies]
context_deserialize_derive = { version = "0.1.0", path = "../context_deserialize_derive", optional = true }
milhouse = { workspace = true, optional = true }
serde = { workspace = true }
ssz_types = { workspace = true, optional = true }

View File

@@ -1,103 +0,0 @@
use crate::ContextDeserialize;
use serde::de::{Deserialize, DeserializeSeed, Deserializer, SeqAccess, Visitor};
use std::marker::PhantomData;
use std::sync::Arc;
/// Context-aware deserialization for `Arc<C>`: delegate to the inner type's
/// impl and wrap the result in a new `Arc`.
impl<'de, C, T> ContextDeserialize<'de, T> for Arc<C>
where
    C: ContextDeserialize<'de, T>,
{
    fn context_deserialize<D>(deserializer: D, context: T) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        C::context_deserialize(deserializer, context).map(Arc::new)
    }
}
/// Context-aware deserialization for `Vec<T>`.
///
/// The context `C` must be `Clone` because one copy is handed to the seed for
/// every element of the sequence.
impl<'de, T, C> ContextDeserialize<'de, C> for Vec<T>
where
    T: ContextDeserialize<'de, C>,
    C: Clone,
{
    fn context_deserialize<D>(deserializer: D, context: C) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Our Visitor, which owns one copy of the context T.
        struct ContextVisitor<C, T> {
            context: T,
            _marker: PhantomData<C>,
        }

        impl<'de, C, T> Visitor<'de> for ContextVisitor<C, T>
        where
            C: ContextDeserialize<'de, T>,
            T: Clone,
        {
            type Value = Vec<C>;

            fn expecting(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
                fmt.write_str("a sequence of context-deserialized elements")
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Vec<C>, A::Error>
            where
                A: SeqAccess<'de>,
            {
                // Don't trust `size_hint` blindly: for self-describing formats it
                // is attacker-controlled, so cap the pre-allocation to a sane
                // bound (mirrors serde's own "cautious" size-hint strategy). The
                // Vec still grows normally past the cap.
                let mut out = Vec::with_capacity(seq.size_hint().unwrap_or(0).min(4096));
                // For each element, we clone the context and hand it to the seed.
                while let Some(elem) = seq.next_element_seed(ContextSeed {
                    context: self.context.clone(),
                    _marker: PhantomData,
                })? {
                    out.push(elem);
                }
                Ok(out)
            }
        }

        // A little seed that hands the deserializer + context into
        // `C::context_deserialize`.
        struct ContextSeed<T, C> {
            context: C,
            _marker: PhantomData<T>,
        }

        impl<'de, T, C> DeserializeSeed<'de> for ContextSeed<T, C>
        where
            T: ContextDeserialize<'de, C>,
            C: Clone,
        {
            type Value = T;

            fn deserialize<D>(self, deserializer: D) -> Result<T, D::Error>
            where
                D: Deserializer<'de>,
            {
                T::context_deserialize(deserializer, self.context)
            }
        }

        deserializer.deserialize_seq(ContextVisitor {
            context,
            _marker: PhantomData,
        })
    }
}
/// Implements `ContextDeserialize` for types whose decoding never needs the
/// context: the context argument is ignored and decoding is delegated to the
/// type's plain `serde::Deserialize` impl.
macro_rules! trivial_deserialize {
    ($($t:ty),* $(,)?) => {
        $(
            impl<'de, T> ContextDeserialize<'de, T> for $t {
                fn context_deserialize<D>(deserializer: D, _context: T) -> Result<Self, D::Error>
                where
                    D: Deserializer<'de>,
                {
                    <$t>::deserialize(deserializer)
                }
            }
        )*
    };
}
// All primitive boolean/integer/float types are context-free.
trivial_deserialize!(bool, u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64);

View File

@@ -1,45 +0,0 @@
use crate::ContextDeserialize;
use milhouse::{List, Value, Vector};
use serde::de::Deserializer;
use ssz_types::typenum::Unsigned;
/// Context-aware deserialization for `milhouse::List`: decode via the `Vec`
/// impl, then convert, which enforces the `N` length bound.
impl<'de, C, T, N> ContextDeserialize<'de, C> for List<T, N>
where
    T: ContextDeserialize<'de, C> + Value,
    N: Unsigned,
    C: Clone,
{
    fn context_deserialize<D>(deserializer: D, context: C) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Going through a temporary Vec is not the most efficient approach; a
        // future version could stream into `List::builder()` instead.
        Vec::<T>::context_deserialize(deserializer, context).and_then(|elements| {
            List::new(elements)
                .map_err(|e| serde::de::Error::custom(format!("Failed to create List: {:?}", e)))
        })
    }
}
/// Context-aware deserialization for `milhouse::Vector`: decode as a `List`
/// first, then convert, which enforces the exact length `N`.
impl<'de, C, T, N> ContextDeserialize<'de, C> for Vector<T, N>
where
    T: ContextDeserialize<'de, C> + Value,
    N: Unsigned,
    C: Clone,
{
    fn context_deserialize<D>(deserializer: D, context: C) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        List::<T, N>::context_deserialize(deserializer, context).and_then(|list| {
            Vector::try_from(list).map_err(|e| {
                serde::de::Error::custom(format!("Failed to convert List to Vector: {:?}", e))
            })
        })
    }
}

View File

@@ -1,7 +0,0 @@
// Impls for std types (Arc, Vec, primitives); always compiled.
mod core;
// Impls for `milhouse` collections; gated behind the `milhouse` feature.
#[cfg(feature = "milhouse")]
mod milhouse;
// Impls for `ssz_types` containers; gated behind the `ssz` feature.
#[cfg(feature = "ssz")]
mod ssz;

View File

@@ -1,51 +0,0 @@
use crate::ContextDeserialize;
use serde::{
de::{Deserializer, Error},
Deserialize,
};
use ssz_types::{
length::{Fixed, Variable},
typenum::Unsigned,
Bitfield, FixedVector,
};
impl<'de, C, T, N> ContextDeserialize<'de, C> for FixedVector<T, N>
where
T: ContextDeserialize<'de, C>,
N: Unsigned,
C: Clone,
{
fn context_deserialize<D>(deserializer: D, context: C) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let vec = Vec::<T>::context_deserialize(deserializer, context)?;
FixedVector::new(vec).map_err(|e| D::Error::custom(format!("{:?}", e)))
}
}
/// Context-aware deserialization for variable-length bitfields.
///
/// The bitfield encoding carries no context, so decoding delegates straight to
/// the plain `Deserialize` impl.
impl<'de, C, N> ContextDeserialize<'de, C> for Bitfield<Variable<N>>
where
    N: Unsigned + Clone,
{
    fn context_deserialize<D>(deserializer: D, _context: C) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // `Deserialize::deserialize` already yields a `D::Error`; re-wrapping it
        // through `Error::custom(format!("{:?}", e))` would only replace the
        // proper error message with a Debug dump, so pass it through unchanged.
        Bitfield::<Variable<N>>::deserialize(deserializer)
    }
}
/// Context-aware deserialization for fixed-length bitfields.
///
/// The bitfield encoding carries no context, so decoding delegates straight to
/// the plain `Deserialize` impl.
impl<'de, C, N> ContextDeserialize<'de, C> for Bitfield<Fixed<N>>
where
    N: Unsigned + Clone,
{
    fn context_deserialize<D>(deserializer: D, _context: C) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // As with the variable-length impl: the error is already a `D::Error`,
        // so avoid the lossy `Error::custom(format!("{:?}", e))` re-wrap.
        Bitfield::<Fixed<N>>::deserialize(deserializer)
    }
}

View File

@@ -1,13 +0,0 @@
mod impls;
#[cfg(feature = "derive")]
pub use context_deserialize_derive::context_deserialize;
use serde::de::Deserializer;
/// General-purpose deserialization trait that accepts extra context `C`.
///
/// Mirrors `serde::Deserialize`, but threads a caller-provided context value
/// through to the implementation so that decoding can depend on runtime
/// information unavailable at the type level.
pub trait ContextDeserialize<'de, C>: Sized {
    /// Deserialize `Self` from `deserializer`, using `context` to guide decoding.
    fn context_deserialize<D>(deserializer: D, context: C) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>;
}

View File

@@ -1,16 +0,0 @@
# Proc-macro companion crate for `context_deserialize`; provides the
# `#[context_deserialize(...)]` attribute macro.
[package]
name = "context_deserialize_derive"
version = "0.1.0"
edition = "2021"

[lib]
proc-macro = true

[dependencies]
quote = { workspace = true }
syn = { workspace = true }

[dev-dependencies]
# The main crate is only needed by this crate's own tests.
context_deserialize = { path = "../context_deserialize" }
serde = { workspace = true }
serde_json = "1.0"

View File

@@ -1,118 +0,0 @@
extern crate proc_macro;
extern crate quote;
extern crate syn;
use proc_macro::TokenStream;
use quote::quote;
use syn::{
parse_macro_input, AttributeArgs, DeriveInput, GenericParam, LifetimeDef, Meta, NestedMeta,
WhereClause,
};
/// Attribute macro: `#[context_deserialize(Ctx1, Ctx2, ..., bound = "...")]`.
///
/// For each listed context type, generates an impl of
/// `ContextDeserialize<'de, Ctx>` for the annotated type that ignores the
/// context and delegates to the type's `serde::Deserialize` impl. An optional
/// `bound = "..."` argument supplies an explicit where-clause for the
/// generated impls (useful for generic types).
#[proc_macro_attribute]
pub fn context_deserialize(attr: TokenStream, item: TokenStream) -> TokenStream {
    // Parse the attribute arguments and the item the attribute is attached to.
    let args = parse_macro_input!(attr as AttributeArgs);
    let input = parse_macro_input!(item as DeriveInput);
    let ident = &input.ident;
    // Context types to implement for, and the optional explicit where-clause.
    let mut ctx_types = Vec::new();
    let mut explicit_where: Option<WhereClause> = None;
    for meta in args {
        match meta {
            // A bare path argument is a context type.
            NestedMeta::Meta(Meta::Path(p)) => {
                ctx_types.push(p);
            }
            // `bound = "..."` — parse the string as a where-clause.
            NestedMeta::Meta(Meta::NameValue(nv)) if nv.path.is_ident("bound") => {
                if let syn::Lit::Str(lit_str) = &nv.lit {
                    // Prepend `where` so the string parses as a full clause.
                    let where_string = format!("where {}", lit_str.value());
                    match syn::parse_str::<WhereClause>(&where_string) {
                        Ok(where_clause) => {
                            explicit_where = Some(where_clause);
                        }
                        Err(err) => {
                            // Surface the parse failure at the literal's span.
                            return syn::Error::new_spanned(
                                lit_str,
                                format!("Invalid where clause '{}': {}", lit_str.value(), err),
                            )
                            .to_compile_error()
                            .into();
                        }
                    }
                } else {
                    return syn::Error::new_spanned(
                        &nv,
                        "Expected a string literal for `bound` value",
                    )
                    .to_compile_error()
                    .into();
                }
            }
            // Anything else is a usage error.
            _ => {
                return syn::Error::new_spanned(
                    &meta,
                    "Expected paths or `bound = \"...\"` in #[context_deserialize(...)]",
                )
                .to_compile_error()
                .into();
            }
        }
    }
    // At least one context type is required.
    if ctx_types.is_empty() {
        return quote! {
            compile_error!("Usage: #[context_deserialize(Type1, Type2, ..., bound = \"...\")]");
        }
        .into();
    }
    let original_generics = input.generics.clone();
    // Clone and clean generics for impl use (remove default params, which are
    // not permitted in impl generics).
    let mut impl_generics = input.generics.clone();
    for param in impl_generics.params.iter_mut() {
        if let GenericParam::Type(ty) = param {
            ty.eq_token = None;
            ty.default = None;
        }
    }
    // Ensure 'de lifetime exists in impl generics.
    let has_de = impl_generics
        .lifetimes()
        .any(|LifetimeDef { lifetime, .. }| lifetime.ident == "de");
    if !has_de {
        impl_generics.params.insert(0, syn::parse_quote! { 'de });
    }
    // Type generics come from the original (no injected 'de); impl generics
    // from the cleaned copy.
    let (_, ty_generics, _) = original_generics.split_for_impl();
    let (impl_gens, _, _) = impl_generics.split_for_impl();
    // Generate one impl per context type: no `'de` applied to the type name.
    let mut impls = quote! {};
    for ctx in ctx_types {
        impls.extend(quote! {
            impl #impl_gens context_deserialize::ContextDeserialize<'de, #ctx>
                for #ident #ty_generics
            #explicit_where
            {
                fn context_deserialize<D>(
                    deserializer: D,
                    _context: #ctx,
                ) -> Result<Self, D::Error>
                where
                    D: serde::de::Deserializer<'de>,
                {
                    <Self as serde::Deserialize>::deserialize(deserializer)
                }
            }
        });
    }
    // Emit the original item unchanged, followed by the generated impls.
    quote! {
        #input
        #impls
    }
    .into()
}

View File

@@ -1,93 +0,0 @@
use context_deserialize::{context_deserialize, ContextDeserialize};
use serde::{Deserialize, Serialize};
/// Round-trip a struct through JSON using the derived
/// `ContextDeserialize<()>` impl.
#[test]
fn test_context_deserialize_derive() {
    type TestContext = ();

    #[context_deserialize(TestContext)]
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Test {
        field: String,
    }

    let original = Test {
        field: "test".to_string(),
    };
    let json = serde_json::to_string(&original).unwrap();
    let mut de = serde_json::Deserializer::from_str(&json);
    let round_tripped = Test::context_deserialize(&mut de, ()).unwrap();
    assert_eq!(original, round_tripped);
}
/// A single attribute invocation can generate impls for several context types.
#[test]
fn test_context_deserialize_derive_multiple_types() {
    #[allow(dead_code)]
    struct TestContext1(u64);
    #[allow(dead_code)]
    struct TestContext2(String);

    // This will derive:
    // - ContextDeserialize<TestContext1> for Test
    // - ContextDeserialize<TestContext2> for Test
    // by just leveraging the Deserialize impl
    #[context_deserialize(TestContext1, TestContext2)]
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Test {
        field: String,
    }

    let original = Test {
        field: "test".to_string(),
    };
    let json = serde_json::to_string(&original).unwrap();

    // Round-trip once per context type; both generated impls delegate to
    // the plain `Deserialize` impl.
    let mut de = serde_json::Deserializer::from_str(&json);
    let via_ctx1 = Test::context_deserialize(&mut de, TestContext1(1)).unwrap();
    assert_eq!(original, via_ctx1);

    let mut de = serde_json::Deserializer::from_str(&json);
    let via_ctx2 = Test::context_deserialize(&mut de, TestContext2("2".to_string())).unwrap();
    assert_eq!(original, via_ctx2);
}
/// Generic types need an explicit `bound = "..."` so that the generated impl
/// carries the right where-clause.
#[test]
fn test_context_deserialize_derive_bound() {
    use std::fmt::Debug;
    struct TestContext;
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Inner {
        value: u64,
    }
    // `bound = "..."` supplies the where-clause for the generated
    // `ContextDeserialize` impl on the generic `Wrapper<T>`.
    #[context_deserialize(
        TestContext,
        bound = "T: Serialize + for<'a> Deserialize<'a> + Debug + PartialEq"
    )]
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Wrapper<T> {
        inner: T,
    }
    let val = Wrapper {
        inner: Inner { value: 42 },
    };
    // JSON round-trip through the context-aware deserializer.
    let serialized = serde_json::to_string(&val).unwrap();
    let deserialized = Wrapper::<Inner>::context_deserialize(
        &mut serde_json::Deserializer::from_str(&serialized),
        TestContext,
    )
    .unwrap();
    assert_eq!(val, deserialized);
}

View File

@@ -627,7 +627,7 @@ where
op: &InvalidationOperation,
) -> Result<(), Error<T::Error>> {
self.proto_array
.process_execution_payload_invalidation::<E>(op)
.process_execution_payload_invalidation::<E>(op, self.finalized_checkpoint())
.map_err(Error::FailedToProcessInvalidExecutionPayload)
}
@@ -908,6 +908,8 @@ where
unrealized_finalized_checkpoint: Some(unrealized_finalized_checkpoint),
},
current_slot,
self.justified_checkpoint(),
self.finalized_checkpoint(),
)?;
Ok(())
@@ -1288,7 +1290,7 @@ where
/// Return `true` if `block_root` is equal to the finalized checkpoint, or a known descendant of it.
pub fn is_finalized_checkpoint_or_descendant(&self, block_root: Hash256) -> bool {
self.proto_array
.is_finalized_checkpoint_or_descendant::<E>(block_root)
.is_finalized_checkpoint_or_descendant::<E>(block_root, self.finalized_checkpoint())
}
pub fn is_descendant(&self, ancestor_root: Hash256, descendant_root: Hash256) -> bool {
@@ -1508,7 +1510,9 @@ where
/// be instantiated again later.
pub fn to_persisted(&self) -> PersistedForkChoice {
PersistedForkChoice {
proto_array: self.proto_array().as_ssz_container(),
proto_array: self
.proto_array()
.as_ssz_container(self.justified_checkpoint(), self.finalized_checkpoint()),
queued_attestations: self.queued_attestations().to_vec(),
}
}

View File

@@ -14,5 +14,4 @@ fixed_bytes = { workspace = true }
safe_arith = { workspace = true }
[dev-dependencies]
quickcheck = { workspace = true }
quickcheck_macros = { workspace = true }
proptest = { workspace = true }

View File

@@ -413,50 +413,70 @@ impl From<InvalidSnapshot> for MerkleTreeError {
#[cfg(test)]
mod tests {
use super::*;
use quickcheck::TestResult;
use quickcheck_macros::quickcheck;
/// Check that we can:
/// 1. Build a MerkleTree from arbitrary leaves and an arbitrary depth.
/// 2. Generate valid proofs for all of the leaves of this MerkleTree.
#[quickcheck]
fn quickcheck_create_and_verify(int_leaves: Vec<u64>, depth: usize) -> TestResult {
if depth > MAX_TREE_DEPTH || int_leaves.len() > 2usize.pow(depth as u32) {
return TestResult::discard();
}
use proptest::prelude::*;
let leaves: Vec<_> = int_leaves.into_iter().map(H256::from_low_u64_be).collect();
let merkle_tree = MerkleTree::create(&leaves, depth);
let merkle_root = merkle_tree.hash();
// Limit test depth to avoid generating huge trees. Depth 10 = 1024 max leaves.
const TEST_MAX_DEPTH: usize = 10;
let proofs_ok = (0..leaves.len()).all(|i| {
let (leaf, branch) = merkle_tree
.generate_proof(i, depth)
.expect("should generate proof");
leaf == leaves[i] && verify_merkle_proof(leaf, &branch, depth, i, merkle_root)
});
TestResult::from_bool(proofs_ok)
fn merkle_leaves_strategy(max_depth: usize) -> impl Strategy<Value = (Vec<u64>, usize)> {
(0..=max_depth).prop_flat_map(|depth| {
let max_leaves = 2usize.pow(depth as u32);
(
proptest::collection::vec(any::<u64>(), 0..=max_leaves),
Just(depth),
)
})
}
#[quickcheck]
fn quickcheck_push_leaf_and_verify(int_leaves: Vec<u64>, depth: usize) -> TestResult {
if depth == 0 || depth > MAX_TREE_DEPTH || int_leaves.len() > 2usize.pow(depth as u32) {
return TestResult::discard();
fn merkle_leaves_strategy_min_depth(
max_depth: usize,
min_depth: usize,
) -> impl Strategy<Value = (Vec<u64>, usize)> {
(min_depth..=max_depth).prop_flat_map(|depth| {
let max_leaves = 2usize.pow(depth as u32);
(
proptest::collection::vec(any::<u64>(), 0..=max_leaves),
Just(depth),
)
})
}
proptest::proptest! {
/// Check that we can:
/// 1. Build a MerkleTree from arbitrary leaves and an arbitrary depth.
/// 2. Generate valid proofs for all of the leaves of this MerkleTree.
#[test]
fn proptest_create_and_verify((int_leaves, depth) in merkle_leaves_strategy(TEST_MAX_DEPTH)) {
let leaves: Vec<_> = int_leaves.into_iter().map(H256::from_low_u64_be).collect();
let merkle_tree = MerkleTree::create(&leaves, depth);
let merkle_root = merkle_tree.hash();
let proofs_ok = (0..leaves.len()).all(|i| {
let (leaf, branch) = merkle_tree
.generate_proof(i, depth)
.expect("should generate proof");
leaf == leaves[i] && verify_merkle_proof(leaf, &branch, depth, i, merkle_root)
});
proptest::prop_assert!(proofs_ok);
}
let leaves_iter = int_leaves.into_iter().map(H256::from_low_u64_be);
let mut merkle_tree = MerkleTree::create(&[], depth);
#[test]
fn proptest_push_leaf_and_verify((int_leaves, depth) in merkle_leaves_strategy_min_depth(TEST_MAX_DEPTH, 1)) {
let leaves_iter = int_leaves.into_iter().map(H256::from_low_u64_be);
let mut merkle_tree = MerkleTree::create(&[], depth);
let proofs_ok = leaves_iter.enumerate().all(|(i, leaf)| {
assert_eq!(merkle_tree.push_leaf(leaf, depth), Ok(()));
let (stored_leaf, branch) = merkle_tree
.generate_proof(i, depth)
.expect("should generate proof");
stored_leaf == leaf && verify_merkle_proof(leaf, &branch, depth, i, merkle_tree.hash())
});
let proofs_ok = leaves_iter.enumerate().all(|(i, leaf)| {
assert_eq!(merkle_tree.push_leaf(leaf, depth), Ok(()));
let (stored_leaf, branch) = merkle_tree
.generate_proof(i, depth)
.expect("should generate proof");
stored_leaf == leaf && verify_merkle_proof(leaf, &branch, depth, i, merkle_tree.hash())
});
TestResult::from_bool(proofs_ok)
proptest::prop_assert!(proofs_ok);
}
}
#[test]

View File

@@ -212,7 +212,12 @@ impl ForkChoiceTestDefinition {
unrealized_finalized_checkpoint: None,
};
fork_choice
.process_block::<MainnetEthSpec>(block, slot)
.process_block::<MainnetEthSpec>(
block,
slot,
self.justified_checkpoint,
self.finalized_checkpoint,
)
.unwrap_or_else(|e| {
panic!(
"process_block op at index {} returned error: {:?}",
@@ -272,7 +277,10 @@ impl ForkChoiceTestDefinition {
}
};
fork_choice
.process_execution_payload_invalidation::<MainnetEthSpec>(&op)
.process_execution_payload_invalidation::<MainnetEthSpec>(
&op,
self.finalized_checkpoint,
)
.unwrap()
}
Operation::AssertWeight { block_root, weight } => assert_eq!(
@@ -305,7 +313,8 @@ fn get_checkpoint(i: u64) -> Checkpoint {
}
fn check_bytes_round_trip(original: &ProtoArrayForkChoice) {
let bytes = original.as_bytes();
// The checkpoints are ignored by `ProtoArrayForkChoice::from_bytes`, so any value is ok
let bytes = original.as_bytes(Checkpoint::default(), Checkpoint::default());
let decoded = ProtoArrayForkChoice::from_bytes(&bytes, original.balances.clone())
.expect("fork choice should decode from bytes");
assert!(

View File

@@ -130,8 +130,6 @@ pub struct ProtoArray {
/// Do not attempt to prune the tree unless it has at least this many nodes. Small prunes
/// simply waste time.
pub prune_threshold: usize,
pub justified_checkpoint: Checkpoint,
pub finalized_checkpoint: Checkpoint,
pub nodes: Vec<ProtoNode>,
pub indices: HashMap<Hash256, usize>,
pub previous_proposer_boost: ProposerBoost,
@@ -155,8 +153,8 @@ impl ProtoArray {
pub fn apply_score_changes<E: EthSpec>(
&mut self,
mut deltas: Vec<i64>,
justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
best_justified_checkpoint: Checkpoint,
best_finalized_checkpoint: Checkpoint,
new_justified_balances: &JustifiedBalances,
proposer_boost_root: Hash256,
current_slot: Slot,
@@ -169,13 +167,6 @@ impl ProtoArray {
});
}
if justified_checkpoint != self.justified_checkpoint
|| finalized_checkpoint != self.finalized_checkpoint
{
self.justified_checkpoint = justified_checkpoint;
self.finalized_checkpoint = finalized_checkpoint;
}
// Default the proposer boost score to zero.
let mut proposer_score = 0;
@@ -296,6 +287,8 @@ impl ProtoArray {
parent_index,
node_index,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
)?;
}
}
@@ -306,7 +299,13 @@ impl ProtoArray {
/// Register a block with the fork choice.
///
/// It is only sane to supply a `None` parent for the genesis block.
pub fn on_block<E: EthSpec>(&mut self, block: Block, current_slot: Slot) -> Result<(), Error> {
pub fn on_block<E: EthSpec>(
&mut self,
block: Block,
current_slot: Slot,
best_justified_checkpoint: Checkpoint,
best_finalized_checkpoint: Checkpoint,
) -> Result<(), Error> {
// If the block is already known, simply ignore it.
if self.indices.contains_key(&block.root) {
return Ok(());
@@ -357,6 +356,8 @@ impl ProtoArray {
parent_index,
node_index,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
)?;
if matches!(block.execution_status, ExecutionStatus::Valid(_)) {
@@ -439,6 +440,7 @@ impl ProtoArray {
pub fn propagate_execution_payload_invalidation<E: EthSpec>(
&mut self,
op: &InvalidationOperation,
best_finalized_checkpoint: Checkpoint,
) -> Result<(), Error> {
let mut invalidated_indices: HashSet<usize> = <_>::default();
let head_block_root = op.block_root();
@@ -467,7 +469,10 @@ impl ProtoArray {
let latest_valid_ancestor_is_descendant =
latest_valid_ancestor_root.is_some_and(|ancestor_root| {
self.is_descendant(ancestor_root, head_block_root)
&& self.is_finalized_checkpoint_or_descendant::<E>(ancestor_root)
&& self.is_finalized_checkpoint_or_descendant::<E>(
ancestor_root,
best_finalized_checkpoint,
)
});
// Collect all *ancestors* which were declared invalid since they reside between the
@@ -630,6 +635,8 @@ impl ProtoArray {
&self,
justified_root: &Hash256,
current_slot: Slot,
best_justified_checkpoint: Checkpoint,
best_finalized_checkpoint: Checkpoint,
) -> Result<Hash256, Error> {
let justified_index = self
.indices
@@ -663,12 +670,17 @@ impl ProtoArray {
.ok_or(Error::InvalidBestDescendant(best_descendant_index))?;
// Perform a sanity check that the node is indeed valid to be the head.
if !self.node_is_viable_for_head::<E>(best_node, current_slot) {
if !self.node_is_viable_for_head::<E>(
best_node,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
) {
return Err(Error::InvalidBestNode(Box::new(InvalidBestNodeInfo {
current_slot,
start_root: *justified_root,
justified_checkpoint: self.justified_checkpoint,
finalized_checkpoint: self.finalized_checkpoint,
justified_checkpoint: best_justified_checkpoint,
finalized_checkpoint: best_finalized_checkpoint,
head_root: best_node.root,
head_justified_checkpoint: best_node.justified_checkpoint,
head_finalized_checkpoint: best_node.finalized_checkpoint,
@@ -765,6 +777,8 @@ impl ProtoArray {
parent_index: usize,
child_index: usize,
current_slot: Slot,
best_justified_checkpoint: Checkpoint,
best_finalized_checkpoint: Checkpoint,
) -> Result<(), Error> {
let child = self
.nodes
@@ -776,8 +790,12 @@ impl ProtoArray {
.get(parent_index)
.ok_or(Error::InvalidNodeIndex(parent_index))?;
let child_leads_to_viable_head =
self.node_leads_to_viable_head::<E>(child, current_slot)?;
let child_leads_to_viable_head = self.node_leads_to_viable_head::<E>(
child,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
)?;
// These three variables are aliases to the three options that we may set the
// `parent.best_child` and `parent.best_descendant` to.
@@ -806,8 +824,12 @@ impl ProtoArray {
.get(best_child_index)
.ok_or(Error::InvalidBestDescendant(best_child_index))?;
let best_child_leads_to_viable_head =
self.node_leads_to_viable_head::<E>(best_child, current_slot)?;
let best_child_leads_to_viable_head = self.node_leads_to_viable_head::<E>(
best_child,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
)?;
if child_leads_to_viable_head && !best_child_leads_to_viable_head {
// The child leads to a viable head, but the current best-child doesn't.
@@ -856,6 +878,8 @@ impl ProtoArray {
&self,
node: &ProtoNode,
current_slot: Slot,
best_justified_checkpoint: Checkpoint,
best_finalized_checkpoint: Checkpoint,
) -> Result<bool, Error> {
let best_descendant_is_viable_for_head =
if let Some(best_descendant_index) = node.best_descendant {
@@ -864,13 +888,23 @@ impl ProtoArray {
.get(best_descendant_index)
.ok_or(Error::InvalidBestDescendant(best_descendant_index))?;
self.node_is_viable_for_head::<E>(best_descendant, current_slot)
self.node_is_viable_for_head::<E>(
best_descendant,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
)
} else {
false
};
Ok(best_descendant_is_viable_for_head
|| self.node_is_viable_for_head::<E>(node, current_slot))
|| self.node_is_viable_for_head::<E>(
node,
current_slot,
best_justified_checkpoint,
best_finalized_checkpoint,
))
}
/// This is the equivalent to the `filter_block_tree` function in the eth2 spec:
@@ -879,7 +913,13 @@ impl ProtoArray {
///
/// Any node that has a different finalized or justified epoch should not be viable for the
/// head.
fn node_is_viable_for_head<E: EthSpec>(&self, node: &ProtoNode, current_slot: Slot) -> bool {
fn node_is_viable_for_head<E: EthSpec>(
&self,
node: &ProtoNode,
current_slot: Slot,
best_justified_checkpoint: Checkpoint,
best_finalized_checkpoint: Checkpoint,
) -> bool {
if node.execution_status.is_invalid() {
return false;
}
@@ -901,12 +941,13 @@ impl ProtoArray {
node_justified_checkpoint
};
let correct_justified = self.justified_checkpoint.epoch == genesis_epoch
|| voting_source.epoch == self.justified_checkpoint.epoch
let correct_justified = best_justified_checkpoint.epoch == genesis_epoch
|| voting_source.epoch == best_justified_checkpoint.epoch
|| voting_source.epoch + 2 >= current_epoch;
let correct_finalized = self.finalized_checkpoint.epoch == genesis_epoch
|| self.is_finalized_checkpoint_or_descendant::<E>(node.root);
let correct_finalized = best_finalized_checkpoint.epoch == genesis_epoch
|| self
.is_finalized_checkpoint_or_descendant::<E>(node.root, best_finalized_checkpoint);
correct_justified && correct_finalized
}
@@ -961,10 +1002,13 @@ impl ProtoArray {
///
/// Notably, this function is checking ancestry of the finalized
/// *checkpoint* not the finalized *block*.
pub fn is_finalized_checkpoint_or_descendant<E: EthSpec>(&self, root: Hash256) -> bool {
let finalized_root = self.finalized_checkpoint.root;
let finalized_slot = self
.finalized_checkpoint
pub fn is_finalized_checkpoint_or_descendant<E: EthSpec>(
&self,
root: Hash256,
best_finalized_checkpoint: Checkpoint,
) -> bool {
let finalized_root = best_finalized_checkpoint.root;
let finalized_slot = best_finalized_checkpoint
.epoch
.start_slot(E::slots_per_epoch());
@@ -987,7 +1031,7 @@ impl ProtoArray {
// If the conditions don't match for this node then they're unlikely to
// start matching for its ancestors.
for checkpoint in &[node.finalized_checkpoint, node.justified_checkpoint] {
if checkpoint == &self.finalized_checkpoint {
if checkpoint == &best_finalized_checkpoint {
return true;
}
}
@@ -996,7 +1040,7 @@ impl ProtoArray {
node.unrealized_finalized_checkpoint,
node.unrealized_justified_checkpoint,
] {
if checkpoint.is_some_and(|cp| cp == self.finalized_checkpoint) {
if checkpoint.is_some_and(|cp| cp == best_finalized_checkpoint) {
return true;
}
}
@@ -1044,12 +1088,18 @@ impl ProtoArray {
/// For informational purposes like the beacon HTTP API, we use this as the list of known heads,
/// even though some of them might not be viable. We do this to maintain consistency between the
/// definition of "head" used by pruning (which does not consider viability) and fork choice.
pub fn heads_descended_from_finalization<E: EthSpec>(&self) -> Vec<&ProtoNode> {
pub fn heads_descended_from_finalization<E: EthSpec>(
&self,
best_finalized_checkpoint: Checkpoint,
) -> Vec<&ProtoNode> {
self.nodes
.iter()
.filter(|node| {
node.best_child.is_none()
&& self.is_finalized_checkpoint_or_descendant::<E>(node.root)
&& self.is_finalized_checkpoint_or_descendant::<E>(
node.root,
best_finalized_checkpoint,
)
})
.collect()
}

View File

@@ -424,8 +424,6 @@ impl ProtoArrayForkChoice {
) -> Result<Self, String> {
let mut proto_array = ProtoArray {
prune_threshold: DEFAULT_PRUNE_THRESHOLD,
justified_checkpoint,
finalized_checkpoint,
nodes: Vec::with_capacity(1),
indices: HashMap::with_capacity(1),
previous_proposer_boost: ProposerBoost::default(),
@@ -449,7 +447,12 @@ impl ProtoArrayForkChoice {
};
proto_array
.on_block::<E>(block, current_slot)
.on_block::<E>(
block,
current_slot,
justified_checkpoint,
finalized_checkpoint,
)
.map_err(|e| format!("Failed to add finalized block to proto_array: {:?}", e))?;
Ok(Self {
@@ -473,9 +476,10 @@ impl ProtoArrayForkChoice {
pub fn process_execution_payload_invalidation<E: EthSpec>(
&mut self,
op: &InvalidationOperation,
finalized_checkpoint: Checkpoint,
) -> Result<(), String> {
self.proto_array
.propagate_execution_payload_invalidation::<E>(op)
.propagate_execution_payload_invalidation::<E>(op, finalized_checkpoint)
.map_err(|e| format!("Failed to process invalid payload: {:?}", e))
}
@@ -499,13 +503,20 @@ impl ProtoArrayForkChoice {
&mut self,
block: Block,
current_slot: Slot,
justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
) -> Result<(), String> {
if block.parent_root.is_none() {
return Err("Missing parent root".to_string());
}
self.proto_array
.on_block::<E>(block, current_slot)
.on_block::<E>(
block,
current_slot,
justified_checkpoint,
finalized_checkpoint,
)
.map_err(|e| format!("process_block_error: {:?}", e))
}
@@ -547,7 +558,12 @@ impl ProtoArrayForkChoice {
*old_balances = new_balances.clone();
self.proto_array
.find_head::<E>(&justified_checkpoint.root, current_slot)
.find_head::<E>(
&justified_checkpoint.root,
current_slot,
justified_checkpoint,
finalized_checkpoint,
)
.map_err(|e| format!("find_head failed: {:?}", e))
}
@@ -884,9 +900,10 @@ impl ProtoArrayForkChoice {
pub fn is_finalized_checkpoint_or_descendant<E: EthSpec>(
&self,
descendant_root: Hash256,
best_finalized_checkpoint: Checkpoint,
) -> bool {
self.proto_array
.is_finalized_checkpoint_or_descendant::<E>(descendant_root)
.is_finalized_checkpoint_or_descendant::<E>(descendant_root, best_finalized_checkpoint)
}
pub fn latest_message(&self, validator_index: usize) -> Option<(Hash256, Epoch)> {
@@ -916,12 +933,21 @@ impl ProtoArrayForkChoice {
self.proto_array.iter_block_roots(block_root)
}
pub fn as_ssz_container(&self) -> SszContainer {
SszContainer::from(self)
pub fn as_ssz_container(
&self,
justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
) -> SszContainer {
SszContainer::from_proto_array(self, justified_checkpoint, finalized_checkpoint)
}
pub fn as_bytes(&self) -> Vec<u8> {
SszContainer::from(self).as_ssz_bytes()
pub fn as_bytes(
&self,
justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
) -> Vec<u8> {
self.as_ssz_container(justified_checkpoint, finalized_checkpoint)
.as_ssz_bytes()
}
pub fn from_bytes(bytes: &[u8], balances: JustifiedBalances) -> Result<Self, String> {
@@ -954,8 +980,12 @@ impl ProtoArrayForkChoice {
}
/// Returns all nodes that have zero children and are descended from the finalized checkpoint.
pub fn heads_descended_from_finalization<E: EthSpec>(&self) -> Vec<&ProtoNode> {
self.proto_array.heads_descended_from_finalization::<E>()
pub fn heads_descended_from_finalization<E: EthSpec>(
&self,
best_finalized_checkpoint: Checkpoint,
) -> Vec<&ProtoNode> {
self.proto_array
.heads_descended_from_finalization::<E>(best_finalized_checkpoint)
}
}
@@ -1125,6 +1155,8 @@ mod test_compute_deltas {
unrealized_finalized_checkpoint: Some(genesis_checkpoint),
},
genesis_slot + 1,
genesis_checkpoint,
genesis_checkpoint,
)
.unwrap();
@@ -1148,6 +1180,8 @@ mod test_compute_deltas {
unrealized_finalized_checkpoint: None,
},
genesis_slot + 1,
genesis_checkpoint,
genesis_checkpoint,
)
.unwrap();
@@ -1161,10 +1195,24 @@ mod test_compute_deltas {
assert!(!fc.is_descendant(finalized_root, not_finalized_desc));
assert!(!fc.is_descendant(finalized_root, unknown));
assert!(fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(finalized_root));
assert!(fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(finalized_desc));
assert!(!fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(not_finalized_desc));
assert!(!fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(unknown));
assert!(fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
finalized_root,
genesis_checkpoint
));
assert!(fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
finalized_desc,
genesis_checkpoint
));
assert!(!fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
not_finalized_desc,
genesis_checkpoint
));
assert!(
!fc.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
unknown,
genesis_checkpoint
)
);
assert!(!fc.is_descendant(finalized_desc, not_finalized_desc));
assert!(fc.is_descendant(finalized_desc, finalized_desc));
@@ -1260,6 +1308,8 @@ mod test_compute_deltas {
unrealized_finalized_checkpoint: Some(genesis_checkpoint),
},
Slot::from(block.slot),
genesis_checkpoint,
genesis_checkpoint,
)
.unwrap();
};
@@ -1314,29 +1364,34 @@ mod test_compute_deltas {
// Set the finalized checkpoint to finalize the first slot of epoch 1 on
// the canonical chain.
fc.proto_array.finalized_checkpoint = Checkpoint {
let finalized_checkpoint = Checkpoint {
root: finalized_root,
epoch: Epoch::new(1),
};
assert!(
fc.proto_array
.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(finalized_root),
.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
finalized_root,
finalized_checkpoint
),
"the finalized checkpoint is the finalized checkpoint"
);
assert!(
fc.proto_array
.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(get_block_root(
canonical_slot
)),
.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
get_block_root(canonical_slot),
finalized_checkpoint
),
"the canonical block is a descendant of the finalized checkpoint"
);
assert!(
!fc.proto_array
.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(get_block_root(
non_canonical_slot
)),
.is_finalized_checkpoint_or_descendant::<MainnetEthSpec>(
get_block_root(non_canonical_slot),
finalized_checkpoint
),
"although the non-canonical block is a descendant of the finalized block, \
it's not a descendant of the finalized checkpoint"
);

View File

@@ -26,22 +26,28 @@ pub struct SszContainer {
#[superstruct(only(V17))]
pub balances: Vec<u64>,
pub prune_threshold: usize,
pub justified_checkpoint: Checkpoint,
pub finalized_checkpoint: Checkpoint,
// Deprecated, remove in a future schema migration
justified_checkpoint: Checkpoint,
// Deprecated, remove in a future schema migration
finalized_checkpoint: Checkpoint,
pub nodes: Vec<ProtoNodeV17>,
pub indices: Vec<(Hash256, usize)>,
pub previous_proposer_boost: ProposerBoost,
}
impl From<&ProtoArrayForkChoice> for SszContainer {
fn from(from: &ProtoArrayForkChoice) -> Self {
impl SszContainer {
pub fn from_proto_array(
from: &ProtoArrayForkChoice,
justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
) -> Self {
let proto_array = &from.proto_array;
Self {
votes: from.votes.0.clone(),
prune_threshold: proto_array.prune_threshold,
justified_checkpoint: proto_array.justified_checkpoint,
finalized_checkpoint: proto_array.finalized_checkpoint,
justified_checkpoint,
finalized_checkpoint,
nodes: proto_array.nodes.clone(),
indices: proto_array.indices.iter().map(|(k, v)| (*k, *v)).collect(),
previous_proposer_boost: proto_array.previous_proposer_boost,
@@ -55,8 +61,6 @@ impl TryFrom<(SszContainer, JustifiedBalances)> for ProtoArrayForkChoice {
fn try_from((from, balances): (SszContainer, JustifiedBalances)) -> Result<Self, Error> {
let proto_array = ProtoArray {
prune_threshold: from.prune_threshold,
justified_checkpoint: from.justified_checkpoint,
finalized_checkpoint: from.finalized_checkpoint,
nodes: from.nodes,
indices: from.indices.into_iter().collect::<HashMap<_, _>>(),
previous_proposer_boost: from.previous_proposer_boost,

View File

@@ -25,7 +25,7 @@ portable = ["bls/supranational-portable"]
[dependencies]
alloy-primitives = { workspace = true }
alloy-rlp = { version = "0.3.4", features = ["derive"] }
alloy-rlp = { workspace = true, features = ["derive"] }
arbitrary = { workspace = true, features = ["derive"], optional = true }
bls = { workspace = true }
compare_fields = { workspace = true }

View File

@@ -42,7 +42,7 @@ pub fn get_custody_groups(
///
/// # Returns
/// Vector of custody group indices in computation order or error if parameters are invalid
pub fn get_custody_groups_ordered(
fn get_custody_groups_ordered(
raw_node_id: [u8; 32],
custody_group_count: u64,
spec: &ChainSpec,
@@ -76,6 +76,27 @@ pub fn get_custody_groups_ordered(
Ok(custody_groups)
}
/// Returns a deterministically ordered list of custody columns assigned to a node,
/// preserving the order in which they were computed during iteration.
///
/// # Arguments
/// * `raw_node_id` - 32-byte node identifier
/// * `spec` - Chain specification containing custody parameters
pub fn compute_ordered_custody_column_indices<E: EthSpec>(
raw_node_id: [u8; 32],
spec: &ChainSpec,
) -> Result<Vec<ColumnIndex>, DataColumnCustodyGroupError> {
let all_custody_groups_ordered =
get_custody_groups_ordered(raw_node_id, spec.number_of_custody_groups, spec)?;
let mut ordered_custody_columns = vec![];
for custody_index in all_custody_groups_ordered {
let columns = compute_columns_for_custody_group::<E>(custody_index, spec)?;
ordered_custody_columns.extend(columns);
}
Ok(ordered_custody_columns)
}
/// Returns the columns that are associated with a given custody group.
///
/// spec: https://github.com/ethereum/consensus-specs/blob/8e0d0d48e81d6c7c5a8253ab61340f5ea5bac66a/specs/fulu/das-core.md#compute_columns_for_custody_group

View File

@@ -8,6 +8,7 @@ use ssz_derive::{Decode, Encode};
use std::fmt;
use superstruct::superstruct;
use test_random_derive::TestRandom;
use tracing::instrument;
use tree_hash::TreeHash;
use tree_hash_derive::TreeHash;
@@ -253,6 +254,7 @@ impl<E: EthSpec, Payload: AbstractExecPayload<E>> SignedBeaconBlock<E, Payload>
}
/// Produce a signed beacon block header corresponding to this block.
#[instrument(level = "debug", skip_all)]
pub fn signed_block_header(&self) -> SignedBeaconBlockHeader {
SignedBeaconBlockHeader {
message: self.message().block_header(),