Add Gloas data column support (#8682)

Co-Authored-By: Eitan Seri-Levi <eserilev@ucsc.edu>

Co-Authored-By: Eitan Seri-Levi <eserilev@gmail.com>
This commit is contained in:
Eitan Seri-Levi
2026-01-27 20:52:12 -08:00
committed by GitHub
parent 0f57fc9d8e
commit 9bec8df37a
44 changed files with 1507 additions and 680 deletions

View File

@@ -81,10 +81,12 @@ async fn get_chain_segment() -> (Vec<BeaconSnapshot<E>>, Vec<Option<DataSidecars
beacon_state: snapshot.beacon_state,
});
let fork_name = snapshot.beacon_block.fork_name_unchecked();
let data_sidecars = if harness.spec.is_peer_das_enabled_for_epoch(block_epoch) {
harness
.chain
.get_data_columns(&snapshot.beacon_block_root)
.get_data_columns(&snapshot.beacon_block_root, fork_name)
.unwrap()
.map(|columns| {
columns
@@ -244,18 +246,18 @@ fn update_data_column_signed_header<E: EthSpec>(
) {
for old_custody_column_sidecar in data_columns.as_mut_slice() {
let old_column_sidecar = old_custody_column_sidecar.as_data_column();
let new_column_sidecar = Arc::new(DataColumnSidecar::<E> {
index: old_column_sidecar.index,
column: old_column_sidecar.column.clone(),
kzg_commitments: old_column_sidecar.kzg_commitments.clone(),
kzg_proofs: old_column_sidecar.kzg_proofs.clone(),
let new_column_sidecar = Arc::new(DataColumnSidecar::Fulu(DataColumnSidecarFulu {
index: *old_column_sidecar.index(),
column: old_column_sidecar.column().clone(),
kzg_commitments: old_column_sidecar.kzg_commitments().clone(),
kzg_proofs: old_column_sidecar.kzg_proofs().clone(),
signed_block_header: signed_block.signed_block_header(),
kzg_commitments_inclusion_proof: signed_block
.message()
.body()
.kzg_commitments_merkle_proof()
.unwrap(),
});
}));
*old_custody_column_sidecar = CustodyDataColumn::from_asserted_custody(new_column_sidecar);
}
}

View File

@@ -9,7 +9,10 @@ use rand::rngs::StdRng;
use std::sync::Arc;
use types::data::FixedBlobSidecarList;
use types::test_utils::TestRandom;
use types::{BlobSidecar, DataColumnSidecar, EthSpec, MinimalEthSpec, Slot};
use types::{
BlobSidecar, DataColumnSidecar, DataColumnSidecarFulu, DataColumnSidecarGloas, EthSpec,
MinimalEthSpec, Slot,
};
type E = MinimalEthSpec;
@@ -73,13 +76,22 @@ async fn data_column_sidecar_event_on_process_gossip_data_column() {
// build and process a gossip verified data column
let mut rng = StdRng::seed_from_u64(0xDEADBEEF0BAD5EEDu64);
let sidecar = {
// DA checker only accepts sampling columns, so we need to create one with a sampling index.
let mut random_sidecar = DataColumnSidecar::random_for_test(&mut rng);
let slot = Slot::new(10);
let epoch = slot.epoch(E::slots_per_epoch());
random_sidecar.signed_block_header.message.slot = slot;
random_sidecar.index = harness.chain.sampling_columns_for_epoch(epoch)[0];
random_sidecar
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
// DA checker only accepts sampling columns, so we need to create one with a sampling index.
if fork_name.gloas_enabled() {
let mut random_sidecar = DataColumnSidecarGloas::random_for_test(&mut rng);
let epoch = slot.epoch(E::slots_per_epoch());
random_sidecar.slot = slot;
random_sidecar.index = harness.chain.sampling_columns_for_epoch(epoch)[0];
DataColumnSidecar::Gloas(random_sidecar)
} else {
let mut random_sidecar = DataColumnSidecarFulu::random_for_test(&mut rng);
let epoch = slot.epoch(E::slots_per_epoch());
random_sidecar.signed_block_header.message.slot = slot;
random_sidecar.index = harness.chain.sampling_columns_for_epoch(epoch)[0];
DataColumnSidecar::Fulu(random_sidecar)
}
};
let gossip_verified_data_column =
GossipVerifiedDataColumn::__new_for_testing(Arc::new(sidecar));

View File

@@ -3381,7 +3381,7 @@ async fn test_import_historical_data_columns_batch() {
.await;
harness.advance_slot();
let block_root_iter = harness
let block_root_and_slot = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
@@ -3389,9 +3389,14 @@ async fn test_import_historical_data_columns_batch() {
let mut data_columns_list = vec![];
// Get all data columns for epoch 0
for block in block_root_iter {
let (block_root, _) = block.unwrap();
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
for block_root_and_slot in block_root_and_slot {
let (block_root, slot) = block_root_and_slot.unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
for data_column in data_columns.unwrap_or_default() {
data_columns_list.push(data_column);
}
@@ -3416,15 +3421,20 @@ async fn test_import_historical_data_columns_batch() {
.try_prune_blobs(true, Epoch::new(2))
.unwrap();
let block_root_iter = harness
let block_root_and_slot_iter = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
// Assert that data columns no longer exist for epoch 0
for block in block_root_iter {
let (block_root, _) = block.unwrap();
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
for block_root_and_slot in block_root_and_slot_iter {
let (block_root, slot) = block_root_and_slot.unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
assert!(data_columns.is_none())
}
@@ -3434,14 +3444,14 @@ async fn test_import_historical_data_columns_batch() {
.import_historical_data_column_batch(Epoch::new(0), data_columns_list, cgc)
.unwrap();
let block_root_iter = harness
let block_root_and_slot_iter = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
// Assert that data columns now exist for epoch 0
for block in block_root_iter {
let (block_root, _) = block.unwrap();
for block_root_and_slot in block_root_and_slot_iter {
let (block_root, slot) = block_root_and_slot.unwrap();
if !harness
.get_block(block_root.into())
.unwrap()
@@ -3451,7 +3461,12 @@ async fn test_import_historical_data_columns_batch() {
.unwrap()
.is_empty()
{
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
assert!(data_columns.is_some())
};
}
@@ -3479,7 +3494,7 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() {
.await;
harness.advance_slot();
let block_root_iter = harness
let block_root_and_slot_iter = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
@@ -3488,14 +3503,23 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() {
// Get all data columns from start_slot to end_slot
// and mutate the data columns with an invalid block root
for block in block_root_iter {
let (block_root, _) = block.unwrap();
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
for block_root_and_slot in block_root_and_slot_iter {
let (block_root, slot) = block_root_and_slot.unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
for data_column in data_columns.unwrap_or_default() {
let mut data_column = (*data_column).clone();
if data_column.index % 2 == 0 {
data_column.signed_block_header.message.body_root = Hash256::ZERO;
if data_column.index() % 2 == 0 {
data_column
.signed_block_header_mut()
.unwrap()
.message
.body_root = Hash256::ZERO;
}
data_columns_list.push(Arc::new(data_column));
@@ -3520,15 +3544,20 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() {
.try_prune_blobs(true, Epoch::new(2))
.unwrap();
let block_root_iter = harness
let block_root_and_slot_iter = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
// Assert there are no columns between start_slot and end_slot
for block in block_root_iter {
let (block_root, _) = block.unwrap();
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
for block_root_and_slot in block_root_and_slot_iter {
let (block_root, slot) = block_root_and_slot.unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
assert!(data_columns.is_none())
}
@@ -3574,20 +3603,29 @@ async fn test_import_historical_data_columns_batch_no_block_found() {
.await;
harness.advance_slot();
let block_root_iter = harness
let block_root_and_slot_iter = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
let mut data_columns_list = vec![];
for block in block_root_iter {
let (block_root, _) = block.unwrap();
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
for block_root_and_slot in block_root_and_slot_iter {
let (block_root, slot) = block_root_and_slot.unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
for data_column in data_columns.unwrap_or_default() {
let mut data_column = (*data_column).clone();
data_column.signed_block_header.message.body_root = Hash256::ZERO;
data_column
.signed_block_header_mut()
.unwrap()
.message
.body_root = Hash256::ZERO;
data_columns_list.push(Arc::new(data_column));
}
}
@@ -3610,14 +3648,19 @@ async fn test_import_historical_data_columns_batch_no_block_found() {
.try_prune_blobs(true, Epoch::new(2))
.unwrap();
let block_root_iter = harness
let block_root_and_slot_iter = harness
.chain
.forwards_iter_block_roots_until(start_slot, end_slot)
.unwrap();
for block in block_root_iter {
let (block_root, _) = block.unwrap();
let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap();
for block_root_and_slot in block_root_and_slot_iter {
let (block_root, slot) = block_root_and_slot.unwrap();
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
let data_columns = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap();
assert!(data_columns.is_none())
}
@@ -4996,7 +5039,13 @@ fn check_data_column_existence(
.unwrap()
.map(Result::unwrap)
{
if let Some(columns) = harness.chain.store.get_data_columns(&block_root).unwrap() {
let fork_name = harness.spec.fork_name_at_slot::<E>(slot);
if let Some(columns) = harness
.chain
.store
.get_data_columns(&block_root, fork_name)
.unwrap()
{
assert!(should_exist, "columns at slot {slot} exist but should not");
columns_seen += columns.len();
} else {