Merge remote-tracking branch 'origin/deneb-free-blobs' into tree-states

This commit is contained in:
Michael Sproul
2023-09-29 16:34:29 +10:00
253 changed files with 21791 additions and 3122 deletions

View File

@@ -18,6 +18,12 @@ mod fork;
mod fork_choice;
mod genesis_initialization;
mod genesis_validity;
mod kzg_blob_to_kzg_commitment;
mod kzg_compute_blob_kzg_proof;
mod kzg_compute_kzg_proof;
mod kzg_verify_blob_kzg_proof;
mod kzg_verify_blob_kzg_proof_batch;
mod kzg_verify_kzg_proof;
mod merkle_proof_validity;
mod operations;
mod rewards;
@@ -42,6 +48,12 @@ pub use epoch_processing::*;
pub use fork::ForkTest;
pub use genesis_initialization::*;
pub use genesis_validity::*;
pub use kzg_blob_to_kzg_commitment::*;
pub use kzg_compute_blob_kzg_proof::*;
pub use kzg_compute_kzg_proof::*;
pub use kzg_verify_blob_kzg_proof::*;
pub use kzg_verify_blob_kzg_proof_batch::*;
pub use kzg_verify_kzg_proof::*;
pub use merkle_proof_validity::*;
pub use operations::*;
pub use rewards::RewardsTest;

View File

@@ -64,8 +64,9 @@ pub fn previous_fork(fork_name: ForkName) -> ForkName {
match fork_name {
ForkName::Base => ForkName::Base,
ForkName::Altair => ForkName::Base,
ForkName::Merge => ForkName::Altair, // TODO: Check this when tests are released..
ForkName::Capella => ForkName::Merge, // TODO: Check this when tests are released..
ForkName::Merge => ForkName::Altair,
ForkName::Capella => ForkName::Merge,
ForkName::Deneb => ForkName::Capella,
}
}

View File

@@ -106,7 +106,10 @@ impl<E: EthSpec> EpochTransition<E> for JustificationAndFinalization {
justification_and_finalization_state.apply_changes_to_state(state);
Ok(())
}
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => {
initialize_progressive_balances_cache(state, None, spec)?;
let justification_and_finalization_state =
altair::process_justification_and_finalization(state)?;
@@ -125,9 +128,10 @@ impl<E: EthSpec> EpochTransition<E> for RewardsAndPenalties {
validator_statuses.process_attestations(state)?;
base::process_rewards_and_penalties(state, &validator_statuses, spec)
}
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
altair::process_rewards_and_penalties_slow(state, spec)
}
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => altair::process_rewards_and_penalties_slow(state, spec),
}
}
}
@@ -156,7 +160,10 @@ impl<E: EthSpec> EpochTransition<E> for Slashings {
spec,
)?;
}
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => {
process_slashings_slow(state, spec)?;
}
};
@@ -206,7 +213,9 @@ impl<E: EthSpec> EpochTransition<E> for HistoricalRootsUpdate {
impl<E: EthSpec> EpochTransition<E> for HistoricalSummariesUpdate {
fn run(state: &mut BeaconState<E>, _spec: &ChainSpec) -> Result<(), EpochProcessingError> {
match state {
BeaconState::Capella(_) => process_historical_summaries_update(state),
BeaconState::Capella(_) | BeaconState::Deneb(_) => {
process_historical_summaries_update(state)
}
_ => Ok(()),
}
}
@@ -226,9 +235,10 @@ impl<E: EthSpec> EpochTransition<E> for SyncCommitteeUpdates {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
match state {
BeaconState::Base(_) => Ok(()),
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
altair::process_sync_committee_updates(state, spec)
}
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => altair::process_sync_committee_updates(state, spec),
}
}
}
@@ -237,9 +247,10 @@ impl<E: EthSpec> EpochTransition<E> for InactivityUpdates {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
match state {
BeaconState::Base(_) => Ok(()),
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
altair::process_inactivity_updates_slow(state, spec)
}
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => altair::process_inactivity_updates_slow(state, spec),
}
}
}
@@ -248,9 +259,10 @@ impl<E: EthSpec> EpochTransition<E> for ParticipationFlagUpdates {
fn run(state: &mut BeaconState<E>, _: &ChainSpec) -> Result<(), EpochProcessingError> {
match state {
BeaconState::Base(_) => Ok(()),
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
altair::process_participation_flag_updates(state)
}
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => altair::process_participation_flag_updates(state),
}
}
}
@@ -301,7 +313,7 @@ impl<E: EthSpec, T: EpochTransition<E>> Case for EpochProcessing<E, T> {
T::name() != "participation_record_updates"
&& T::name() != "historical_summaries_update"
}
ForkName::Capella => {
ForkName::Capella | ForkName::Deneb => {
T::name() != "participation_record_updates"
&& T::name() != "historical_roots_update"
}

View File

@@ -3,7 +3,9 @@ use crate::case_result::compare_beacon_state_results_without_caches;
use crate::cases::common::previous_fork;
use crate::decode::{ssz_decode_state, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::upgrade::{upgrade_to_altair, upgrade_to_bellatrix, upgrade_to_capella};
use state_processing::upgrade::{
upgrade_to_altair, upgrade_to_bellatrix, upgrade_to_capella, upgrade_to_deneb,
};
use types::{BeaconState, ForkName};
#[derive(Debug, Clone, Default, Deserialize)]
@@ -62,6 +64,7 @@ impl<E: EthSpec> Case for ForkTest<E> {
ForkName::Altair => upgrade_to_altair(&mut result_state, spec).map(|_| result_state),
ForkName::Merge => upgrade_to_bellatrix(&mut result_state, spec).map(|_| result_state),
ForkName::Capella => upgrade_to_capella(&mut result_state, spec).map(|_| result_state),
ForkName::Deneb => upgrade_to_deneb(&mut result_state, spec).map(|_| result_state),
};
compare_beacon_state_results_without_caches(&mut result, &mut expected)

View File

@@ -6,8 +6,9 @@ use beacon_chain::{
attestation_verification::{
obtain_indexed_attestation_and_committees_per_slot, VerifiedAttestation,
},
blob_verification::GossipVerifiedBlob,
test_utils::{BeaconChainHarness, EphemeralHarnessType},
BeaconChainTypes, CachedHead, ChainConfig, NotifyExecutionLayer,
AvailabilityProcessingStatus, BeaconChainTypes, CachedHead, ChainConfig, NotifyExecutionLayer,
};
use execution_layer::{json_structures::JsonPayloadStatusV1Status, PayloadStatusV1};
use serde::Deserialize;
@@ -17,9 +18,9 @@ use std::future::Future;
use std::sync::Arc;
use std::time::Duration;
use types::{
Attestation, AttesterSlashing, BeaconBlock, BeaconState, Checkpoint, EthSpec,
ExecutionBlockHash, ForkName, Hash256, IndexedAttestation, ProgressiveBalancesMode,
SignedBeaconBlock, Slot, Uint256,
Attestation, AttesterSlashing, BeaconBlock, BeaconState, BlobSidecar, BlobsList, Checkpoint,
EthSpec, ExecutionBlockHash, ForkName, Hash256, IndexedAttestation, KzgProof,
ProgressiveBalancesMode, Signature, SignedBeaconBlock, SignedBlobSidecar, Slot, Uint256,
};
#[derive(Default, Debug, PartialEq, Clone, Deserialize, Decode)]
@@ -71,25 +72,27 @@ impl From<PayloadStatus> for PayloadStatusV1 {
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged, deny_unknown_fields)]
pub enum Step<B, A, AS, P> {
pub enum Step<TBlock, TBlobs, TAttestation, TAttesterSlashing, TPowBlock> {
Tick {
tick: u64,
},
ValidBlock {
block: B,
block: TBlock,
},
MaybeValidBlock {
block: B,
block: TBlock,
blobs: Option<TBlobs>,
proofs: Option<Vec<KzgProof>>,
valid: bool,
},
Attestation {
attestation: A,
attestation: TAttestation,
},
AttesterSlashing {
attester_slashing: AS,
attester_slashing: TAttesterSlashing,
},
PowBlock {
pow_block: P,
pow_block: TPowBlock,
},
OnPayloadInfo {
block_hash: ExecutionBlockHash,
@@ -113,7 +116,9 @@ pub struct ForkChoiceTest<E: EthSpec> {
pub anchor_state: BeaconState<E>,
pub anchor_block: BeaconBlock<E>,
#[allow(clippy::type_complexity)]
pub steps: Vec<Step<SignedBeaconBlock<E>, Attestation<E>, AttesterSlashing<E>, PowBlock>>,
pub steps: Vec<
Step<SignedBeaconBlock<E>, BlobsList<E>, Attestation<E>, AttesterSlashing<E>, PowBlock>,
>,
}
impl<E: EthSpec> LoadCase for ForkChoiceTest<E> {
@@ -126,7 +131,7 @@ impl<E: EthSpec> LoadCase for ForkChoiceTest<E> {
.expect("path must be valid OsStr")
.to_string();
let spec = &testing_spec::<E>(fork_name);
let steps: Vec<Step<String, String, String, String>> =
let steps: Vec<Step<String, String, String, String, String>> =
yaml_decode_file(&path.join("steps.yaml"))?;
// Resolve the object names in `steps.yaml` into actual decoded block/attestation objects.
let steps = steps
@@ -139,11 +144,25 @@ impl<E: EthSpec> LoadCase for ForkChoiceTest<E> {
})
.map(|block| Step::ValidBlock { block })
}
Step::MaybeValidBlock { block, valid } => {
ssz_decode_file_with(&path.join(format!("{}.ssz_snappy", block)), |bytes| {
SignedBeaconBlock::from_ssz_bytes(bytes, spec)
Step::MaybeValidBlock {
block,
blobs,
proofs,
valid,
} => {
let block =
ssz_decode_file_with(&path.join(format!("{block}.ssz_snappy")), |bytes| {
SignedBeaconBlock::from_ssz_bytes(bytes, spec)
})?;
let blobs = blobs
.map(|blobs| ssz_decode_file(&path.join(format!("{blobs}.ssz_snappy"))))
.transpose()?;
Ok(Step::MaybeValidBlock {
block,
blobs,
proofs,
valid,
})
.map(|block| Step::MaybeValidBlock { block, valid })
}
Step::Attestation { attestation } => {
ssz_decode_file(&path.join(format!("{}.ssz_snappy", attestation)))
@@ -204,10 +223,15 @@ impl<E: EthSpec> Case for ForkChoiceTest<E> {
for step in &self.steps {
match step {
Step::Tick { tick } => tester.set_tick(*tick),
Step::ValidBlock { block } => tester.process_block(block.clone(), true)?,
Step::MaybeValidBlock { block, valid } => {
tester.process_block(block.clone(), *valid)?
Step::ValidBlock { block } => {
tester.process_block(block.clone(), None, None, true)?
}
Step::MaybeValidBlock {
block,
blobs,
proofs,
valid,
} => tester.process_block(block.clone(), blobs.clone(), proofs.clone(), *valid)?,
Step::Attestation { attestation } => tester.process_attestation(attestation)?,
Step::AttesterSlashing { attester_slashing } => {
tester.process_attester_slashing(attester_slashing)
@@ -300,7 +324,7 @@ impl<E: EthSpec> Tester<E> {
));
}
let harness = BeaconChainHarness::builder(E::default())
let harness = BeaconChainHarness::<EphemeralHarnessType<E>>::builder(E::default())
.spec(spec.clone())
.keypairs(vec![])
.chain_config(ChainConfig {
@@ -380,16 +404,72 @@ impl<E: EthSpec> Tester<E> {
.unwrap();
}
pub fn process_block(&self, block: SignedBeaconBlock<E>, valid: bool) -> Result<(), Error> {
pub fn process_block(
&self,
block: SignedBeaconBlock<E>,
blobs: Option<BlobsList<E>>,
kzg_proofs: Option<Vec<KzgProof>>,
valid: bool,
) -> Result<(), Error> {
let block_root = block.canonical_root();
// Convert blobs and kzg_proofs into sidecars, then plumb them into the availability tracker
if let Some(blobs) = blobs.clone() {
let proofs = kzg_proofs.unwrap();
let commitments = block
.message()
.body()
.blob_kzg_commitments()
.unwrap()
.clone();
// Zipping will stop when any of the zipped lists runs out, which is what we want. Some
// of the tests don't provide enough proofs/blobs, and should fail the availability
// check.
for (i, ((blob, kzg_proof), kzg_commitment)) in blobs
.into_iter()
.zip(proofs)
.zip(commitments.into_iter())
.enumerate()
{
let signed_sidecar = SignedBlobSidecar {
message: Arc::new(BlobSidecar {
block_root,
index: i as u64,
slot: block.slot(),
block_parent_root: block.parent_root(),
proposer_index: block.message().proposer_index(),
blob,
kzg_commitment,
kzg_proof,
}),
signature: Signature::empty(),
_phantom: Default::default(),
};
let result = self.block_on_dangerous(
self.harness
.chain
.check_gossip_blob_availability_and_import(
GossipVerifiedBlob::__assumed_valid(signed_sidecar),
),
)?;
if valid {
assert!(result.is_ok());
}
}
};
let block = Arc::new(block);
let result = self.block_on_dangerous(self.harness.chain.process_block(
block_root,
block.clone(),
NotifyExecutionLayer::Yes,
|| Ok(()),
))?;
if result.is_ok() != valid {
let result: Result<Result<Hash256, ()>, _> = self
.block_on_dangerous(self.harness.chain.process_block(
block_root,
block.clone(),
NotifyExecutionLayer::Yes,
|| Ok(()),
))?
.map(|avail: AvailabilityProcessingStatus| avail.try_into());
let success = result.as_ref().map_or(false, |inner| inner.is_ok());
if success != valid {
return Err(Error::DidntFail(format!(
"block with root {} was valid={} whilst test expects valid={}. result: {:?}",
block_root,
@@ -401,8 +481,8 @@ impl<E: EthSpec> Tester<E> {
// Apply invalid blocks directly against the fork choice `on_block` function. This ensures
// that the block is being rejected by `on_block`, not just some upstream block processing
// function.
if !valid {
// function. When blobs exist, we don't do this.
if !valid && blobs.is_none() {
// A missing parent block whilst `valid == false` means the test should pass.
if let Some(parent_block) = self
.harness

View File

@@ -0,0 +1,47 @@
use super::*;
use crate::case_result::compare_result;
use beacon_chain::kzg_utils::blob_to_kzg_commitment;
use kzg::KzgCommitment;
use serde_derive::Deserialize;
use std::marker::PhantomData;

/// Raw input for a `blob_to_kzg_commitment` spec test: the blob as a hex string.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct KZGBlobToKZGCommitmentInput {
    pub blob: String,
}

/// Test case for the `blob_to_kzg_commitment` KZG spec tests.
///
/// `output` is `None` when the vector expects the computation to fail
/// (e.g. a malformed blob).
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec", deny_unknown_fields)]
pub struct KZGBlobToKZGCommitment<E: EthSpec> {
    pub input: KZGBlobToKZGCommitmentInput,
    pub output: Option<String>,
    #[serde(skip)]
    _phantom: PhantomData<E>,
}

impl<E: EthSpec> LoadCase for KZGBlobToKZGCommitment<E> {
    // Each case directory contains a single `data.yaml` describing input/output.
    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
        decode::yaml_decode_file(path.join("data.yaml").as_path())
    }
}

impl<E: EthSpec> Case for KZGBlobToKZGCommitment<E> {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        // KZG tests only run for the Deneb fork.
        fork_name == ForkName::Deneb
    }

    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
        let kzg = get_kzg::<E::Kzg>()?;

        // Compute the commitment; a blob parse failure or a KZG error both
        // surface as `Err`, which must match an absent expected output.
        let commitment = parse_blob::<E>(&self.input.blob).and_then(|blob| {
            blob_to_kzg_commitment::<E>(&kzg, &blob).map_err(|e| {
                Error::InternalError(format!("Failed to compute kzg commitment: {:?}", e))
            })
        });

        // An unparseable expected output is collapsed to `None` by `.ok()`,
        // i.e. treated as "failure expected".
        let expected = self.output.as_ref().and_then(|s| parse_commitment(s).ok());

        compare_result::<KzgCommitment, _>(&commitment, &expected)
    }
}

View File

@@ -0,0 +1,52 @@
use super::*;
use crate::case_result::compare_result;
use beacon_chain::kzg_utils::compute_blob_kzg_proof;
use kzg::KzgProof;
use serde_derive::Deserialize;
use std::marker::PhantomData;

/// Raw input for a `compute_blob_kzg_proof` spec test: blob and commitment
/// as hex strings.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct KZGComputeBlobKZGProofInput {
    pub blob: String,
    pub commitment: String,
}

/// Test case for the `compute_blob_kzg_proof` KZG spec tests.
///
/// `output` is `None` when the vector expects the computation to fail.
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec", deny_unknown_fields)]
pub struct KZGComputeBlobKZGProof<E: EthSpec> {
    pub input: KZGComputeBlobKZGProofInput,
    pub output: Option<String>,
    #[serde(skip)]
    _phantom: PhantomData<E>,
}

impl<E: EthSpec> LoadCase for KZGComputeBlobKZGProof<E> {
    // Each case directory contains a single `data.yaml` describing input/output.
    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
        decode::yaml_decode_file(path.join("data.yaml").as_path())
    }
}

impl<E: EthSpec> Case for KZGComputeBlobKZGProof<E> {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        // KZG tests only run for the Deneb fork.
        fork_name == ForkName::Deneb
    }

    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
        // Parse both hex inputs first so a malformed vector surfaces as an
        // `Err` (matching an absent expected output).
        let parse_input = |input: &KZGComputeBlobKZGProofInput| -> Result<_, Error> {
            let blob = parse_blob::<E>(&input.blob)?;
            let commitment = parse_commitment(&input.commitment)?;
            Ok((blob, commitment))
        };

        let kzg = get_kzg::<E::Kzg>()?;
        let proof = parse_input(&self.input).and_then(|(blob, commitment)| {
            compute_blob_kzg_proof::<E>(&kzg, &blob, commitment)
                .map_err(|e| Error::InternalError(format!("Failed to compute kzg proof: {:?}", e)))
        });

        // An unparseable expected output is treated as "failure expected".
        let expected = self.output.as_ref().and_then(|s| parse_proof(s).ok());

        compare_result::<KzgProof, _>(&proof, &expected)
    }
}

View File

@@ -0,0 +1,62 @@
use super::*;
use crate::case_result::compare_result;
use beacon_chain::kzg_utils::compute_kzg_proof;
use kzg::KzgProof;
use serde_derive::Deserialize;
use std::marker::PhantomData;
use std::str::FromStr;
use types::Hash256;

/// Parse a field element (evaluation point or claimed value) from a
/// `0x`-prefixed hex string into a `Hash256`.
pub fn parse_point(point: &str) -> Result<Hash256, Error> {
    // `[2..]` skips the `0x` prefix without validating it.
    // NOTE(review): this byte-slice panics on inputs shorter than two bytes —
    // presumably all vectors carry the prefix; confirm against the test data.
    Hash256::from_str(&point[2..])
        .map_err(|e| Error::FailedToParseTest(format!("Failed to parse point: {:?}", e)))
}

/// Raw input for a `compute_kzg_proof` spec test: the blob and the
/// evaluation point `z`, both hex strings.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct KZGComputeKZGProofInput {
    pub blob: String,
    pub z: String,
}

/// Test case for the `compute_kzg_proof` KZG spec tests.
///
/// `output` is the expected `(proof, y)` pair, or `None` when the vector
/// expects the computation to fail.
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec", deny_unknown_fields)]
pub struct KZGComputeKZGProof<E: EthSpec> {
    pub input: KZGComputeKZGProofInput,
    pub output: Option<(String, Hash256)>,
    #[serde(skip)]
    _phantom: PhantomData<E>,
}

impl<E: EthSpec> LoadCase for KZGComputeKZGProof<E> {
    // Each case directory contains a single `data.yaml` describing input/output.
    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
        decode::yaml_decode_file(path.join("data.yaml").as_path())
    }
}

impl<E: EthSpec> Case for KZGComputeKZGProof<E> {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        // KZG tests only run for the Deneb fork.
        fork_name == ForkName::Deneb
    }

    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
        let parse_input = |input: &KZGComputeKZGProofInput| -> Result<_, Error> {
            let blob = parse_blob::<E>(&input.blob)?;
            let z = parse_point(&input.z)?;
            Ok((blob, z))
        };

        let kzg = get_kzg::<E::Kzg>()?;
        let proof = parse_input(&self.input).and_then(|(blob, z)| {
            compute_kzg_proof::<E>(&kzg, &blob, z)
                .map_err(|e| Error::InternalError(format!("Failed to compute kzg proof: {:?}", e)))
        });

        // Parse the expected proof string; a bad expected value collapses to
        // `None` ("failure expected"). The expected `y` is carried through as-is.
        let expected = self
            .output
            .as_ref()
            .and_then(|(s, z)| parse_proof(s).ok().map(|proof| (proof, *z)));

        compare_result::<(KzgProof, Hash256), _>(&proof, &expected)
    }
}

View File

@@ -0,0 +1,100 @@
use super::*;
use crate::case_result::compare_result;
use beacon_chain::kzg_utils::validate_blob;
use eth2_network_config::get_trusted_setup;
use kzg::{Kzg, KzgCommitment, KzgPreset, KzgProof, TrustedSetup};
use serde_derive::Deserialize;
use std::convert::TryInto;
use std::marker::PhantomData;
use types::Blob;

/// Build a KZG context from the bundled trusted setup for preset `P`.
///
/// Both deserialization and initialization failures are reported as
/// `Error::InternalError` — a broken setup is a harness bug, not a test
/// failure.
pub fn get_kzg<P: KzgPreset>() -> Result<Kzg<P>, Error> {
    let trusted_setup: TrustedSetup = serde_json::from_reader(get_trusted_setup::<P>())
        .map_err(|e| Error::InternalError(format!("Failed to initialize kzg: {:?}", e)))?;
    Kzg::new_from_trusted_setup(trusted_setup)
        .map_err(|e| Error::InternalError(format!("Failed to initialize kzg: {:?}", e)))
}

/// Parse a KZG proof from a `0x`-prefixed hex string.
pub fn parse_proof(proof: &str) -> Result<KzgProof, Error> {
    hex::decode(strip_0x(proof)?)
        .map_err(|e| Error::FailedToParseTest(format!("Failed to parse proof: {:?}", e)))
        .and_then(|bytes| {
            // `try_into` converts to the fixed-size array behind `KzgProof`,
            // rejecting inputs of the wrong length.
            bytes
                .try_into()
                .map_err(|e| Error::FailedToParseTest(format!("Failed to parse proof: {:?}", e)))
        })
        .map(KzgProof)
}

/// Parse a KZG commitment from a `0x`-prefixed hex string.
pub fn parse_commitment(commitment: &str) -> Result<KzgCommitment, Error> {
    hex::decode(strip_0x(commitment)?)
        .map_err(|e| Error::FailedToParseTest(format!("Failed to parse commitment: {:?}", e)))
        .and_then(|bytes| {
            // Fixed-size conversion rejects wrong-length commitments.
            bytes.try_into().map_err(|e| {
                Error::FailedToParseTest(format!("Failed to parse commitment: {:?}", e))
            })
        })
        .map(KzgCommitment)
}

/// Parse a blob from a `0x`-prefixed hex string.
///
/// `Blob::new` is fallible — presumably it rejects byte strings that don't
/// match the blob length for `E`; a rejection is reported as a parse failure.
pub fn parse_blob<E: EthSpec>(blob: &str) -> Result<Blob<E>, Error> {
    hex::decode(strip_0x(blob)?)
        .map_err(|e| Error::FailedToParseTest(format!("Failed to parse blob: {:?}", e)))
        .and_then(|bytes| {
            Blob::<E>::new(bytes)
                .map_err(|e| Error::FailedToParseTest(format!("Failed to parse blob: {:?}", e)))
        })
}
fn strip_0x(s: &str) -> Result<&str, Error> {
s.strip_prefix("0x").ok_or(Error::FailedToParseTest(format!(
"Hex is missing 0x prefix: {}",
s
)))
}
/// Raw input for a `verify_blob_kzg_proof` spec test: blob, commitment and
/// proof as hex strings.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct KZGVerifyBlobKZGProofInput {
    pub blob: String,
    pub commitment: String,
    pub proof: String,
}

/// Test case for the `verify_blob_kzg_proof` KZG spec tests.
///
/// `output` is the expected verification verdict, or `None` when the vector
/// expects verification itself to error.
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec", deny_unknown_fields)]
pub struct KZGVerifyBlobKZGProof<E: EthSpec> {
    pub input: KZGVerifyBlobKZGProofInput,
    pub output: Option<bool>,
    #[serde(skip)]
    _phantom: PhantomData<E>,
}

impl<E: EthSpec> LoadCase for KZGVerifyBlobKZGProof<E> {
    // Each case directory contains a single `data.yaml` describing input/output.
    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
        decode::yaml_decode_file(path.join("data.yaml").as_path())
    }
}

impl<E: EthSpec> Case for KZGVerifyBlobKZGProof<E> {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        // KZG tests only run for the Deneb fork.
        fork_name == ForkName::Deneb
    }

    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
        // Parse all three hex inputs; a malformed vector surfaces as `Err`.
        let parse_input = |input: &KZGVerifyBlobKZGProofInput| -> Result<(Blob<E>, KzgCommitment, KzgProof), Error> {
            let blob = parse_blob::<E>(&input.blob)?;
            let commitment = parse_commitment(&input.commitment)?;
            let proof = parse_proof(&input.proof)?;
            Ok((blob, commitment, proof))
        };

        let kzg = get_kzg::<E::Kzg>()?;
        // Note: `validate_blob` takes the blob by value here.
        let result = parse_input(&self.input).and_then(|(blob, commitment, proof)| {
            validate_blob::<E>(&kzg, blob, commitment, proof)
                .map_err(|e| Error::InternalError(format!("Failed to validate blob: {:?}", e)))
        });

        compare_result::<bool, _>(&result, &self.output)
    }
}

View File

@@ -0,0 +1,63 @@
use super::*;
use crate::case_result::compare_result;
use beacon_chain::kzg_utils::validate_blobs;
use serde_derive::Deserialize;
use std::marker::PhantomData;

/// Raw input for a `verify_blob_kzg_proof_batch` spec test: parallel lists
/// of blobs, commitments and proofs as hex strings.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct KZGVerifyBlobKZGProofBatchInput {
    pub blobs: Vec<String>,
    pub commitments: Vec<String>,
    pub proofs: Vec<String>,
}

/// Test case for the `verify_blob_kzg_proof_batch` KZG spec tests.
///
/// `output` is the expected verification verdict, or `None` when the vector
/// expects verification itself to error.
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec", deny_unknown_fields)]
pub struct KZGVerifyBlobKZGProofBatch<E: EthSpec> {
    pub input: KZGVerifyBlobKZGProofBatchInput,
    pub output: Option<bool>,
    #[serde(skip)]
    _phantom: PhantomData<E>,
}

impl<E: EthSpec> LoadCase for KZGVerifyBlobKZGProofBatch<E> {
    // Each case directory contains a single `data.yaml` describing input/output.
    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
        decode::yaml_decode_file(path.join("data.yaml").as_path())
    }
}

impl<E: EthSpec> Case for KZGVerifyBlobKZGProofBatch<E> {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        // KZG tests only run for the Deneb fork.
        fork_name == ForkName::Deneb
    }

    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
        // Each `collect::<Result<…>>` short-circuits on the first malformed entry.
        let parse_input = |input: &KZGVerifyBlobKZGProofBatchInput| -> Result<_, Error> {
            let blobs = input
                .blobs
                .iter()
                .map(|s| parse_blob::<E>(s))
                .collect::<Result<Vec<_>, _>>()?;
            let commitments = input
                .commitments
                .iter()
                .map(|s| parse_commitment(s))
                .collect::<Result<Vec<_>, _>>()?;
            let proofs = input
                .proofs
                .iter()
                .map(|s| parse_proof(s))
                .collect::<Result<Vec<_>, _>>()?;
            // Tuple order is (commitments, blobs, proofs) — it matches the
            // destructuring and the `validate_blobs` argument order below.
            Ok((commitments, blobs, proofs))
        };

        let kzg = get_kzg::<E::Kzg>()?;
        let result = parse_input(&self.input).and_then(|(commitments, blobs, proofs)| {
            validate_blobs::<E>(&kzg, &commitments, &blobs, &proofs)
                .map_err(|e| Error::InternalError(format!("Failed to validate blobs: {:?}", e)))
        });

        compare_result::<bool, _>(&result, &self.output)
    }
}

View File

@@ -0,0 +1,53 @@
use super::*;
use crate::case_result::compare_result;
use beacon_chain::kzg_utils::verify_kzg_proof;
use serde_derive::Deserialize;
use std::marker::PhantomData;

/// Raw input for a `verify_kzg_proof` spec test: commitment, evaluation
/// point `z`, claimed value `y`, and proof, all hex strings.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct KZGVerifyKZGProofInput {
    pub commitment: String,
    pub z: String,
    pub y: String,
    pub proof: String,
}

/// Test case for the `verify_kzg_proof` KZG spec tests.
///
/// `output` is the expected verification verdict, or `None` when the vector
/// expects verification itself to error.
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec", deny_unknown_fields)]
pub struct KZGVerifyKZGProof<E: EthSpec> {
    pub input: KZGVerifyKZGProofInput,
    pub output: Option<bool>,
    #[serde(skip)]
    _phantom: PhantomData<E>,
}

impl<E: EthSpec> LoadCase for KZGVerifyKZGProof<E> {
    // Each case directory contains a single `data.yaml` describing input/output.
    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
        decode::yaml_decode_file(path.join("data.yaml").as_path())
    }
}

impl<E: EthSpec> Case for KZGVerifyKZGProof<E> {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        // KZG tests only run for the Deneb fork.
        fork_name == ForkName::Deneb
    }

    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
        let parse_input = |input: &KZGVerifyKZGProofInput| -> Result<_, Error> {
            let commitment = parse_commitment(&input.commitment)?;
            let z = parse_point(&input.z)?;
            let y = parse_point(&input.y)?;
            let proof = parse_proof(&input.proof)?;
            Ok((commitment, z, y, proof))
        };

        let kzg = get_kzg::<E::Kzg>()?;
        let result = parse_input(&self.input).and_then(|(commitment, z, y, proof)| {
            // Note the helper's argument order: the proof precedes the
            // evaluation point `z` and claimed value `y`.
            verify_kzg_proof::<E>(&kzg, commitment, proof, z, y)
                .map_err(|e| Error::InternalError(format!("Failed to validate proof: {:?}", e)))
        });

        compare_result::<bool, _>(&result, &self.output)
    }
}

View File

@@ -4,6 +4,7 @@ use crate::case_result::{check_state_diff, compare_beacon_state_results_without_
use crate::decode::{ssz_decode_file, ssz_decode_file_with, ssz_decode_state, yaml_decode_file};
use crate::testing_spec;
use serde_derive::Deserialize;
use ssz::Decode;
use state_processing::common::update_progressive_balances_cache::initialize_progressive_balances_cache;
use state_processing::epoch_cache::initialize_epoch_cache;
use state_processing::{
@@ -11,7 +12,7 @@ use state_processing::{
errors::BlockProcessingError,
process_block_header, process_execution_payload,
process_operations::{
altair, base, process_attester_slashings, process_bls_to_execution_changes,
altair_deneb, base, process_attester_slashings, process_bls_to_execution_changes,
process_deposits, process_exits, process_proposer_slashings,
},
process_sync_aggregate, process_withdrawals, VerifyBlockRoot, VerifySignatures,
@@ -21,7 +22,8 @@ use state_processing::{
use std::fmt::Debug;
use std::path::Path;
use types::{
Attestation, AttesterSlashing, BeaconBlock, BeaconState, BlindedPayload, ChainSpec, Deposit,
Attestation, AttesterSlashing, BeaconBlock, BeaconBlockBody, BeaconBlockBodyCapella,
BeaconBlockBodyDeneb, BeaconBlockBodyMerge, BeaconState, BlindedPayload, ChainSpec, Deposit,
EthSpec, ExecutionPayload, ForkName, FullPayload, ProposerSlashing, SignedBlsToExecutionChange,
SignedVoluntaryExit, SyncAggregate,
};
@@ -98,9 +100,19 @@ impl<E: EthSpec> Operation<E> for Attestation<E> {
&mut ctxt,
spec,
),
BeaconState::Altair(_) | BeaconState::Merge(_) | BeaconState::Capella(_) => {
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Deneb(_) => {
initialize_progressive_balances_cache(state, None, spec)?;
altair::process_attestation(state, self, 0, &mut ctxt, VerifySignatures::True, spec)
altair_deneb::process_attestation(
state,
self,
0,
&mut ctxt,
VerifySignatures::True,
spec,
)
}
}
}
@@ -262,13 +274,13 @@ impl<E: EthSpec> Operation<E> for SyncAggregate<E> {
}
}
impl<E: EthSpec> Operation<E> for FullPayload<E> {
impl<E: EthSpec> Operation<E> for BeaconBlockBody<E, FullPayload<E>> {
fn handler_name() -> String {
"execution_payload".into()
}
fn filename() -> String {
"execution_payload.ssz_snappy".into()
"body.ssz_snappy".into()
}
fn is_enabled_for_fork(fork_name: ForkName) -> bool {
@@ -277,9 +289,13 @@ impl<E: EthSpec> Operation<E> for FullPayload<E> {
fn decode(path: &Path, fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file_with(path, |bytes| {
ExecutionPayload::from_ssz_bytes(bytes, fork_name)
Ok(match fork_name {
ForkName::Merge => BeaconBlockBody::Merge(<_>::from_ssz_bytes(bytes)?),
ForkName::Capella => BeaconBlockBody::Capella(<_>::from_ssz_bytes(bytes)?),
ForkName::Deneb => BeaconBlockBody::Deneb(<_>::from_ssz_bytes(bytes)?),
_ => panic!(),
})
})
.map(Into::into)
}
fn apply_to(
@@ -299,13 +315,13 @@ impl<E: EthSpec> Operation<E> for FullPayload<E> {
}
}
}
impl<E: EthSpec> Operation<E> for BlindedPayload<E> {
impl<E: EthSpec> Operation<E> for BeaconBlockBody<E, BlindedPayload<E>> {
fn handler_name() -> String {
"execution_payload".into()
}
fn filename() -> String {
"execution_payload.ssz_snappy".into()
"body.ssz_snappy".into()
}
fn is_enabled_for_fork(fork_name: ForkName) -> bool {
@@ -314,9 +330,22 @@ impl<E: EthSpec> Operation<E> for BlindedPayload<E> {
fn decode(path: &Path, fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file_with(path, |bytes| {
ExecutionPayload::from_ssz_bytes(bytes, fork_name)
Ok(match fork_name {
ForkName::Merge => {
let inner = <BeaconBlockBodyMerge<E, FullPayload<E>>>::from_ssz_bytes(bytes)?;
BeaconBlockBody::Merge(inner.clone_as_blinded())
}
ForkName::Capella => {
let inner = <BeaconBlockBodyCapella<E, FullPayload<E>>>::from_ssz_bytes(bytes)?;
BeaconBlockBody::Capella(inner.clone_as_blinded())
}
ForkName::Deneb => {
let inner = <BeaconBlockBodyDeneb<E, FullPayload<E>>>::from_ssz_bytes(bytes)?;
BeaconBlockBody::Deneb(inner.clone_as_blinded())
}
_ => panic!(),
})
})
.map(Into::into)
}
fn apply_to(

View File

@@ -47,6 +47,12 @@ impl<E: EthSpec> LoadCase for TransitionTest<E> {
spec.bellatrix_fork_epoch = Some(Epoch::new(0));
spec.capella_fork_epoch = Some(metadata.fork_epoch);
}
ForkName::Deneb => {
spec.altair_fork_epoch = Some(Epoch::new(0));
spec.bellatrix_fork_epoch = Some(Epoch::new(0));
spec.capella_fork_epoch = Some(Epoch::new(0));
spec.deneb_fork_epoch = Some(metadata.fork_epoch);
}
}
// Load blocks

View File

@@ -210,10 +210,6 @@ impl<T, E> SszStaticHandler<T, E> {
Self::for_forks(vec![ForkName::Altair])
}
pub fn altair_and_later() -> Self {
Self::for_forks(ForkName::list_all()[1..].to_vec())
}
pub fn merge_only() -> Self {
Self::for_forks(vec![ForkName::Merge])
}
@@ -222,9 +218,21 @@ impl<T, E> SszStaticHandler<T, E> {
Self::for_forks(vec![ForkName::Capella])
}
pub fn deneb_only() -> Self {
Self::for_forks(vec![ForkName::Deneb])
}
pub fn altair_and_later() -> Self {
Self::for_forks(ForkName::list_all()[1..].to_vec())
}
pub fn merge_and_later() -> Self {
Self::for_forks(ForkName::list_all()[2..].to_vec())
}
pub fn capella_and_later() -> Self {
Self::for_forks(ForkName::list_all()[3..].to_vec())
}
}
/// Handler for SSZ types that implement `CachedTreeHash`.
@@ -629,6 +637,126 @@ impl<E: EthSpec + TypeName> Handler for GenesisInitializationHandler<E> {
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct KZGBlobToKZGCommitmentHandler<E>(PhantomData<E>);
impl<E: EthSpec> Handler for KZGBlobToKZGCommitmentHandler<E> {
type Case = cases::KZGBlobToKZGCommitment<E>;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"kzg"
}
fn handler_name(&self) -> String {
"blob_to_kzg_commitment".into()
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct KZGComputeBlobKZGProofHandler<E>(PhantomData<E>);
impl<E: EthSpec> Handler for KZGComputeBlobKZGProofHandler<E> {
type Case = cases::KZGComputeBlobKZGProof<E>;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"kzg"
}
fn handler_name(&self) -> String {
"compute_blob_kzg_proof".into()
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct KZGComputeKZGProofHandler<E>(PhantomData<E>);
impl<E: EthSpec> Handler for KZGComputeKZGProofHandler<E> {
type Case = cases::KZGComputeKZGProof<E>;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"kzg"
}
fn handler_name(&self) -> String {
"compute_kzg_proof".into()
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct KZGVerifyBlobKZGProofHandler<E>(PhantomData<E>);
impl<E: EthSpec> Handler for KZGVerifyBlobKZGProofHandler<E> {
type Case = cases::KZGVerifyBlobKZGProof<E>;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"kzg"
}
fn handler_name(&self) -> String {
"verify_blob_kzg_proof".into()
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct KZGVerifyBlobKZGProofBatchHandler<E>(PhantomData<E>);
impl<E: EthSpec> Handler for KZGVerifyBlobKZGProofBatchHandler<E> {
type Case = cases::KZGVerifyBlobKZGProofBatch<E>;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"kzg"
}
fn handler_name(&self) -> String {
"verify_blob_kzg_proof_batch".into()
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct KZGVerifyKZGProofHandler<E>(PhantomData<E>);
impl<E: EthSpec> Handler for KZGVerifyKZGProofHandler<E> {
type Case = cases::KZGVerifyKZGProof<E>;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"kzg"
}
fn handler_name(&self) -> String {
"verify_kzg_proof".into()
}
}
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct MerkleProofValidityHandler<E>(PhantomData<E>);
@@ -654,7 +782,7 @@ impl<E: EthSpec + TypeName> Handler for MerkleProofValidityHandler<E> {
// spec.
//
// https://github.com/sigp/lighthouse/issues/4022
&& fork_name != ForkName::Capella
&& fork_name != ForkName::Capella && fork_name != ForkName::Deneb
}
}

View File

@@ -1,4 +1,5 @@
//! Mapping from types to canonical string identifiers used in testing.
use types::blob_sidecar::BlobIdentifier;
use types::historical_summary::HistoricalSummary;
use types::*;
@@ -47,8 +48,11 @@ type_name_generic!(BeaconBlockBodyBase, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyAltair, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyMerge, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyCapella, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyDeneb, "BeaconBlockBody");
type_name!(BeaconBlockHeader);
type_name_generic!(BeaconState);
type_name!(BlobIdentifier);
type_name_generic!(BlobSidecar);
type_name!(Checkpoint);
type_name_generic!(ContributionAndProof);
type_name!(Deposit);
@@ -58,10 +62,12 @@ type_name!(Eth1Data);
type_name_generic!(ExecutionPayload);
type_name_generic!(ExecutionPayloadMerge, "ExecutionPayload");
type_name_generic!(ExecutionPayloadCapella, "ExecutionPayload");
type_name_generic!(ExecutionPayloadDeneb, "ExecutionPayload");
type_name_generic!(FullPayload, "ExecutionPayload");
type_name_generic!(ExecutionPayloadHeader);
type_name_generic!(ExecutionPayloadHeaderMerge, "ExecutionPayloadHeader");
type_name_generic!(ExecutionPayloadHeaderCapella, "ExecutionPayloadHeader");
type_name_generic!(ExecutionPayloadHeaderDeneb, "ExecutionPayloadHeader");
type_name_generic!(BlindedPayload, "ExecutionPayloadHeader");
type_name!(Fork);
type_name!(ForkData);
@@ -72,6 +78,7 @@ type_name!(ProposerSlashing);
type_name_generic!(SignedAggregateAndProof);
type_name_generic!(SignedBeaconBlock);
type_name!(SignedBeaconBlockHeader);
type_name_generic!(SignedBlobSidecar);
type_name_generic!(SignedContributionAndProof);
type_name!(SignedVoluntaryExit);
type_name!(SigningData);