Add PeerDAS KZG lib integration (construction & KZG verification) (#6212)

* Add peerdas KZG library and use it for data column construction and cell kzg verification (#5701, #5941, #6118, #6179)

Co-authored-by: kevaundray <kevtheappdev@gmail.com>

* Update `rust_eth_kzg` crate to published version.

* Update kzg metrics buckets.

* Merge branch 'unstable' into peerdas-kzg

* Update KZG version to fix windows mem allocation.

* Refactor common logic from build sidecar and reconstruction. Remove unnecessary `needless_lifetimes` lint exception.

Co-authored-by: realbigsean <sean@sigmaprime.io>

* Copy existing trusted setup into `PeerDASTrustedSetup` for consistency and maintain `--trusted-setup` functionality.

* Merge branch 'unstable' into peerdas-kzg

* Merge branch 'peerdas-kzg' of github.com:jimmygchen/lighthouse into peerdas-kzg

* Merge branch 'unstable' into peerdas-kzg

* Merge branch 'unstable' into peerdas-kzg

* Load PeerDAS KZG only if PeerDAS is enabled.
This commit is contained in:
Jimmy Chen
2024-08-13 10:16:17 +10:00
committed by GitHub
parent ff15c78ced
commit 6dc614fede
11 changed files with 627 additions and 284 deletions

93
Cargo.lock generated
View File

@@ -1029,6 +1029,22 @@ dependencies = [
"zeroize",
]
[[package]]
name = "blstrs"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a8a8ed6fefbeef4a8c7b460e4110e12c5e22a5b7cf32621aae6ad650c4dcf29"
dependencies = [
"blst",
"byte-slice-cast",
"ff 0.13.0",
"group 0.13.0",
"pairing",
"rand_core",
"serde",
"subtle",
]
[[package]]
name = "bollard-stubs"
version = "1.42.0-rc.3"
@@ -1514,6 +1530,52 @@ dependencies = [
"libc",
]
[[package]]
name = "crate_crypto_internal_eth_kzg_bls12_381"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8761b04feb6031ffaf93933c955a0c91a2f3ce15dcac6b9586d2487fe55abf0b"
dependencies = [
"blst",
"blstrs",
"ff 0.13.0",
"group 0.13.0",
"pairing",
"rayon",
]
[[package]]
name = "crate_crypto_internal_eth_kzg_erasure_codes"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eca410dff79524a2babe8a0d9ab5fdce21b16808f8189eb8b6da6159681f8de2"
dependencies = [
"crate_crypto_internal_eth_kzg_bls12_381",
"crate_crypto_internal_eth_kzg_polynomial",
]
[[package]]
name = "crate_crypto_internal_eth_kzg_polynomial"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68be1a5f16bc1c09254dec5209e22278d7d395284443576886a5890e7131234f"
dependencies = [
"crate_crypto_internal_eth_kzg_bls12_381",
]
[[package]]
name = "crate_crypto_kzg_multi_open_fk20"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "702fe5b687fe8c5a46851b8bc624ad49603a339dc93c920d4f7e61592c201ee8"
dependencies = [
"crate_crypto_internal_eth_kzg_bls12_381",
"crate_crypto_internal_eth_kzg_polynomial",
"hex",
"rayon",
"sha2 0.10.8",
]
[[package]]
name = "crc32fast"
version = "1.4.2"
@@ -3001,6 +3063,7 @@ version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449"
dependencies = [
"bitvec 1.0.1",
"rand_core",
"subtle",
]
@@ -3431,7 +3494,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
dependencies = [
"ff 0.13.0",
"rand",
"rand_core",
"rand_xorshift",
"subtle",
]
@@ -4342,13 +4407,17 @@ version = "0.1.0"
dependencies = [
"arbitrary",
"c-kzg",
"criterion",
"derivative",
"eth2_network_config",
"ethereum_hashing",
"ethereum_serde_utils",
"ethereum_ssz",
"ethereum_ssz_derive",
"hex",
"rust_eth_kzg",
"serde",
"serde_json",
"tree_hash",
]
@@ -5897,6 +5966,15 @@ dependencies = [
"sha2 0.10.8",
]
[[package]]
name = "pairing"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81fec4625e73cf41ef4bb6846cafa6d44736525f442ba45e407c4a000a13996f"
dependencies = [
"group 0.13.0",
]
[[package]]
name = "parity-scale-codec"
version = "2.3.1"
@@ -7015,6 +7093,21 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rust_eth_kzg"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "013a850c7e131a8f9651ffbb151dc33240234f21dd357b692bd5ff4cdc84bf9a"
dependencies = [
"crate_crypto_internal_eth_kzg_bls12_381",
"crate_crypto_internal_eth_kzg_erasure_codes",
"crate_crypto_kzg_multi_open_fk20",
"hex",
"rayon",
"serde",
"serde_json",
]
[[package]]
name = "rustc-demangle"
version = "0.1.24"

View File

@@ -114,6 +114,7 @@ delay_map = "0.3"
derivative = "2"
dirs = "3"
either = "1.9"
rust_eth_kzg = "0.3.4"
discv5 = { version = "0.4.1", features = ["libp2p"] }
env_logger = "0.9"
error-chain = "0.12"

View File

@@ -2,7 +2,8 @@ use crate::block_verification::{
cheap_state_advance_to_obtain_committees, get_validator_pubkey_cache, process_block_slash_info,
BlockSlashInfo,
};
use crate::{BeaconChain, BeaconChainError, BeaconChainTypes};
use crate::kzg_utils::validate_data_columns;
use crate::{metrics, BeaconChain, BeaconChainError, BeaconChainTypes};
use derivative::Derivative;
use fork_choice::ProtoBlock;
use kzg::{Error as KzgError, Kzg};
@@ -11,6 +12,7 @@ use slasher::test_utils::E;
use slog::debug;
use slot_clock::SlotClock;
use ssz_derive::{Decode, Encode};
use std::iter;
use std::sync::Arc;
use types::data_column_sidecar::{ColumnIndex, DataColumnIdentifier};
use types::{
@@ -255,9 +257,10 @@ impl<E: EthSpec> KzgVerifiedCustodyDataColumn<E> {
/// Returns an error if the kzg verification check fails.
pub fn verify_kzg_for_data_column<E: EthSpec>(
    data_column: Arc<DataColumnSidecar<E>>,
    kzg: &Kzg,
) -> Result<KzgVerifiedDataColumn<E>, KzgError> {
    // Time single-column verification separately from the batch path.
    let _timer = metrics::start_timer(&metrics::KZG_VERIFICATION_DATA_COLUMN_SINGLE_TIMES);
    // Reuse the batch-verification path with a single-element iterator.
    validate_data_columns(kzg, iter::once(&data_column))?;
    Ok(KzgVerifiedDataColumn { data: data_column })
}
@@ -267,13 +270,14 @@ pub fn verify_kzg_for_data_column<E: EthSpec>(
/// Note: This function should be preferred over calling `verify_kzg_for_data_column`
/// in a loop since this function kzg verifies a list of data columns more efficiently.
pub fn verify_kzg_for_data_column_list<'a, E: EthSpec, I>(
    data_column_iter: I,
    kzg: &'a Kzg,
) -> Result<(), KzgError>
where
    I: Iterator<Item = &'a Arc<DataColumnSidecar<E>>> + Clone,
{
    // Time the batched verification path.
    let _timer = metrics::start_timer(&metrics::KZG_VERIFICATION_DATA_COLUMN_BATCH_TIMES);
    validate_data_columns(kzg, data_column_iter)?;
    Ok(())
}

View File

@@ -1,5 +1,15 @@
use kzg::{Blob as KzgBlob, Error as KzgError, Kzg};
use types::{Blob, EthSpec, Hash256, KzgCommitment, KzgProof};
use kzg::{
Blob as KzgBlob, Bytes48, CellRef as KzgCellRef, CellsAndKzgProofs, Error as KzgError, Kzg,
};
use rayon::prelude::*;
use ssz_types::FixedVector;
use std::sync::Arc;
use types::beacon_block_body::KzgCommitments;
use types::data_column_sidecar::{Cell, DataColumn, DataColumnSidecarError};
use types::{
Blob, BlobsList, ChainSpec, ColumnIndex, DataColumnSidecar, DataColumnSidecarList, EthSpec,
Hash256, KzgCommitment, KzgProof, KzgProofs, SignedBeaconBlock, SignedBeaconBlockHeader,
};
/// Converts a blob ssz List object to an array to be used with the kzg
/// crypto library.
@@ -7,6 +17,15 @@ fn ssz_blob_to_crypto_blob<E: EthSpec>(blob: &Blob<E>) -> Result<KzgBlob, KzgErr
KzgBlob::from_bytes(blob.as_ref()).map_err(Into::into)
}
/// Converts a cell ssz List object to an array to be used with the kzg
/// crypto library.
fn ssz_cell_to_crypto_cell<E: EthSpec>(cell: &Cell<E>) -> Result<KzgCellRef, KzgError> {
    let cell_bytes: &[u8] = cell.as_ref();
    Ok(cell_bytes
        .try_into()
        // `Cell` is a `FixedVector` whose length is fixed at compile time, so the
        // slice-to-array conversion cannot fail. (The previous message tried to
        // interpolate `BYTES_PER_CELL`, but `expect` takes a plain `&str`, so the
        // placeholder was emitted literally.)
        .expect("expected cell to have size BYTES_PER_CELL; this is guaranteed by the `FixedVector` type"))
}
/// Validate a single blob-commitment-proof triplet from a `BlobSidecar`.
pub fn validate_blob<E: EthSpec>(
kzg: &Kzg,
@@ -19,6 +38,50 @@ pub fn validate_blob<E: EthSpec>(
kzg.verify_blob_kzg_proof(&kzg_blob, kzg_commitment, kzg_proof)
}
/// Validate a batch of `DataColumnSidecar`.
pub fn validate_data_columns<'a, E: EthSpec, I>(
kzg: &Kzg,
data_column_iter: I,
) -> Result<(), KzgError>
where
I: Iterator<Item = &'a Arc<DataColumnSidecar<E>>> + Clone,
{
let cells = data_column_iter
.clone()
.flat_map(|data_column| data_column.column.iter().map(ssz_cell_to_crypto_cell::<E>))
.collect::<Result<Vec<_>, KzgError>>()?;
let proofs = data_column_iter
.clone()
.flat_map(|data_column| {
data_column
.kzg_proofs
.iter()
.map(|&proof| Bytes48::from(proof))
})
.collect::<Vec<_>>();
let column_indices = data_column_iter
.clone()
.flat_map(|data_column| {
let col_index = data_column.index;
data_column.column.iter().map(move |_| col_index)
})
.collect::<Vec<ColumnIndex>>();
let commitments = data_column_iter
.clone()
.flat_map(|data_column| {
data_column
.kzg_commitments
.iter()
.map(|&commitment| Bytes48::from(commitment))
})
.collect::<Vec<_>>();
kzg.verify_cell_proof_batch(&cells, &proofs, column_indices, &commitments)
}
/// Validate a batch of blob-commitment-proof triplets from multiple `BlobSidecars`.
pub fn validate_blobs<E: EthSpec>(
kzg: &Kzg,
@@ -76,3 +139,264 @@ pub fn verify_kzg_proof<E: EthSpec>(
) -> Result<bool, KzgError> {
kzg.verify_kzg_proof(kzg_commitment, &z.0.into(), &y.0.into(), kzg_proof)
}
/// Build data column sidecars from a signed beacon block and its blobs.
pub fn blobs_to_data_column_sidecars<E: EthSpec>(
    blobs: &BlobsList<E>,
    block: &SignedBeaconBlock<E>,
    kzg: &Kzg,
    spec: &ChainSpec,
) -> Result<DataColumnSidecarList<E>, DataColumnSidecarError> {
    // No blobs, no columns.
    if blobs.is_empty() {
        return Ok(vec![]);
    }

    // Gather the pieces of the sidecar that come straight from the block.
    let signed_block_header = block.signed_block_header();
    let kzg_commitments = block
        .message()
        .body()
        .blob_kzg_commitments()
        .map_err(|_err| DataColumnSidecarError::PreDeneb)?;
    let kzg_commitments_inclusion_proof = block.message().body().kzg_commitments_merkle_proof()?;

    // NOTE: assumes blob sidecars are ordered by index
    // Extend each blob into its cells and proofs, in parallel across blobs.
    let cells_and_proofs = blobs
        .into_par_iter()
        .map(|blob| {
            let kzg_blob = blob
                .as_ref()
                .try_into()
                .expect("blob should have a guaranteed size due to FixedVector");
            kzg.compute_cells_and_proofs(kzg_blob)
        })
        .collect::<Result<Vec<_>, KzgError>>()?;

    build_data_column_sidecars(
        kzg_commitments.clone(),
        kzg_commitments_inclusion_proof,
        signed_block_header,
        cells_and_proofs,
        spec,
    )
    .map_err(DataColumnSidecarError::BuildSidecarFailed)
}
/// Assemble `DataColumnSidecar`s from per-blob cells and proofs.
///
/// `blob_cells_and_proofs_vec` holds, for each blob, its full set of extended cells and
/// matching KZG proofs. The cells are transposed into per-column lists: column `col`
/// receives the `col`-th cell (and proof) of every blob, so each sidecar ends up with
/// one cell per blob.
fn build_data_column_sidecars<E: EthSpec>(
    kzg_commitments: KzgCommitments<E>,
    kzg_commitments_inclusion_proof: FixedVector<Hash256, E::KzgCommitmentsInclusionProofDepth>,
    signed_block_header: SignedBeaconBlockHeader,
    blob_cells_and_proofs_vec: Vec<CellsAndKzgProofs>,
    spec: &ChainSpec,
) -> Result<DataColumnSidecarList<E>, String> {
    let number_of_columns = spec.number_of_columns;
    // One accumulator per column; each can hold at most one cell per blob.
    let mut columns = vec![Vec::with_capacity(E::max_blobs_per_block()); number_of_columns];
    let mut column_kzg_proofs =
        vec![Vec::with_capacity(E::max_blobs_per_block()); number_of_columns];

    for (blob_cells, blob_cell_proofs) in blob_cells_and_proofs_vec {
        // we iterate over each column, and we construct the column from "top to bottom",
        // pushing on the cell and the corresponding proof at each column index. we do this for
        // each blob (i.e. the outer loop).
        for col in 0..number_of_columns {
            let cell = blob_cells
                .get(col)
                .ok_or(format!("Missing blob cell at index {col}"))?;
            // Convert the crypto-library cell into the SSZ `Cell` type.
            let cell: Vec<u8> = cell.to_vec();
            let cell = Cell::<E>::from(cell);

            let proof = blob_cell_proofs
                .get(col)
                .ok_or(format!("Missing blob cell KZG proof at index {col}"))?;

            let column = columns
                .get_mut(col)
                .ok_or(format!("Missing data column at index {col}"))?;
            let column_proofs = column_kzg_proofs
                .get_mut(col)
                .ok_or(format!("Missing data column proofs at index {col}"))?;

            column.push(cell);
            column_proofs.push(*proof);
        }
    }

    // Every sidecar shares the block-level commitments, header and inclusion proof;
    // only the column data, proofs and index differ.
    let sidecars: Vec<Arc<DataColumnSidecar<E>>> = columns
        .into_iter()
        .zip(column_kzg_proofs)
        .enumerate()
        .map(|(index, (col, proofs))| {
            Arc::new(DataColumnSidecar {
                index: index as u64,
                column: DataColumn::<E>::from(col),
                kzg_commitments: kzg_commitments.clone(),
                kzg_proofs: KzgProofs::<E>::from(proofs),
                signed_block_header: signed_block_header.clone(),
                kzg_commitments_inclusion_proof: kzg_commitments_inclusion_proof.clone(),
            })
        })
        .collect();
    Ok(sidecars)
}
/// Reconstruct all data columns from a subset of data column sidecars (requires at least 50%).
///
/// The supplied sidecars must all belong to the same block: the commitments, header and
/// inclusion proof of the first sidecar are reused for every reconstructed column.
pub fn reconstruct_data_columns<E: EthSpec>(
    kzg: &Kzg,
    data_columns: &[Arc<DataColumnSidecar<E>>],
    spec: &ChainSpec,
) -> Result<DataColumnSidecarList<E>, KzgError> {
    let first_data_column = data_columns
        .first()
        .ok_or(KzgError::InconsistentArrayLength(
            "data_columns should have at least one element".to_string(),
        ))?;
    // One row per blob commitment in the block.
    let num_of_blobs = first_data_column.kzg_commitments.len();

    // Recover each blob row in parallel: collect the available cells for that row
    // across the supplied columns (tagged with their column index), then recover
    // the full extended cell set and its proofs.
    let blob_cells_and_proofs_vec =
        (0..num_of_blobs)
            .into_par_iter()
            .map(|row_index| {
                let mut cells: Vec<KzgCellRef> = vec![];
                let mut cell_ids: Vec<u64> = vec![];
                for data_column in data_columns {
                    let cell = data_column.column.get(row_index).ok_or(
                        KzgError::InconsistentArrayLength(format!(
                            "Missing data column at index {row_index}"
                        )),
                    )?;

                    cells.push(ssz_cell_to_crypto_cell::<E>(cell)?);
                    cell_ids.push(data_column.index);
                }
                kzg.recover_cells_and_compute_kzg_proofs(&cell_ids, &cells)
            })
            .collect::<Result<Vec<_>, KzgError>>()?;

    // Clone sidecar elements from existing data column, no need to re-compute
    build_data_column_sidecars(
        first_data_column.kzg_commitments.clone(),
        first_data_column.kzg_commitments_inclusion_proof.clone(),
        first_data_column.signed_block_header.clone(),
        blob_cells_and_proofs_vec,
        spec,
    )
    .map_err(KzgError::ReconstructFailed)
}
#[cfg(test)]
mod test {
    use crate::kzg_utils::{blobs_to_data_column_sidecars, reconstruct_data_columns};
    use bls::Signature;
    use eth2_network_config::TRUSTED_SETUP_BYTES;
    use kzg::{Kzg, KzgCommitment, TrustedSetup};
    use types::{
        beacon_block_body::KzgCommitments, BeaconBlock, BeaconBlockDeneb, Blob, BlobsList,
        ChainSpec, EmptyBlock, EthSpec, MainnetEthSpec, SignedBeaconBlock,
    };

    type E = MainnetEthSpec;

    // Loading and initializing PeerDAS KZG is expensive and slow, so we group the tests
    // together and only load it once.
    #[test]
    fn test_build_data_columns_sidecars() {
        let spec = E::default_spec();
        let kzg = get_kzg();
        test_build_data_columns_empty(&kzg, &spec);
        test_build_data_columns(&kzg, &spec);
        test_reconstruct_data_columns(&kzg, &spec);
    }

    // Zero blobs must produce zero column sidecars.
    #[track_caller]
    fn test_build_data_columns_empty(kzg: &Kzg, spec: &ChainSpec) {
        let num_of_blobs = 0;
        let (signed_block, blob_sidecars) = create_test_block_and_blobs::<E>(num_of_blobs, spec);
        let column_sidecars =
            blobs_to_data_column_sidecars(&blob_sidecars, &signed_block, kzg, spec).unwrap();
        assert!(column_sidecars.is_empty());
    }

    // Checks the shape and block-derived metadata of every constructed sidecar.
    #[track_caller]
    fn test_build_data_columns(kzg: &Kzg, spec: &ChainSpec) {
        let num_of_blobs = 6;
        let (signed_block, blob_sidecars) = create_test_block_and_blobs::<E>(num_of_blobs, spec);
        let column_sidecars =
            blobs_to_data_column_sidecars(&blob_sidecars, &signed_block, kzg, spec).unwrap();

        let block_kzg_commitments = signed_block
            .message()
            .body()
            .blob_kzg_commitments()
            .unwrap()
            .clone();
        let block_kzg_commitments_inclusion_proof = signed_block
            .message()
            .body()
            .kzg_commitments_merkle_proof()
            .unwrap();

        assert_eq!(column_sidecars.len(), spec.number_of_columns);
        for (idx, col_sidecar) in column_sidecars.iter().enumerate() {
            assert_eq!(col_sidecar.index, idx as u64);

            // One cell/proof/commitment per blob in every column.
            assert_eq!(col_sidecar.kzg_commitments.len(), num_of_blobs);
            assert_eq!(col_sidecar.column.len(), num_of_blobs);
            assert_eq!(col_sidecar.kzg_proofs.len(), num_of_blobs);

            assert_eq!(col_sidecar.kzg_commitments, block_kzg_commitments);
            assert_eq!(
                col_sidecar.kzg_commitments_inclusion_proof,
                block_kzg_commitments_inclusion_proof
            );
            assert!(col_sidecar.verify_inclusion_proof());
        }
    }

    // Reconstructing from the first half of the columns must reproduce all of them.
    #[track_caller]
    fn test_reconstruct_data_columns(kzg: &Kzg, spec: &ChainSpec) {
        let num_of_blobs = 6;
        let (signed_block, blob_sidecars) = create_test_block_and_blobs::<E>(num_of_blobs, spec);
        let column_sidecars =
            blobs_to_data_column_sidecars(&blob_sidecars, &signed_block, kzg, spec).unwrap();

        // Now reconstruct
        let reconstructed_columns = reconstruct_data_columns(
            kzg,
            &column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2],
            spec,
        )
        .unwrap();

        for i in 0..spec.number_of_columns {
            assert_eq!(reconstructed_columns.get(i), column_sidecars.get(i), "{i}");
        }
    }

    // Builds a DAS-enabled `Kzg` from the bundled trusted setup bytes.
    fn get_kzg() -> Kzg {
        let trusted_setup: TrustedSetup = serde_json::from_reader(TRUSTED_SETUP_BYTES)
            .map_err(|e| format!("Unable to read trusted setup file: {}", e))
            .expect("should have trusted setup");
        Kzg::new_from_trusted_setup_das_enabled(trusted_setup).expect("should create kzg")
    }

    // Creates an empty Deneb block carrying `num_of_blobs` placeholder commitments,
    // plus the matching list of default (zeroed) blobs.
    fn create_test_block_and_blobs<E: EthSpec>(
        num_of_blobs: usize,
        spec: &ChainSpec,
    ) -> (SignedBeaconBlock<E>, BlobsList<E>) {
        let mut block = BeaconBlock::Deneb(BeaconBlockDeneb::empty(spec));
        let mut body = block.body_mut();
        let blob_kzg_commitments = body.blob_kzg_commitments_mut().unwrap();
        *blob_kzg_commitments =
            KzgCommitments::<E>::new(vec![KzgCommitment::empty_for_testing(); num_of_blobs])
                .unwrap();

        let signed_block = SignedBeaconBlock::from_block(block, Signature::empty());

        let blobs = (0..num_of_blobs)
            .map(|_| Blob::<E>::default())
            .collect::<Vec<_>>()
            .into();
        (signed_block, blobs)
    }
}

View File

@@ -1645,6 +1645,13 @@ pub static BLOB_SIDECAR_INCLUSION_PROOF_COMPUTATION: LazyLock<Result<Histogram>>
"Time taken to compute blob sidecar inclusion proof",
)
});
// Buckets span 40ms–1s for end-to-end data column sidecar construction.
pub static DATA_COLUMN_SIDECAR_COMPUTATION: LazyLock<Result<Histogram>> = LazyLock::new(|| {
    try_create_histogram_with_buckets(
        "data_column_sidecar_computation_seconds",
        "Time taken to compute data column sidecar, including cells, proofs and inclusion proof",
        Ok(vec![0.04, 0.05, 0.1, 0.2, 0.3, 0.5, 0.7, 1.0]),
    )
});
pub static DATA_COLUMN_SIDECAR_PROCESSING_REQUESTS: LazyLock<Result<IntCounter>> =
LazyLock::new(|| {
try_create_int_counter(
@@ -1785,6 +1792,26 @@ pub static KZG_VERIFICATION_BATCH_TIMES: LazyLock<Result<Histogram>> = LazyLock:
"Runtime of batched kzg verification",
)
});
// Buckets span 0.5ms–50ms for verifying a single data column.
pub static KZG_VERIFICATION_DATA_COLUMN_SINGLE_TIMES: LazyLock<Result<Histogram>> =
    LazyLock::new(|| {
        try_create_histogram_with_buckets(
            "kzg_verification_data_column_single_seconds",
            "Runtime of single data column kzg verification",
            Ok(vec![
                0.0005, 0.001, 0.0015, 0.002, 0.003, 0.004, 0.005, 0.007, 0.01, 0.02, 0.05,
            ]),
        )
    });

// Buckets span 2ms–70ms for verifying a batch of data columns in one call.
pub static KZG_VERIFICATION_DATA_COLUMN_BATCH_TIMES: LazyLock<Result<Histogram>> =
    LazyLock::new(|| {
        try_create_histogram_with_buckets(
            "kzg_verification_data_column_batch_seconds",
            "Runtime of batched data column kzg verification",
            Ok(vec![
                0.002, 0.004, 0.006, 0.008, 0.01, 0.012, 0.015, 0.02, 0.03, 0.05, 0.07,
            ]),
        )
    });
pub static BLOCK_PRODUCTION_BLOBS_VERIFICATION_TIMES: LazyLock<Result<Histogram>> = LazyLock::new(
|| {

View File

@@ -10,7 +10,6 @@ use beacon_chain::graffiti_calculator::start_engine_version_cache_refresh_servic
use beacon_chain::otb_verification_service::start_otb_verification_service;
use beacon_chain::proposer_prep_service::start_proposer_prep_service;
use beacon_chain::schema_change::migrate_schema;
use beacon_chain::LightClientProducerEvent;
use beacon_chain::{
builder::{BeaconChainBuilder, Witness},
eth1_chain::{CachingEth1Backend, Eth1Chain},
@@ -19,6 +18,7 @@ use beacon_chain::{
store::{HotColdDB, ItemStore, LevelDB, StoreConfig},
BeaconChain, BeaconChainTypes, Eth1ChainBackend, MigratorConfig, ServerSentEventHandler,
};
use beacon_chain::{Kzg, LightClientProducerEvent};
use beacon_processor::{BeaconProcessor, BeaconProcessorChannels};
use beacon_processor::{BeaconProcessorConfig, BeaconProcessorQueueLengths};
use environment::RuntimeContext;
@@ -505,7 +505,7 @@ where
deposit_snapshot.and_then(|snapshot| match Eth1Service::from_deposit_snapshot(
config.eth1,
context.log().clone(),
spec,
spec.clone(),
&snapshot,
) {
Ok(service) => {
@@ -624,12 +624,15 @@ where
};
let beacon_chain_builder = if let Some(trusted_setup) = config.trusted_setup {
let kzg = trusted_setup
.try_into()
.map(Arc::new)
.map(Some)
.map_err(|e| format!("Failed to load trusted setup: {:?}", e))?;
beacon_chain_builder.kzg(kzg)
let kzg_err_msg = |e| format!("Failed to load trusted setup: {:?}", e);
let kzg = if spec.is_peer_das_scheduled() {
Kzg::new_from_trusted_setup_das_enabled(trusted_setup).map_err(kzg_err_msg)?
} else {
Kzg::new_from_trusted_setup(trusted_setup).map_err(kzg_err_msg)?
};
beacon_chain_builder.kzg(Some(Arc::new(kzg)))
} else {
beacon_chain_builder
};

View File

@@ -1,17 +1,12 @@
use crate::beacon_block_body::{KzgCommitments, BLOB_KZG_COMMITMENTS_INDEX};
use crate::test_utils::TestRandom;
use crate::{
BeaconBlockHeader, ChainSpec, EthSpec, Hash256, KzgProofs, SignedBeaconBlock,
SignedBeaconBlockHeader, Slot,
};
use crate::{BeaconStateError, BlobsList};
use crate::BeaconStateError;
use crate::{BeaconBlockHeader, EthSpec, Hash256, KzgProofs, SignedBeaconBlockHeader, Slot};
use bls::Signature;
use derivative::Derivative;
use kzg::Kzg;
use kzg::{Blob as KzgBlob, Cell as KzgCell, Error as KzgError};
use kzg::Error as KzgError;
use kzg::{KzgCommitment, KzgProof};
use merkle_proof::verify_merkle_proof;
use rayon::prelude::*;
use safe_arith::ArithError;
use serde::{Deserialize, Serialize};
use ssz::Encode;
@@ -60,7 +55,7 @@ pub struct DataColumnSidecar<E: EthSpec> {
pub index: ColumnIndex,
#[serde(with = "ssz_types::serde_utils::list_of_hex_fixed_vec")]
pub column: DataColumn<E>,
/// All of the KZG commitments and proofs associated with the block, used for verifying sample cells.
/// All the KZG commitments and proofs associated with the block, used for verifying sample cells.
pub kzg_commitments: KzgCommitments<E>,
pub kzg_proofs: KzgProofs<E>,
pub signed_block_header: SignedBeaconBlockHeader,
@@ -98,197 +93,6 @@ impl<E: EthSpec> DataColumnSidecar<E> {
)
}
pub fn build_sidecars(
blobs: &BlobsList<E>,
block: &SignedBeaconBlock<E>,
kzg: &Kzg,
spec: &ChainSpec,
) -> Result<DataColumnSidecarList<E>, DataColumnSidecarError> {
let number_of_columns = spec.number_of_columns;
if blobs.is_empty() {
return Ok(vec![]);
}
let kzg_commitments = block
.message()
.body()
.blob_kzg_commitments()
.map_err(|_err| DataColumnSidecarError::PreDeneb)?;
let kzg_commitments_inclusion_proof =
block.message().body().kzg_commitments_merkle_proof()?;
let signed_block_header = block.signed_block_header();
let mut columns = vec![Vec::with_capacity(E::max_blobs_per_block()); number_of_columns];
let mut column_kzg_proofs =
vec![Vec::with_capacity(E::max_blobs_per_block()); number_of_columns];
// NOTE: assumes blob sidecars are ordered by index
let blob_cells_and_proofs_vec = blobs
.into_par_iter()
.map(|blob| {
let blob = KzgBlob::from_bytes(blob).map_err(KzgError::from)?;
kzg.compute_cells_and_proofs(&blob)
})
.collect::<Result<Vec<_>, KzgError>>()?;
for (blob_cells, blob_cell_proofs) in blob_cells_and_proofs_vec {
// we iterate over each column, and we construct the column from "top to bottom",
// pushing on the cell and the corresponding proof at each column index. we do this for
// each blob (i.e. the outer loop).
for col in 0..number_of_columns {
let cell =
blob_cells
.get(col)
.ok_or(DataColumnSidecarError::InconsistentArrayLength(format!(
"Missing blob cell at index {col}"
)))?;
let cell: Vec<u8> = cell.into_inner().into_iter().collect();
let cell = Cell::<E>::from(cell);
let proof = blob_cell_proofs.get(col).ok_or(
DataColumnSidecarError::InconsistentArrayLength(format!(
"Missing blob cell KZG proof at index {col}"
)),
)?;
let column =
columns
.get_mut(col)
.ok_or(DataColumnSidecarError::InconsistentArrayLength(format!(
"Missing data column at index {col}"
)))?;
let column_proofs = column_kzg_proofs.get_mut(col).ok_or(
DataColumnSidecarError::InconsistentArrayLength(format!(
"Missing data column proofs at index {col}"
)),
)?;
column.push(cell);
column_proofs.push(*proof);
}
}
let sidecars: Vec<Arc<DataColumnSidecar<E>>> = columns
.into_iter()
.zip(column_kzg_proofs)
.enumerate()
.map(|(index, (col, proofs))| {
Arc::new(DataColumnSidecar {
index: index as u64,
column: DataColumn::<E>::from(col),
kzg_commitments: kzg_commitments.clone(),
kzg_proofs: KzgProofs::<E>::from(proofs),
signed_block_header: signed_block_header.clone(),
kzg_commitments_inclusion_proof: kzg_commitments_inclusion_proof.clone(),
})
})
.collect();
Ok(sidecars)
}
pub fn reconstruct(
kzg: &Kzg,
data_columns: &[Arc<Self>],
spec: &ChainSpec,
) -> Result<Vec<Arc<Self>>, KzgError> {
let number_of_columns = spec.number_of_columns;
let mut columns = vec![Vec::with_capacity(E::max_blobs_per_block()); number_of_columns];
let mut column_kzg_proofs =
vec![Vec::with_capacity(E::max_blobs_per_block()); number_of_columns];
let first_data_column = data_columns
.first()
.ok_or(KzgError::InconsistentArrayLength(
"data_columns should have at least one element".to_string(),
))?;
let num_of_blobs = first_data_column.kzg_commitments.len();
let blob_cells_and_proofs_vec = (0..num_of_blobs)
.into_par_iter()
.map(|row_index| {
let mut cells: Vec<KzgCell> = vec![];
let mut cell_ids: Vec<u64> = vec![];
for data_column in data_columns {
let cell = data_column.column.get(row_index).ok_or(
KzgError::InconsistentArrayLength(format!(
"Missing data column at index {row_index}"
)),
)?;
cells.push(ssz_cell_to_crypto_cell::<E>(cell)?);
cell_ids.push(data_column.index);
}
// recover_all_cells does not expect sorted
let all_cells = kzg.recover_all_cells(&cell_ids, &cells)?;
let blob = kzg.cells_to_blob(&all_cells)?;
// Note: This function computes all cells and proofs. According to Justin this is okay,
// computing a partial set may be more expensive and requires code paths that don't exist.
// Computing the blobs cells is technically unnecessary but very cheap. It's done here again
// for simplicity.
kzg.compute_cells_and_proofs(&blob)
})
.collect::<Result<Vec<_>, KzgError>>()?;
for (blob_cells, blob_cell_proofs) in blob_cells_and_proofs_vec {
// we iterate over each column, and we construct the column from "top to bottom",
// pushing on the cell and the corresponding proof at each column index. we do this for
// each blob (i.e. the outer loop).
for col in 0..number_of_columns {
let cell = blob_cells
.get(col)
.ok_or(KzgError::InconsistentArrayLength(format!(
"Missing blob cell at index {col}"
)))?;
let cell: Vec<u8> = cell.into_inner().into_iter().collect();
let cell = Cell::<E>::from(cell);
let proof = blob_cell_proofs
.get(col)
.ok_or(KzgError::InconsistentArrayLength(format!(
"Missing blob cell KZG proof at index {col}"
)))?;
let column = columns
.get_mut(col)
.ok_or(KzgError::InconsistentArrayLength(format!(
"Missing data column at index {col}"
)))?;
let column_proofs =
column_kzg_proofs
.get_mut(col)
.ok_or(KzgError::InconsistentArrayLength(format!(
"Missing data column proofs at index {col}"
)))?;
column.push(cell);
column_proofs.push(*proof);
}
}
// Clone sidecar elements from existing data column, no need to re-compute
let kzg_commitments = &first_data_column.kzg_commitments;
let signed_block_header = &first_data_column.signed_block_header;
let kzg_commitments_inclusion_proof = &first_data_column.kzg_commitments_inclusion_proof;
let sidecars: Vec<Arc<DataColumnSidecar<E>>> = columns
.into_iter()
.zip(column_kzg_proofs)
.enumerate()
.map(|(index, (col, proofs))| {
Arc::new(DataColumnSidecar {
index: index as u64,
column: DataColumn::<E>::from(col),
kzg_commitments: kzg_commitments.clone(),
kzg_proofs: KzgProofs::<E>::from(proofs),
signed_block_header: signed_block_header.clone(),
kzg_commitments_inclusion_proof: kzg_commitments_inclusion_proof.clone(),
})
})
.collect();
Ok(sidecars)
}
pub fn min_size() -> usize {
// min size is one cell
Self {
@@ -360,7 +164,7 @@ pub enum DataColumnSidecarError {
MissingBlobSidecars,
PreDeneb,
SszError(SszError),
InconsistentArrayLength(String),
BuildSidecarFailed(String),
}
impl From<ArithError> for DataColumnSidecarError {
@@ -386,9 +190,3 @@ impl From<SszError> for DataColumnSidecarError {
Self::SszError(e)
}
}
/// Converts a cell ssz List object to an array to be used with the kzg
/// crypto library.
fn ssz_cell_to_crypto_cell<E: EthSpec>(cell: &Cell<E>) -> Result<KzgCell, KzgError> {
KzgCell::from_bytes(cell.as_ref()).map_err(Into::into)
}

View File

@@ -17,3 +17,13 @@ ethereum_serde_utils = { workspace = true }
hex = { workspace = true }
ethereum_hashing = { workspace = true }
c-kzg = { workspace = true }
rust_eth_kzg = { workspace = true }
[dev-dependencies]
criterion = { workspace = true }
serde_json = { workspace = true }
eth2_network_config = { workspace = true }
[[bench]]
name = "benchmark"
harness = false

View File

@@ -0,0 +1,31 @@
use c_kzg::KzgSettings;
use criterion::{criterion_group, criterion_main, Criterion};
use eth2_network_config::TRUSTED_SETUP_BYTES;
use kzg::TrustedSetup;
use rust_eth_kzg::{DASContext, TrustedSetup as PeerDASTrustedSetup};
/// Compare the one-off initialization cost of the rust_eth_kzg (PeerDAS) context
/// against loading the c-kzg (EIP-4844) trusted setup.
pub fn bench_init_context(c: &mut Criterion) {
    // Parse the embedded trusted-setup JSON once, outside both benchmark loops, so
    // each benchmark measures only context/settings construction. (Previously the
    // c-kzg loop re-parsed the JSON on every iteration, inflating its numbers.)
    let trusted_setup: TrustedSetup = serde_json::from_reader(TRUSTED_SETUP_BYTES)
        .map_err(|e| format!("Unable to read trusted setup file: {}", e))
        .expect("should have trusted setup");

    // `format!` on a literal with no arguments is a needless allocation; pass &str.
    c.bench_function("Initialize context rust_eth_kzg", |b| {
        b.iter(|| {
            const NUM_THREADS: usize = 1;
            let trusted_setup = PeerDASTrustedSetup::from(&trusted_setup);
            DASContext::with_threads(&trusted_setup, NUM_THREADS)
        })
    });

    c.bench_function("Initialize context c-kzg (4844)", |b| {
        b.iter(|| {
            KzgSettings::load_trusted_setup(&trusted_setup.g1_points(), &trusted_setup.g2_points())
                .unwrap()
        })
    });
}

criterion_group!(benches, bench_init_context);
criterion_main!(benches);

View File

@@ -2,6 +2,7 @@ mod kzg_commitment;
mod kzg_proof;
mod trusted_setup;
use rust_eth_kzg::{CellIndex, DASContext};
use std::fmt::Debug;
pub use crate::{
@@ -9,18 +10,35 @@ pub use crate::{
kzg_proof::KzgProof,
trusted_setup::TrustedSetup,
};
pub use c_kzg::{
Blob, Bytes32, Bytes48, KzgSettings, BYTES_PER_BLOB, BYTES_PER_COMMITMENT,
BYTES_PER_FIELD_ELEMENT, BYTES_PER_PROOF, FIELD_ELEMENTS_PER_BLOB,
};
pub use rust_eth_kzg::{
constants::{BYTES_PER_CELL, CELLS_PER_EXT_BLOB},
Cell, CellIndex as CellID, CellRef, TrustedSetup as PeerDASTrustedSetup,
};
pub type CellsAndKzgProofs = ([Cell; CELLS_PER_EXT_BLOB], [KzgProof; CELLS_PER_EXT_BLOB]);
pub type KzgBlobRef<'a> = &'a [u8; BYTES_PER_BLOB];
/// Errors produced by the KZG wrapper, covering both the c-kzg (4844) backend and
/// the rust-eth-kzg (PeerDAS) backend.
#[derive(Debug)]
pub enum Error {
    /// An error from the underlying kzg library.
    Kzg(c_kzg::Error),
    /// A prover/verifier error from the rust-eth-kzg library.
    PeerDASKZG(rust_eth_kzg::Error),
    /// The kzg verification failed
    KzgVerificationFailed,
    /// Misc indexing error
    InconsistentArrayLength(String),
    /// Error reconstructing data columns.
    ReconstructFailed(String),
    /// Kzg was not initialized with PeerDAS enabled.
    DASContextUninitialized,
}
impl From<c_kzg::Error> for Error {
@@ -29,32 +47,11 @@ impl From<c_kzg::Error> for Error {
}
}
pub const CELLS_PER_EXT_BLOB: usize = 128;
// TODO(das): use proper crypto once ckzg merges das branch
#[allow(dead_code)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Cell {
bytes: [u8; 2048usize],
}
impl Cell {
pub fn from_bytes(b: &[u8]) -> Result<Self, Error> {
Ok(Self {
bytes: b
.try_into()
.map_err(|_| Error::Kzg(c_kzg::Error::MismatchLength("".to_owned())))?,
})
}
pub fn into_inner(self) -> [u8; 2048usize] {
self.bytes
}
}
/// A wrapper over a kzg library that holds the trusted setup parameters.
#[derive(Debug)]
pub struct Kzg {
    // c-kzg settings, always present.
    trusted_setup: KzgSettings,
    // PeerDAS context: `Some` only when constructed via
    // `new_from_trusted_setup_das_enabled`, `None` otherwise.
    context: Option<DASContext>,
}
impl Kzg {
@@ -65,9 +62,36 @@ impl Kzg {
&trusted_setup.g1_points(),
&trusted_setup.g2_points(),
)?,
context: None,
})
}
/// Construct a `Kzg` with PeerDAS (DAS) support enabled, building both the
/// c-kzg settings and the rust-eth-kzg `DASContext` from the same trusted setup.
pub fn new_from_trusted_setup_das_enabled(trusted_setup: TrustedSetup) -> Result<Self, Error> {
    // Single-threaded to match the c-kzg performance characteristics.
    const NUM_THREADS: usize = 1;

    // Build the PeerDAS trusted setup using default parameters.
    // Note: `from_json` could alternatively initialize it from the
    // consensus-specs json string.
    let das_setup = PeerDASTrustedSetup::from(&trusted_setup);
    let das_context = DASContext::with_threads(&das_setup, NUM_THREADS);

    let settings = KzgSettings::load_trusted_setup(
        &trusted_setup.g1_points(),
        &trusted_setup.g2_points(),
    )?;

    Ok(Self {
        trusted_setup: settings,
        context: Some(das_context),
    })
}
fn context(&self) -> Result<&DASContext, Error> {
self.context.as_ref().ok_or(Error::DASContextUninitialized)
}
// NOTE(review): this span is a rendered commit diff — `compute_blob_kzg_proof`
// is cut by the `@@` marker, and `compute_cells_and_proofs` interleaves its
// pre-change (stub) and post-change (rust-eth-kzg) signature and body lines.
// Not standalone-valid Rust; left byte-identical.
/// Compute the kzg proof given a blob and its kzg commitment.
pub fn compute_blob_kzg_proof(
&self,
@@ -167,21 +191,18 @@ impl Kzg {
}
/// Computes the cells and associated proofs for a given `blob` at index `index`.
#[allow(clippy::type_complexity)]
pub fn compute_cells_and_proofs(
&self,
_blob: &Blob,
) -> Result<
(
Box<[Cell; CELLS_PER_EXT_BLOB]>,
Box<[KzgProof; CELLS_PER_EXT_BLOB]>,
),
Error,
> {
// TODO(das): use proper crypto once ckzg merges das branch
let cells = Box::new(core::array::from_fn(|_| Cell { bytes: [0u8; 2048] }));
let proofs = Box::new([KzgProof([0u8; BYTES_PER_PROOF]); CELLS_PER_EXT_BLOB]);
Ok((cells, proofs))
blob: KzgBlobRef<'_>,
) -> Result<CellsAndKzgProofs, Error> {
let (cells, proofs) = self
.context()?
.compute_cells_and_kzg_proofs(blob)
.map_err(Error::PeerDASKZG)?;
// Convert the proof type to a c-kzg proof type
let c_kzg_proof = proofs.map(KzgProof);
Ok((cells, c_kzg_proof))
}
// NOTE(review): rendered commit diff — this function interleaves its
// pre-change stub parameters/body with the post-change rust-eth-kzg
// implementation around an `@@` marker. Not standalone-valid Rust; left
// byte-identical.
/// Verifies a batch of cell-proof-commitment triplets.
@@ -191,35 +212,43 @@ impl Kzg {
/// to the data column index.
pub fn verify_cell_proof_batch(
&self,
_cells: &[Cell],
_kzg_proofs: &[Bytes48],
_coordinates: &[(u64, u64)],
_kzg_commitments: &[Bytes48],
cells: &[CellRef<'_>],
kzg_proofs: &[Bytes48],
columns: Vec<CellIndex>,
kzg_commitments: &[Bytes48],
) -> Result<(), Error> {
// TODO(das): use proper crypto once ckzg merges das branch
Ok(())
let proofs: Vec<_> = kzg_proofs.iter().map(|proof| proof.as_ref()).collect();
let commitments: Vec<_> = kzg_commitments
.iter()
.map(|commitment| commitment.as_ref())
.collect();
let verification_result = self.context()?.verify_cell_kzg_proof_batch(
commitments.to_vec(),
columns,
cells.to_vec(),
proofs.to_vec(),
);
// Modify the result so it matches roughly what the previous method was doing.
match verification_result {
Ok(_) => Ok(()),
Err(e) if e.invalid_proof() => Err(Error::KzgVerificationFailed),
Err(e) => Err(Error::PeerDASKZG(e)),
}
}
// NOTE(review): rendered commit diff — the removed `cells_to_blob` and
// `recover_all_cells` stubs, the new `recover_cells_and_compute_kzg_proofs`,
// and a removed `TryFrom<TrustedSetup>` impl are interleaved here. Not
// standalone-valid Rust; left byte-identical.
pub fn cells_to_blob(&self, _cells: &[Cell; CELLS_PER_EXT_BLOB]) -> Result<Blob, Error> {
// TODO(das): use proper crypto once ckzg merges das branch
Ok(Blob::new([0u8; 131072usize]))
}
pub fn recover_all_cells(
pub fn recover_cells_and_compute_kzg_proofs(
&self,
_cell_ids: &[u64],
_cells: &[Cell],
) -> Result<Box<[Cell; CELLS_PER_EXT_BLOB]>, Error> {
// TODO(das): use proper crypto once ckzg merges das branch
let cells = Box::new(core::array::from_fn(|_| Cell { bytes: [0u8; 2048] }));
Ok(cells)
}
}
impl TryFrom<TrustedSetup> for Kzg {
type Error = Error;
fn try_from(trusted_setup: TrustedSetup) -> Result<Self, Self::Error> {
Kzg::new_from_trusted_setup(trusted_setup)
cell_ids: &[u64],
cells: &[CellRef<'_>],
) -> Result<CellsAndKzgProofs, Error> {
let (cells, proofs) = self
.context()?
.recover_cells_and_proofs(cell_ids.to_vec(), cells.to_vec())
.map_err(Error::PeerDASKZG)?;
// Convert the proof type to a c-kzg proof type
let c_kzg_proof = proofs.map(KzgProof);
Ok((cells, c_kzg_proof))
}
}

View File

@@ -1,3 +1,4 @@
use crate::PeerDASTrustedSetup;
use c_kzg::{BYTES_PER_G1_POINT, BYTES_PER_G2_POINT};
use serde::{
de::{self, Deserializer, Visitor},
@@ -43,6 +44,28 @@ impl TrustedSetup {
}
}
impl From<&TrustedSetup> for PeerDASTrustedSetup {
    /// Converts the c-kzg style trusted setup into the rust-eth-kzg one,
    /// rendering every point as the 0x-prefixed hex string that library expects.
    fn from(trusted_setup: &TrustedSetup) -> Self {
        // Shared formatter; G1 (48-byte) and G2 (96-byte) points both coerce
        // to a byte slice here.
        let to_hex = |bytes: &[u8]| format!("0x{}", hex::encode(bytes));
        Self {
            g1_monomial: trusted_setup
                .g1_monomial_points
                .iter()
                .map(|point| to_hex(&point.0))
                .collect(),
            g1_lagrange: trusted_setup
                .g1_points
                .iter()
                .map(|point| to_hex(&point.0))
                .collect(),
            g2_monomial: trusted_setup
                .g2_points
                .iter()
                .map(|point| to_hex(&point.0))
                .collect(),
        }
    }
}
impl Serialize for G1Point {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where