Migrate from ethereum-types to alloy-primitives (#6078)

* Remove use of ethers_core::RlpStream

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core

* Remove old code

* Simplify keccak call

* Remove unused package

* Merge branch 'unstable' of https://github.com/ethDreamer/lighthouse into remove_use_of_ethers_core

* Merge branch 'unstable' into remove_use_of_ethers_core

* Run clippy

* Merge branch 'remove_use_of_ethers_core' of https://github.com/dospore/lighthouse into remove_use_of_ethers_core

* Check all cargo fmt

* migrate to alloy primitives init

* fix deps

* integrate alloy-primitives

* resolve dep issues

* more changes based on dep changes

* add TODOs

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core

* Revert lock

* Add BeaconBlocksByRange v3

* continue migration

* Revert "Add BeaconBlocksByRange v3"

This reverts commit e3ce7fc5ea.

* impl hash256 extended trait

* revert some unneeded diffs

* merge conflict resolved

* fix subnet id rshift calc

* rename to FixedBytesExtended

* debugging

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* fix failed test

* fixing more tests

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into remove_use_of_ethers_core

* introduce a shim to convert between the two u256 types

* move alloy to workspace

* align alloy versions

* update

* update web3signer test certs

* refactor

* resolve failing tests

* linting

* fix graffiti string test

* fmt

* fix ef test

* resolve merge conflicts

* remove udep and revert cert

* cargo patch

* cyclic dep

* fix build error

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* resolve conflicts, update deps

* merge unstable

* fmt

* fix deps

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* resolve merge conflicts

* resolve conflicts, make necessary changes

* Remove patch

* fmt

* remove file

* merge conflicts

* sneaking in a smol change

* bump versions

* Merge remote-tracking branch 'origin/unstable' into migrate-to-alloy-primitives

* Updates for peerDAS

* Update ethereum_hashing to prevent dupe

* updated alloy-consensus, removed TODOs

* cargo update

* endianness fix

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* fmt

* fix merge

* fix test

* fixed_bytes crate

* minor fixes

* convert u256 to i64

* panic free mixin to_low_u64_le

* from_str_radix

* compute_subnet API and ensure we use big-endian

* Merge branch 'unstable' of https://github.com/sigp/lighthouse into migrate-to-alloy-primitives

* fix test

* Simplify subnet_id test

* Simplify some more tests

* Add tests to fixed_bytes crate

* Merge branch 'unstable' into migrate-to-alloy-primitives
This commit is contained in:
Eitan Seri-Levi
2024-09-02 01:03:24 -07:00
committed by GitHub
parent 002ca2cdeb
commit 99e53b88c3
152 changed files with 1050 additions and 718 deletions

View File

@@ -0,0 +1,11 @@
[package]
name = "fixed_bytes"
version = "0.1.0"
authors = ["Eitan Seri-Levi <eitan@sigmaprime.io>"]
edition = { workspace = true }
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
alloy-primitives = { workspace = true }
safe_arith = { workspace = true }

View File

@@ -0,0 +1,160 @@
use alloy_primitives::FixedBytes;
use safe_arith::SafeArith;
pub type Hash64 = alloy_primitives::B64;
pub type Hash256 = alloy_primitives::B256;
pub type Uint256 = alloy_primitives::U256;
pub type Address = alloy_primitives::Address;
/// Extension trait adding integer conversions that `alloy_primitives::U256`
/// does not provide directly.
pub trait UintExtended {
    /// Truncates to the low 8 bytes and reinterprets them as an `i64`.
    fn to_i64(self) -> i64;
}
/// Extension trait reproducing `ethereum-types`-style constructors/accessors
/// on alloy's `FixedBytes` wrappers, easing the migration from `H256`/`H64`.
pub trait FixedBytesExtended {
    /// Builds a value whose *trailing* bytes hold `value` in big-endian order
    /// (matching `ethereum_types::H256::from_low_u64_be`).
    fn from_low_u64_be(value: u64) -> Self;
    /// Builds a value whose *leading* bytes hold `value` in little-endian order
    /// (matching `ethereum_types::H256::from_low_u64_le`).
    fn from_low_u64_le(value: u64) -> Self;
    /// Reads the leading bytes back as a little-endian `u64`.
    fn to_low_u64_le(&self) -> u64;
    /// The all-zero value.
    fn zero() -> Self;
}
impl<const N: usize> FixedBytesExtended for FixedBytes<N> {
    /// Places the big-endian encoding of `value` in the trailing bytes of the
    /// result, zero-filling the rest. If `N < 8`, only the low `N` bytes of
    /// `value` are kept (mirroring `ethereum_types` semantics).
    fn from_low_u64_be(value: u64) -> Self {
        let value_bytes = value.to_be_bytes();
        let mut buffer = [0x0; N];
        let bytes_to_copy = value_bytes.len().min(buffer.len());
        // Panic-free because bytes_to_copy <= buffer.len()
        let start_index = buffer
            .len()
            .safe_sub(bytes_to_copy)
            .expect("bytes_to_copy <= buffer.len()");
        // Panic-free because start_index <= buffer.len()
        // and bytes_to_copy <= value_bytes.len()
        buffer
            .get_mut(start_index..)
            .expect("start_index <= buffer.len()")
            .copy_from_slice(
                value_bytes
                    .get(value_bytes.len() - bytes_to_copy..)
                    .expect("bytes_to_copy <= value_bytes.len()"),
            );
        Self::from(buffer)
    }

    /// Places the little-endian encoding of `value` in the leading bytes of
    /// the result, zero-filling the rest. If `N < 8`, only the low `N` bytes
    /// of `value` are kept.
    fn from_low_u64_le(value: u64) -> Self {
        let value_bytes = value.to_le_bytes();
        let mut buffer = [0x0; N];
        let bytes_to_copy = value_bytes.len().min(buffer.len());
        // Panic-free because bytes_to_copy <= buffer.len(),
        // and bytes_to_copy <= value_bytes.len()
        buffer
            .get_mut(..bytes_to_copy)
            .expect("bytes_to_copy <= buffer.len()")
            .copy_from_slice(
                value_bytes
                    .get(..bytes_to_copy)
                    .expect("bytes_to_copy <= value_bytes.len()"),
            );
        Self::from(buffer)
    }

    fn zero() -> Self {
        Self::ZERO
    }

    /// Reads the first `min(N, 8)` bytes as a little-endian u64.
    ///
    /// Previously this sliced `bytes[0..8]` unconditionally, which panics for
    /// `N < 8`; copying `min(N, 8)` bytes keeps the method panic-free for all
    /// `N` (identical behavior for `N >= 8`, zero-extension for `N < 8`).
    fn to_low_u64_le(&self) -> u64 {
        let mut result = [0u8; 8];
        let bytes = self.as_slice();
        let bytes_to_copy = bytes.len().min(result.len());
        // Panic-free because bytes_to_copy <= result.len()
        // and bytes_to_copy <= bytes.len()
        result
            .get_mut(..bytes_to_copy)
            .expect("bytes_to_copy <= result.len()")
            .copy_from_slice(bytes.get(..bytes_to_copy).expect("bytes_to_copy <= bytes.len()"));
        u64::from_le_bytes(result)
    }
}
/// `Address` is a newtype over `FixedBytes<20>`, so every operation simply
/// forwards to the generic implementation above.
impl FixedBytesExtended for alloy_primitives::Address {
    fn from_low_u64_be(value: u64) -> Self {
        Self::from(FixedBytes::<20>::from_low_u64_be(value))
    }

    fn from_low_u64_le(value: u64) -> Self {
        Self::from(FixedBytes::<20>::from_low_u64_le(value))
    }

    fn zero() -> Self {
        Self::from(FixedBytes::<20>::zero())
    }

    fn to_low_u64_le(&self) -> u64 {
        // `Address` derefs to `FixedBytes<20>`, so the generic impl applies.
        FixedBytes::<20>::to_low_u64_le(self)
    }
}
impl UintExtended for Uint256 {
    /// Reinterprets the lowest 8 little-endian bytes of the integer as an
    /// `i64`, discarding the upper 24 bytes.
    fn to_i64(self) -> i64 {
        let le_bytes = self.to_le_bytes::<32>();
        // Panic-free: a 32-byte array always has a leading 8-byte prefix.
        let low: [u8; 8] = le_bytes[0..8]
            .try_into()
            .expect("an 8-byte slice converts to [u8; 8]");
        i64::from_le_bytes(low)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use alloy_primitives::bytes::Buf;

    // The trailing 8 bytes of a big-endian `from_low_u64_be` result must
    // decode back to the original value (`Buf::get_u64` reads big-endian).
    #[test]
    fn from_low_u64_be() {
        let values = [0, 1, 0xff, 1 << 16, u64::MAX, u64::MAX - 1];
        for value in values {
            assert_eq!(
                (&Hash256::from_low_u64_be(value).as_slice()[24..]).get_u64(),
                value
            );
        }
    }

    // The leading 8 bytes of a `from_low_u64_le` result hold the value in
    // little-endian order.
    #[test]
    fn from_low_u64_le() {
        let values = [0, 1, 0xff, 1 << 16, u64::MAX, u64::MAX - 1];
        for value in values {
            assert_eq!(
                u64::from_le_bytes(
                    Hash256::from_low_u64_le(value).as_slice()[0..8]
                        .try_into()
                        .unwrap()
                ),
                value
            );
        }
    }

    // `to_low_u64_le` is the inverse of `from_low_u64_le` for all u64 values.
    #[test]
    fn to_low_u64_le() {
        let values = [0, 1, 0xff, 1 << 16, u64::MAX, u64::MAX - 1];
        for value in values {
            assert_eq!(Hash256::from_low_u64_le(value).to_low_u64_le(), value);
        }
    }

    // Values that fit in an i64 round-trip exactly through `to_i64`.
    #[test]
    fn to_i64_in_range() {
        let values = [0, 1, 0xff, 1 << 16, i64::MAX, i64::MAX - 1];
        for value in values {
            assert_eq!(Uint256::from(value).to_i64(), value);
        }
    }

    // Out-of-range values are truncated to their low 8 LE bytes — compare
    // against the same truncation performed on the u128 directly.
    #[test]
    fn to_i64_out_of_range() {
        let values = [u128::MAX, 1 << 70, 1 << 80, i64::MAX as u128 + 1];
        for value in values {
            assert_eq!(
                Uint256::from(value).to_i64(),
                i64::from_le_bytes(value.to_le_bytes()[0..8].try_into().unwrap())
            );
        }
    }
}

View File

@@ -16,8 +16,8 @@ use std::time::Duration;
use types::{
consts::bellatrix::INTERVALS_PER_SLOT, AbstractExecPayload, AttestationShufflingId,
AttesterSlashingRef, BeaconBlockRef, BeaconState, BeaconStateError, ChainSpec, Checkpoint,
Epoch, EthSpec, ExecPayload, ExecutionBlockHash, Hash256, IndexedAttestationRef, RelativeEpoch,
SignedBeaconBlock, Slot,
Epoch, EthSpec, ExecPayload, ExecutionBlockHash, FixedBytesExtended, Hash256,
IndexedAttestationRef, RelativeEpoch, SignedBeaconBlock, Slot,
};
#[derive(Debug)]

View File

@@ -16,8 +16,8 @@ use std::time::Duration;
use store::MemoryStore;
use types::{
test_utils::generate_deterministic_keypair, BeaconBlockRef, BeaconState, ChainSpec, Checkpoint,
Epoch, EthSpec, ForkName, Hash256, IndexedAttestation, MainnetEthSpec, RelativeEpoch,
SignedBeaconBlock, Slot, SubnetId,
Epoch, EthSpec, FixedBytesExtended, ForkName, Hash256, IndexedAttestation, MainnetEthSpec,
RelativeEpoch, SignedBeaconBlock, Slot, SubnetId,
};
pub type E = MainnetEthSpec;

View File

@@ -5,13 +5,14 @@ authors = ["Michael Sproul <michael@sigmaprime.io>"]
edition = { workspace = true }
[dependencies]
ethereum-types = { workspace = true }
alloy-primitives = { workspace = true }
ethereum_hashing = { workspace = true }
safe_arith = { workspace = true }
fixed_bytes = { workspace = true }
[dev-dependencies]
quickcheck = { workspace = true }
quickcheck_macros = { workspace = true }
[features]
arbitrary = ["ethereum-types/arbitrary"]
arbitrary = ["alloy-primitives/arbitrary"]

View File

@@ -1,8 +1,10 @@
use ethereum_hashing::{hash, hash32_concat, ZERO_HASHES};
use ethereum_types::H256;
use safe_arith::ArithError;
use std::sync::LazyLock;
type H256 = fixed_bytes::Hash256;
pub use fixed_bytes::FixedBytesExtended;
const MAX_TREE_DEPTH: usize = 32;
const EMPTY_SLICE: &[H256] = &[];
@@ -86,8 +88,8 @@ impl MerkleTree {
let left_subtree = MerkleTree::create(left_leaves, depth - 1);
let right_subtree = MerkleTree::create(right_leaves, depth - 1);
let hash = H256::from_slice(&hash32_concat(
left_subtree.hash().as_bytes(),
right_subtree.hash().as_bytes(),
left_subtree.hash().as_slice(),
right_subtree.hash().as_slice(),
));
Node(hash, Box::new(left_subtree), Box::new(right_subtree))
@@ -143,9 +145,9 @@ impl MerkleTree {
// All other possibilities are invalid MerkleTrees
(_, _) => return Err(MerkleTreeError::Invalid),
};
hash.assign_from_slice(&hash32_concat(
left.hash().as_bytes(),
right.hash().as_bytes(),
hash.copy_from_slice(&hash32_concat(
left.hash().as_slice(),
right.hash().as_slice(),
));
}
Finalized(_) => return Err(MerkleTreeError::FinalizedNodePushed),
@@ -274,8 +276,8 @@ impl MerkleTree {
};
let hash = H256::from_slice(&hash32_concat(
left.hash().as_bytes(),
right.hash().as_bytes(),
left.hash().as_slice(),
right.hash().as_slice(),
));
Ok(MerkleTree::Node(hash, Box::new(left), Box::new(right)))
}
@@ -369,15 +371,15 @@ pub fn verify_merkle_proof(
pub fn merkle_root_from_branch(leaf: H256, branch: &[H256], depth: usize, index: usize) -> H256 {
assert_eq!(branch.len(), depth, "proof length should equal depth");
let mut merkle_root = leaf.as_bytes().to_vec();
let mut merkle_root = leaf.as_slice().to_vec();
for (i, leaf) in branch.iter().enumerate().take(depth) {
let ith_bit = (index >> i) & 0x01;
if ith_bit == 1 {
merkle_root = hash32_concat(leaf.as_bytes(), &merkle_root)[..].to_vec();
merkle_root = hash32_concat(leaf.as_slice(), &merkle_root)[..].to_vec();
} else {
let mut input = merkle_root;
input.extend_from_slice(leaf.as_bytes());
input.extend_from_slice(leaf.as_slice());
merkle_root = hash(&input);
}
}
@@ -433,7 +435,6 @@ mod tests {
}
let leaves_iter = int_leaves.into_iter().map(H256::from_low_u64_be);
let mut merkle_tree = MerkleTree::create(&[], depth);
let proofs_ok = leaves_iter.enumerate().all(|(i, leaf)| {
@@ -465,10 +466,10 @@ mod tests {
let leaf_b10 = H256::from([0xCC; 32]);
let leaf_b11 = H256::from([0xDD; 32]);
let node_b0x = H256::from_slice(&hash32_concat(leaf_b00.as_bytes(), leaf_b01.as_bytes()));
let node_b1x = H256::from_slice(&hash32_concat(leaf_b10.as_bytes(), leaf_b11.as_bytes()));
let node_b0x = H256::from_slice(&hash32_concat(leaf_b00.as_slice(), leaf_b01.as_slice()));
let node_b1x = H256::from_slice(&hash32_concat(leaf_b10.as_slice(), leaf_b11.as_slice()));
let root = H256::from_slice(&hash32_concat(node_b0x.as_bytes(), node_b1x.as_bytes()));
let root = H256::from_slice(&hash32_concat(node_b0x.as_slice(), node_b1x.as_slice()));
let tree = MerkleTree::create(&[leaf_b00, leaf_b01, leaf_b10, leaf_b11], 2);
assert_eq!(tree.hash(), root);
@@ -482,10 +483,10 @@ mod tests {
let leaf_b10 = H256::from([0xCC; 32]);
let leaf_b11 = H256::from([0xDD; 32]);
let node_b0x = H256::from_slice(&hash32_concat(leaf_b00.as_bytes(), leaf_b01.as_bytes()));
let node_b1x = H256::from_slice(&hash32_concat(leaf_b10.as_bytes(), leaf_b11.as_bytes()));
let node_b0x = H256::from_slice(&hash32_concat(leaf_b00.as_slice(), leaf_b01.as_slice()));
let node_b1x = H256::from_slice(&hash32_concat(leaf_b10.as_slice(), leaf_b11.as_slice()));
let root = H256::from_slice(&hash32_concat(node_b0x.as_bytes(), node_b1x.as_bytes()));
let root = H256::from_slice(&hash32_concat(node_b0x.as_slice(), node_b1x.as_slice()));
// Run some proofs
assert!(verify_merkle_proof(

View File

@@ -8,8 +8,8 @@ use crate::{InvalidationOperation, JustifiedBalances};
use serde::{Deserialize, Serialize};
use std::collections::BTreeSet;
use types::{
AttestationShufflingId, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256,
MainnetEthSpec, Slot,
AttestationShufflingId, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, FixedBytesExtended,
Hash256, MainnetEthSpec, Slot,
};
pub use execution_status::*;

View File

@@ -1,3 +1,5 @@
use types::FixedBytesExtended;
use super::*;
pub fn get_no_votes_test_definition() -> ForkChoiceTestDefinition {

View File

@@ -7,8 +7,8 @@ use ssz_derive::{Decode, Encode};
use std::collections::{HashMap, HashSet};
use superstruct::superstruct;
use types::{
AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256,
Slot,
AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash,
FixedBytesExtended, Hash256, Slot,
};
// Define a "legacy" implementation of `Option<usize>` which uses four bytes for encoding the union

View File

@@ -15,8 +15,8 @@ use std::{
fmt,
};
use types::{
AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256,
Slot,
AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash,
FixedBytesExtended, Hash256, Slot,
};
pub const DEFAULT_PRUNE_THRESHOLD: usize = 256;
@@ -993,7 +993,7 @@ fn compute_deltas(
#[cfg(test)]
mod test_compute_deltas {
use super::*;
use types::MainnetEthSpec;
use types::{FixedBytesExtended, MainnetEthSpec};
/// Gives a hash that is not the zero hash (unless i is `usize::MAX)`.
fn hash_from_index(i: usize) -> Hash256 {

View File

@@ -1,6 +1,8 @@
use crate::common::update_progressive_balances_cache::initialize_progressive_balances_cache;
use crate::epoch_cache::initialize_epoch_cache;
use types::{BeaconState, ChainSpec, EpochCacheError, EthSpec, Hash256, RelativeEpoch};
use types::{
BeaconState, ChainSpec, EpochCacheError, EthSpec, FixedBytesExtended, Hash256, RelativeEpoch,
};
/// Mixin trait for the beacon state that provides operations on *all* caches.
///

View File

@@ -3,7 +3,9 @@ use crate::common::base::SqrtTotalActiveBalance;
use crate::common::{altair, base};
use safe_arith::SafeArith;
use types::epoch_cache::{EpochCache, EpochCacheError, EpochCacheKey};
use types::{ActivationQueue, BeaconState, ChainSpec, EthSpec, ForkName, Hash256};
use types::{
ActivationQueue, BeaconState, ChainSpec, EthSpec, FixedBytesExtended, ForkName, Hash256,
};
/// Precursor to an `EpochCache`.
pub struct PreEpochCache {

View File

@@ -30,7 +30,7 @@ pub fn verify_bls_to_execution_change<E: EthSpec>(
verify!(
validator
.withdrawal_credentials
.as_bytes()
.as_slice()
.first()
.map(|byte| *byte == spec.bls_withdrawal_prefix_byte)
.unwrap_or(false),
@@ -41,7 +41,7 @@ pub fn verify_bls_to_execution_change<E: EthSpec>(
// future.
let pubkey_hash = hash(address_change.from_bls_pubkey.as_serialized());
verify!(
validator.withdrawal_credentials.as_bytes().get(1..) == pubkey_hash.get(1..),
validator.withdrawal_credentials.as_slice().get(1..) == pubkey_hash.get(1..),
Invalid::WithdrawalCredentialsMismatch
);

View File

@@ -2,7 +2,7 @@
use crate::per_epoch_processing::process_epoch;
use beacon_chain::test_utils::BeaconChainHarness;
use beacon_chain::types::{EthSpec, MinimalEthSpec};
use bls::Hash256;
use bls::{FixedBytesExtended, Hash256};
use env_logger::{Builder, Env};
use types::Slot;

View File

@@ -5,7 +5,7 @@
//! duplication and protect against some easy-to-make mistakes when performing state advances.
use crate::*;
use types::{BeaconState, ChainSpec, EthSpec, Hash256, Slot};
use types::{BeaconState, ChainSpec, EthSpec, FixedBytesExtended, Hash256, Slot};
#[derive(Debug, PartialEq)]
pub enum Error {

View File

@@ -12,8 +12,10 @@ harness = false
criterion = { workspace = true }
[dependencies]
alloy-primitives = { workspace = true }
ethereum_hashing = { workspace = true }
ethereum-types = { workspace = true }
fixed_bytes = { workspace = true }
[features]
arbitrary = ["ethereum-types/arbitrary"]
arbitrary = ["alloy-primitives/arbitrary"]
getrandom = ["alloy-primitives/getrandom"]

View File

@@ -87,7 +87,7 @@ fn bytes_to_int64(slice: &[u8]) -> u64 {
#[cfg(test)]
mod tests {
use super::*;
use ethereum_types::H256 as Hash256;
use alloy_primitives::B256 as Hash256;
#[test]
#[ignore]

View File

@@ -20,4 +20,4 @@ mod shuffle_list;
pub use compute_shuffled_index::compute_shuffled_index;
pub use shuffle_list::shuffle_list;
type Hash256 = ethereum_types::H256;
type Hash256 = fixed_bytes::Hash256;

View File

@@ -9,14 +9,13 @@ name = "benches"
harness = false
[dependencies]
alloy-primitives = { workspace = true, features = ["rlp"] }
alloy-primitives = { workspace = true, features = ["rlp", "getrandom"] }
merkle_proof = { workspace = true }
bls = { workspace = true, features = ["arbitrary"] }
kzg = { workspace = true }
compare_fields = { workspace = true }
compare_fields_derive = { workspace = true }
eth2_interop_keypairs = { path = "../../common/eth2_interop_keypairs" }
ethereum-types = { workspace = true, features = ["arbitrary"] }
ethereum_hashing = { workspace = true }
hex = { workspace = true }
int_to_bytes = { workspace = true }
@@ -31,7 +30,7 @@ ethereum_ssz_derive = { workspace = true }
ssz_types = { workspace = true, features = ["arbitrary"] }
swap_or_not_shuffle = { workspace = true, features = ["arbitrary"] }
test_random_derive = { path = "../../common/test_random_derive" }
tree_hash = { workspace = true, features = ["arbitrary"] }
tree_hash = { workspace = true }
tree_hash_derive = { workspace = true }
rand_xorshift = "0.3.0"
serde_yaml = { workspace = true }
@@ -53,6 +52,7 @@ maplit = { workspace = true }
alloy-rlp = { version = "0.3.4", features = ["derive"] }
milhouse = { workspace = true }
rpds = { workspace = true }
fixed_bytes = { workspace = true }
[dev-dependencies]
criterion = { workspace = true }

View File

@@ -4,8 +4,8 @@ use rayon::prelude::*;
use ssz::Encode;
use std::sync::Arc;
use types::{
test_utils::generate_deterministic_keypair, BeaconState, Epoch, Eth1Data, EthSpec, Hash256,
MainnetEthSpec, Validator,
test_utils::generate_deterministic_keypair, BeaconState, Epoch, Eth1Data, EthSpec,
FixedBytesExtended, Hash256, MainnetEthSpec, Validator,
};
fn get_state<E: EthSpec>(validator_count: usize) -> BeaconState<E> {

View File

@@ -412,13 +412,7 @@ impl<E: EthSpec> AttestationBase<E> {
pub fn extend_aggregation_bits(
&self,
) -> Result<BitList<E::MaxValidatorsPerSlot>, ssz_types::Error> {
let mut extended_aggregation_bits: BitList<E::MaxValidatorsPerSlot> =
BitList::with_capacity(self.aggregation_bits.len())?;
for (i, bit) in self.aggregation_bits.iter().enumerate() {
extended_aggregation_bits.set(i, bit)?;
}
Ok(extended_aggregation_bits)
self.aggregation_bits.resize::<E::MaxValidatorsPerSlot>()
}
}

View File

@@ -507,7 +507,7 @@ impl<E: EthSpec, Payload: AbstractExecPayload<E>> BeaconBlockCapella<E, Payload>
message: BlsToExecutionChange {
validator_index: 0,
from_bls_pubkey: PublicKeyBytes::empty(),
to_execution_address: Address::zero(),
to_execution_address: Address::ZERO,
},
signature: Signature::empty()
};
@@ -637,7 +637,7 @@ impl<E: EthSpec, Payload: AbstractExecPayload<E>> BeaconBlockElectra<E, Payload>
message: BlsToExecutionChange {
validator_index: 0,
from_bls_pubkey: PublicKeyBytes::empty(),
to_execution_address: Address::zero(),
to_execution_address: Address::ZERO,
},
signature: Signature::empty()
};

View File

@@ -1,6 +1,7 @@
use self::committee_cache::get_active_validator_indices;
use crate::historical_summary::HistoricalSummary;
use crate::test_utils::TestRandom;
use crate::FixedBytesExtended;
use crate::*;
use compare_fields::CompareFields;
use compare_fields_derive::CompareFields;
@@ -1038,7 +1039,7 @@ impl<E: EthSpec> BeaconState<E> {
let epoch = slot.epoch(E::slots_per_epoch());
let mut preimage = self
.get_seed(epoch, Domain::BeaconProposer, spec)?
.as_bytes()
.as_slice()
.to_vec();
preimage.append(&mut int_to_bytes8(slot.as_u64()));
Ok(hash(&preimage))
@@ -1099,14 +1100,14 @@ impl<E: EthSpec> BeaconState<E> {
let shuffled_index = compute_shuffled_index(
i.safe_rem(active_validator_count)?,
active_validator_count,
seed.as_bytes(),
seed.as_slice(),
spec.shuffle_round_count,
)
.ok_or(Error::UnableToShuffle)?;
let candidate_index = *active_validator_indices
.get(shuffled_index)
.ok_or(Error::ShuffleIndexOutOfBounds(shuffled_index))?;
let random_byte = Self::shuffling_random_byte(i, seed.as_bytes())?;
let random_byte = Self::shuffling_random_byte(i, seed.as_slice())?;
let effective_balance = self.get_validator(candidate_index)?.effective_balance;
if effective_balance.safe_mul(MAX_RANDOM_BYTE)?
>= max_effective_balance.safe_mul(u64::from(random_byte))?
@@ -1528,7 +1529,7 @@ impl<E: EthSpec> BeaconState<E> {
let mut preimage = [0; NUM_DOMAIN_BYTES + NUM_EPOCH_BYTES + NUM_MIX_BYTES];
preimage[0..NUM_DOMAIN_BYTES].copy_from_slice(&domain_bytes);
preimage[NUM_DOMAIN_BYTES..MIX_OFFSET].copy_from_slice(&epoch_bytes);
preimage[MIX_OFFSET..].copy_from_slice(mix.as_bytes());
preimage[MIX_OFFSET..].copy_from_slice(mix.as_slice());
Ok(Hash256::from_slice(&hash(&preimage)))
}
@@ -2214,8 +2215,9 @@ impl<E: EthSpec> BeaconState<E> {
.get_mut(validator_index)
.ok_or(Error::UnknownValidator(validator_index))?;
if validator.has_eth1_withdrawal_credential(spec) {
validator.withdrawal_credentials.as_fixed_bytes_mut()[0] =
AsMut::<[u8; 32]>::as_mut(&mut validator.withdrawal_credentials)[0] =
spec.compounding_withdrawal_prefix_byte;
self.queue_excess_active_balance(validator_index, spec)?;
}
Ok(())

View File

@@ -3,8 +3,8 @@ use crate::test_utils::*;
use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType};
use beacon_chain::types::{
test_utils::TestRandom, BeaconState, BeaconStateAltair, BeaconStateBase, BeaconStateError,
ChainSpec, Domain, Epoch, EthSpec, Hash256, Keypair, MainnetEthSpec, MinimalEthSpec,
RelativeEpoch, Slot, Vector,
ChainSpec, Domain, Epoch, EthSpec, FixedBytesExtended, Hash256, Keypair, MainnetEthSpec,
MinimalEthSpec, RelativeEpoch, Slot, Vector,
};
use ssz::Encode;
use std::ops::Mul;

View File

@@ -23,6 +23,7 @@ pub struct BlsToExecutionChange {
#[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64,
pub from_bls_pubkey: PublicKeyBytes,
#[serde(with = "serde_utils::address_hex")]
pub to_execution_address: Address,
}

View File

@@ -544,7 +544,7 @@ impl ChainSpec {
let mut result = [0; 4];
let root = Self::compute_fork_data_root(current_version, genesis_validators_root);
result.copy_from_slice(
root.as_bytes()
root.as_slice()
.get(0..4)
.expect("root hash is at least 4 bytes"),
);
@@ -564,7 +564,7 @@ impl ChainSpec {
domain[0..4].copy_from_slice(&int_to_bytes4(domain_constant));
domain[4..].copy_from_slice(
Self::compute_fork_data_root(fork_version, genesis_validators_root)
.as_bytes()
.as_slice()
.get(..28)
.expect("fork has is 32 bytes so first 28 bytes should exist"),
);
@@ -754,7 +754,8 @@ impl ChainSpec {
proportional_slashing_multiplier_bellatrix: 3,
bellatrix_fork_version: [0x02, 0x00, 0x00, 0x00],
bellatrix_fork_epoch: Some(Epoch::new(144896)),
terminal_total_difficulty: Uint256::from_dec_str("58750000000000000000000")
terminal_total_difficulty: "58750000000000000000000"
.parse()
.expect("terminal_total_difficulty is a valid integer"),
terminal_block_hash: ExecutionBlockHash::zero(),
terminal_block_hash_activation_epoch: Epoch::new(u64::MAX),
@@ -900,7 +901,7 @@ impl ChainSpec {
.expect("subtraction does not overflow")
// Add 1 since the spec declares `2**256 - 2**10` and we use
// `Uint256::MAX` which is `2*256- 1`.
.checked_add(Uint256::one())
.checked_add(Uint256::from(2u64.pow(0)))
.expect("addition does not overflow"),
// Capella
capella_fork_version: [0x03, 0x00, 0x00, 0x01],
@@ -1074,10 +1075,9 @@ impl ChainSpec {
proportional_slashing_multiplier_bellatrix: 3,
bellatrix_fork_version: [0x02, 0x00, 0x00, 0x64],
bellatrix_fork_epoch: Some(Epoch::new(385536)),
terminal_total_difficulty: Uint256::from_dec_str(
"8626000000000000000000058750000000000000000000",
)
.expect("terminal_total_difficulty is a valid integer"),
terminal_total_difficulty: "8626000000000000000000058750000000000000000000"
.parse()
.expect("terminal_total_difficulty is a valid integer"),
terminal_block_hash: ExecutionBlockHash::zero(),
terminal_block_hash_activation_epoch: Epoch::new(u64::MAX),
safe_slots_to_import_optimistically: 128u64,
@@ -1305,6 +1305,7 @@ pub struct Config {
deposit_chain_id: u64,
#[serde(with = "serde_utils::quoted_u64")]
deposit_network_id: u64,
#[serde(with = "serde_utils::address_hex")]
deposit_contract_address: Address,
#[serde(default = "default_gossip_max_size")]
@@ -1407,7 +1408,7 @@ fn default_electra_fork_version() -> [u8; 4] {
///
/// Taken from https://github.com/ethereum/consensus-specs/blob/d5e4828aecafaf1c57ef67a5f23c4ae7b08c5137/configs/mainnet.yaml#L15-L16
const fn default_terminal_total_difficulty() -> Uint256 {
ethereum_types::U256([
Uint256::from_limbs([
18446744073709550592,
18446744073709551615,
18446744073709551615,
@@ -1925,7 +1926,7 @@ mod tests {
let domain2 = spec.compute_domain(domain_type, version, genesis_validators_root);
assert_eq!(domain1, domain2);
assert_eq!(&domain1.as_bytes()[0..4], &int_to_bytes4(raw_domain)[..]);
assert_eq!(&domain1.as_slice()[0..4], &int_to_bytes4(raw_domain)[..]);
}
}
@@ -2163,9 +2164,8 @@ mod yaml_tests {
fn test_total_terminal_difficulty() {
assert_eq!(
Ok(default_terminal_total_difficulty()),
Uint256::from_dec_str(
"115792089237316195423570985008687907853269984665640564039457584007913129638912"
)
"115792089237316195423570985008687907853269984665640564039457584007913129638912"
.parse()
);
}

View File

@@ -1,7 +1,7 @@
//! Identifies each data column subnet by an integer identifier.
use crate::data_column_sidecar::ColumnIndex;
use crate::{ChainSpec, EthSpec};
use ethereum_types::U256;
use alloy_primitives::U256;
use itertools::Itertools;
use safe_arith::{ArithError, SafeArith};
use serde::{Deserialize, Serialize};
@@ -38,7 +38,7 @@ impl DataColumnSubnetId {
/// Compute required subnets to subscribe to given the node id.
#[allow(clippy::arithmetic_side_effects)]
pub fn compute_custody_subnets<E: EthSpec>(
node_id: U256,
raw_node_id: [u8; 32],
custody_subnet_count: u64,
spec: &ChainSpec,
) -> impl Iterator<Item = DataColumnSubnetId> {
@@ -46,10 +46,10 @@ impl DataColumnSubnetId {
// value, but here we assume it is valid.
let mut subnets: HashSet<u64> = HashSet::new();
let mut current_id = node_id;
let mut current_id = U256::from_be_slice(&raw_node_id);
while (subnets.len() as u64) < custody_subnet_count {
let mut node_id_bytes = [0u8; 32];
current_id.to_little_endian(&mut node_id_bytes);
node_id_bytes.copy_from_slice(current_id.as_le_slice());
let hash = ethereum_hashing::hash_fixed(&node_id_bytes);
let hash_prefix: [u8; 8] = hash[0..8]
.try_into()
@@ -62,19 +62,19 @@ impl DataColumnSubnetId {
}
if current_id == U256::MAX {
current_id = U256::zero()
current_id = U256::ZERO
}
current_id += U256::one()
current_id += U256::from(1u64)
}
subnets.into_iter().map(DataColumnSubnetId::new)
}
pub fn compute_custody_columns<E: EthSpec>(
node_id: U256,
raw_node_id: [u8; 32],
custody_subnet_count: u64,
spec: &ChainSpec,
) -> impl Iterator<Item = ColumnIndex> {
Self::compute_custody_subnets::<E>(node_id, custody_subnet_count, spec)
Self::compute_custody_subnets::<E>(raw_node_id, custody_subnet_count, spec)
.flat_map(|subnet| subnet.columns::<E>(spec))
.sorted()
}
@@ -134,6 +134,7 @@ mod test {
use crate::data_column_subnet_id::DataColumnSubnetId;
use crate::EthSpec;
use crate::MainnetEthSpec;
use crate::Uint256;
type E = MainnetEthSpec;
@@ -153,7 +154,7 @@ mod test {
"103822458477361691467064888613019442068586830412598673713899771287914656699997",
]
.into_iter()
.map(|v| ethereum_types::U256::from_dec_str(v).unwrap())
.map(|v| Uint256::from_str_radix(v, 10).unwrap().to_be_bytes::<32>())
.collect::<Vec<_>>();
let custody_requirement = 4;

View File

@@ -40,7 +40,7 @@ impl Default for DepositTreeSnapshot {
fn default() -> Self {
let mut result = Self {
finalized: vec![],
deposit_root: Hash256::default(),
deposit_root: Hash256::zero(),
deposit_count: 0,
execution_block_hash: Hash256::zero(),
execution_block_height: 0,
@@ -60,7 +60,7 @@ impl DepositTreeSnapshot {
for height in 0..DEPOSIT_TREE_DEPTH {
deposit_root = if (size & 1) == 1 {
index = index.checked_sub(1)?;
hash32_concat(self.finalized.get(index)?.as_bytes(), &deposit_root)
hash32_concat(self.finalized.get(index)?.as_slice(), &deposit_root)
} else {
hash32_concat(&deposit_root, ZERO_HASHES.get(height)?)
};

View File

@@ -1,4 +1,5 @@
use crate::test_utils::TestRandom;
use crate::FixedBytesExtended;
use crate::Hash256;
use derivative::Derivative;
use rand::RngCore;
@@ -20,7 +21,7 @@ use std::fmt;
)]
#[derivative(Debug = "transparent")]
#[serde(transparent)]
pub struct ExecutionBlockHash(pub Hash256);
pub struct ExecutionBlockHash(#[serde(with = "serde_utils::b256_hex")] pub Hash256);
impl ExecutionBlockHash {
pub fn zero() -> Self {

View File

@@ -74,14 +74,14 @@ impl ExecutionBlockHeader {
transactions_root: rlp_transactions_root,
receipts_root: payload.receipts_root(),
logs_bloom: payload.logs_bloom().clone().into(),
difficulty: Uint256::zero(),
number: payload.block_number().into(),
gas_limit: payload.gas_limit().into(),
gas_used: payload.gas_used().into(),
difficulty: Uint256::ZERO,
number: Uint256::saturating_from(payload.block_number()),
gas_limit: Uint256::saturating_from(payload.gas_limit()),
gas_used: Uint256::saturating_from(payload.gas_used()),
timestamp: payload.timestamp(),
extra_data: payload.extra_data().clone().into(),
mix_hash: payload.prev_randao(),
nonce: Hash64::zero(),
nonce: Hash64::ZERO,
base_fee_per_gas: payload.base_fee_per_gas(),
withdrawals_root: rlp_withdrawals_root,
blob_gas_used: rlp_blob_gas_used,
@@ -101,15 +101,15 @@ pub struct EncodableExecutionBlockHeader<'a> {
pub transactions_root: &'a [u8],
pub receipts_root: &'a [u8],
pub logs_bloom: &'a [u8],
pub difficulty: alloy_primitives::U256,
pub number: alloy_primitives::U256,
pub gas_limit: alloy_primitives::U256,
pub gas_used: alloy_primitives::U256,
pub difficulty: Uint256,
pub number: Uint256,
pub gas_limit: Uint256,
pub gas_used: Uint256,
pub timestamp: u64,
pub extra_data: &'a [u8],
pub mix_hash: &'a [u8],
pub nonce: &'a [u8],
pub base_fee_per_gas: alloy_primitives::U256,
pub base_fee_per_gas: Uint256,
pub withdrawals_root: Option<&'a [u8]>,
pub blob_gas_used: Option<u64>,
pub excess_blob_gas: Option<u64>,
@@ -119,45 +119,33 @@ pub struct EncodableExecutionBlockHeader<'a> {
impl<'a> From<&'a ExecutionBlockHeader> for EncodableExecutionBlockHeader<'a> {
fn from(header: &'a ExecutionBlockHeader) -> Self {
let mut encodable = Self {
parent_hash: header.parent_hash.as_bytes(),
ommers_hash: header.ommers_hash.as_bytes(),
beneficiary: header.beneficiary.as_bytes(),
state_root: header.state_root.as_bytes(),
transactions_root: header.transactions_root.as_bytes(),
receipts_root: header.receipts_root.as_bytes(),
parent_hash: header.parent_hash.as_slice(),
ommers_hash: header.ommers_hash.as_slice(),
beneficiary: header.beneficiary.as_slice(),
state_root: header.state_root.as_slice(),
transactions_root: header.transactions_root.as_slice(),
receipts_root: header.receipts_root.as_slice(),
logs_bloom: header.logs_bloom.as_slice(),
difficulty: U256Shim(header.difficulty).into(),
number: U256Shim(header.number).into(),
gas_limit: U256Shim(header.gas_limit).into(),
gas_used: U256Shim(header.gas_used).into(),
difficulty: header.difficulty,
number: header.number,
gas_limit: header.gas_limit,
gas_used: header.gas_used,
timestamp: header.timestamp,
extra_data: header.extra_data.as_slice(),
mix_hash: header.mix_hash.as_bytes(),
nonce: header.nonce.as_bytes(),
base_fee_per_gas: U256Shim(header.base_fee_per_gas).into(),
mix_hash: header.mix_hash.as_slice(),
nonce: header.nonce.as_slice(),
base_fee_per_gas: header.base_fee_per_gas,
withdrawals_root: None,
blob_gas_used: header.blob_gas_used,
excess_blob_gas: header.excess_blob_gas,
parent_beacon_block_root: None,
};
if let Some(withdrawals_root) = &header.withdrawals_root {
encodable.withdrawals_root = Some(withdrawals_root.as_bytes());
encodable.withdrawals_root = Some(withdrawals_root.as_slice());
}
if let Some(parent_beacon_block_root) = &header.parent_beacon_block_root {
encodable.parent_beacon_block_root = Some(parent_beacon_block_root.as_bytes())
encodable.parent_beacon_block_root = Some(parent_beacon_block_root.as_slice())
}
encodable
}
}
// TODO(alloy) this shim can be removed once we fully migrate
// from ethereum types to alloy primitives
struct U256Shim(Uint256);
impl From<U256Shim> for alloy_primitives::U256 {
fn from(value: U256Shim) -> Self {
let mut buffer: [u8; 32] = [0; 32];
value.0.to_little_endian(&mut buffer);
Self::from_le_slice(&buffer)
}
}

View File

@@ -57,6 +57,7 @@ pub struct ExecutionPayload<E: EthSpec> {
#[superstruct(getter(copy))]
pub parent_hash: ExecutionBlockHash,
#[superstruct(getter(copy))]
#[serde(with = "serde_utils::address_hex")]
pub fee_recipient: Address,
#[superstruct(getter(copy))]
pub state_root: Hash256,

View File

@@ -47,6 +47,7 @@ pub struct ExecutionPayloadHeader<E: EthSpec> {
#[superstruct(getter(copy))]
pub parent_hash: ExecutionBlockHash,
#[superstruct(getter(copy))]
#[serde(with = "serde_utils::address_hex")]
pub fee_recipient: Address,
#[superstruct(getter(copy))]
pub state_root: Hash256,

View File

@@ -90,7 +90,11 @@ impl From<GraffitiString> for Graffiti {
graffiti
.get_mut(..graffiti_len)
.expect("graffiti_len <= GRAFFITI_BYTES_LEN")
.copy_from_slice(graffiti_bytes);
.copy_from_slice(
graffiti_bytes
.get(..graffiti_len)
.expect("graffiti_len <= GRAFFITI_BYTES_LEN"),
);
graffiti.into()
}
}
@@ -180,6 +184,6 @@ impl TreeHash for Graffiti {
impl TestRandom for Graffiti {
fn random_for_test(rng: &mut impl RngCore) -> Self {
Self::from(Hash256::random_for_test(rng).to_fixed_bytes())
Self::from(Hash256::random_for_test(rng).0)
}
}

View File

@@ -109,8 +109,6 @@ pub mod light_client_header;
pub mod non_zero_usize;
pub mod runtime_var_list;
use ethereum_types::{H160, H256};
pub use crate::activation_queue::ActivationQueue;
pub use crate::aggregate_and_proof::{
AggregateAndProof, AggregateAndProofBase, AggregateAndProofElectra, AggregateAndProofRef,
@@ -254,17 +252,18 @@ pub use crate::voluntary_exit::VoluntaryExit;
pub use crate::withdrawal::Withdrawal;
pub use crate::withdrawal_credentials::WithdrawalCredentials;
pub use crate::withdrawal_request::WithdrawalRequest;
pub use fixed_bytes::FixedBytesExtended;
pub type CommitteeIndex = u64;
pub type Hash256 = H256;
pub type Uint256 = ethereum_types::U256;
pub type Address = H160;
pub type Hash256 = fixed_bytes::Hash256;
pub type Uint256 = fixed_bytes::Uint256;
pub type Address = fixed_bytes::Address;
pub type ForkVersion = [u8; 4];
pub type BLSFieldElement = Uint256;
pub type Blob<E> = FixedVector<u8, <E as EthSpec>::BytesPerBlob>;
pub type KzgProofs<E> = VariableList<KzgProof, <E as EthSpec>::MaxBlobCommitmentsPerBlock>;
pub type VersionedHash = Hash256;
pub type Hash64 = ethereum_types::H64;
pub type Hash64 = alloy_primitives::B64;
pub use bls::{
AggregatePublicKey, AggregateSignature, Keypair, PublicKey, PublicKeyBytes, SecretKey,

View File

@@ -9,5 +9,6 @@ pub struct ProposerPreparationData {
#[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64,
/// The fee-recipient address.
#[serde(with = "serde_utils::address_hex")]
pub fee_recipient: Address,
}

View File

@@ -1,5 +1,6 @@
//! Identifies each shard by an integer identifier.
use crate::{AttestationRef, ChainSpec, CommitteeIndex, Epoch, EthSpec, Slot};
use alloy_primitives::{bytes::Buf, U256};
use safe_arith::{ArithError, SafeArith};
use serde::{Deserialize, Serialize};
use std::ops::{Deref, DerefMut};
@@ -77,7 +78,7 @@ impl SubnetId {
/// along with the first epoch in which these subscriptions are no longer valid.
#[allow(clippy::arithmetic_side_effects)]
pub fn compute_subnets_for_epoch<E: EthSpec>(
node_id: ethereum_types::U256,
raw_node_id: [u8; 32],
epoch: Epoch,
spec: &ChainSpec,
) -> Result<(impl Iterator<Item = SubnetId>, Epoch), &'static str> {
@@ -85,10 +86,13 @@ impl SubnetId {
let subscription_duration = spec.epochs_per_subnet_subscription;
let prefix_bits = spec.attestation_subnet_prefix_bits as u64;
let shuffling_prefix_bits = spec.attestation_subnet_shuffling_prefix_bits as u64;
let node_id = U256::from_be_slice(&raw_node_id);
// calculate the prefixes used to compute the subnet and shuffling
let node_id_prefix = (node_id >> (256 - prefix_bits)).as_u64();
let shuffling_prefix = (node_id >> (256 - (prefix_bits + shuffling_prefix_bits))).as_u64();
let node_id_prefix = (node_id >> (256 - prefix_bits)).as_le_slice().get_u64_le();
let shuffling_prefix = (node_id >> (256 - (prefix_bits + shuffling_prefix_bits)))
.as_le_slice()
.get_u64_le();
// number of groups the shuffling creates
let shuffling_groups = 1 << shuffling_prefix_bits;
@@ -170,6 +174,8 @@ impl AsRef<str> for SubnetId {
#[cfg(test)]
mod tests {
use crate::Uint256;
use super::*;
/// A set of tests compared to the python specification
@@ -188,7 +194,7 @@ mod tests {
"60930578857433095740782970114409273483106482059893286066493409689627770333527",
"103822458477361691467064888613019442068586830412598673713899771287914656699997",
]
.map(|v| ethereum_types::U256::from_dec_str(v).unwrap());
.map(|v| Uint256::from_str_radix(v, 10).unwrap().to_be_bytes::<32>());
let epochs = [
54321u64, 1017090249, 1827566880, 846255942, 766597383, 1204990115, 1616209495,
@@ -222,7 +228,7 @@ mod tests {
for x in 0..node_ids.len() {
println!("Test: {}", x);
println!(
"NodeId: {}\n Epoch: {}\n, expected_update_time: {}\n, expected_subnets: {:?}",
"NodeId: {:?}\n Epoch: {}\n, expected_update_time: {}\n, expected_subnets: {:?}",
node_ids[x], epochs[x], expected_valid_time[x], expected_subnets[x]
);

View File

@@ -105,7 +105,7 @@ impl From<Signature> for SyncSelectionProof {
#[cfg(test)]
mod test {
use super::*;
use crate::MainnetEthSpec;
use crate::{FixedBytesExtended, MainnetEthSpec};
use eth2_interop_keypairs::keypair;
#[test]

View File

@@ -4,6 +4,6 @@ impl TestRandom for Uint256 {
fn random_for_test(rng: &mut impl RngCore) -> Self {
let mut key_bytes = [0; 32];
rng.fill_bytes(&mut key_bytes);
Self::from_little_endian(&key_bytes[..])
Self::from_le_slice(&key_bytes[..])
}
}

View File

@@ -1,6 +1,6 @@
use crate::{
test_utils::TestRandom, Address, BeaconState, ChainSpec, Checkpoint, Epoch, EthSpec, ForkName,
Hash256, PublicKeyBytes,
test_utils::TestRandom, Address, BeaconState, ChainSpec, Checkpoint, Epoch, EthSpec,
FixedBytesExtended, ForkName, Hash256, PublicKeyBytes,
};
use serde::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
@@ -129,7 +129,7 @@ impl Validator {
/// Returns `true` if the validator has eth1 withdrawal credential.
pub fn has_eth1_withdrawal_credential(&self, spec: &ChainSpec) -> bool {
self.withdrawal_credentials
.as_bytes()
.as_slice()
.first()
.map(|byte| *byte == spec.eth1_address_withdrawal_prefix_byte)
.unwrap_or(false)
@@ -145,7 +145,7 @@ impl Validator {
self.has_execution_withdrawal_credential(spec)
.then(|| {
self.withdrawal_credentials
.as_bytes()
.as_slice()
.get(12..)
.map(Address::from_slice)
})
@@ -158,7 +158,7 @@ impl Validator {
pub fn change_withdrawal_credentials(&mut self, execution_address: &Address, spec: &ChainSpec) {
let mut bytes = [0u8; 32];
bytes[0] = spec.eth1_address_withdrawal_prefix_byte;
bytes[12..].copy_from_slice(execution_address.as_bytes());
bytes[12..].copy_from_slice(execution_address.as_slice());
self.withdrawal_credentials = Hash256::from(bytes);
}
@@ -283,7 +283,7 @@ impl Default for Validator {
fn default() -> Self {
Self {
pubkey: PublicKeyBytes::empty(),
withdrawal_credentials: Hash256::default(),
withdrawal_credentials: Hash256::zero(),
activation_eligibility_epoch: Epoch::from(u64::MAX),
activation_epoch: Epoch::from(u64::MAX),
exit_epoch: Epoch::from(u64::MAX),
@@ -299,7 +299,7 @@ pub fn is_compounding_withdrawal_credential(
spec: &ChainSpec,
) -> bool {
withdrawal_credentials
.as_bytes()
.as_slice()
.first()
.map(|prefix_byte| *prefix_byte == spec.compounding_withdrawal_prefix_byte)
.unwrap_or(false)

View File

@@ -12,6 +12,7 @@ pub struct SignedValidatorRegistrationData {
#[derive(PartialEq, Debug, Serialize, Deserialize, Clone, Encode, Decode, TreeHash)]
pub struct ValidatorRegistrationData {
#[serde(with = "serde_utils::address_hex")]
pub fee_recipient: Address,
#[serde(with = "serde_utils::quoted_u64")]
pub gas_limit: u64,

View File

@@ -24,6 +24,7 @@ pub struct Withdrawal {
pub index: u64,
#[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64,
#[serde(with = "serde_utils::address_hex")]
pub address: Address,
#[serde(with = "serde_utils::quoted_u64")]
pub amount: u64,

View File

@@ -13,7 +13,7 @@ impl WithdrawalCredentials {
pub fn eth1(withdrawal_address: Address, spec: &ChainSpec) -> Self {
let mut withdrawal_credentials = [0; 32];
withdrawal_credentials[0] = spec.eth1_address_withdrawal_prefix_byte;
withdrawal_credentials[12..].copy_from_slice(withdrawal_address.as_bytes());
withdrawal_credentials[12..].copy_from_slice(withdrawal_address.as_slice());
Self(Hash256::from_slice(&withdrawal_credentials))
}
}
@@ -39,7 +39,7 @@ mod test {
get_withdrawal_credentials(&keypair.pk, spec.bls_withdrawal_prefix_byte);
let hash: Hash256 = credentials.into();
assert_eq!(hash[0], spec.bls_withdrawal_prefix_byte);
assert_eq!(hash.as_bytes(), &manually_generated_credentials);
assert_eq!(hash.as_slice(), &manually_generated_credentials);
}
#[test]

View File

@@ -20,6 +20,7 @@ use tree_hash_derive::TreeHash;
TestRandom,
)]
pub struct WithdrawalRequest {
#[serde(with = "serde_utils::address_hex")]
pub source_address: Address,
pub validator_pubkey: PublicKeyBytes,
#[serde(with = "serde_utils::quoted_u64")]