Merge branch 'unstable' into dvt

This commit is contained in:
chonghe
2025-04-15 10:41:10 +08:00
committed by GitHub
148 changed files with 4415 additions and 3324 deletions

View File

@@ -824,6 +824,26 @@ impl BeaconNodeHttpClient {
self.get_opt(path).await
}
/// `GET beacon/states/{state_id}/pending_consolidations`
///
/// Returns `Ok(None)` on a 404 error.
pub async fn get_beacon_states_pending_consolidations(
&self,
state_id: StateId,
) -> Result<Option<ExecutionOptimisticFinalizedResponse<Vec<PendingConsolidation>>>, Error>
{
let mut path = self.eth_path(V1)?;
path.path_segments_mut()
.map_err(|()| Error::InvalidUrl(self.server.clone()))?
.push("beacon")
.push("states")
.push(&state_id.to_string())
.push("pending_consolidations");
self.get_opt(path).await
}
/// `GET beacon/light_client/updates`
///
/// Returns `Ok(None)` on a 404 error.

View File

@@ -104,8 +104,8 @@ impl std::fmt::Display for SyncState {
match self {
SyncState::SyncingFinalized { .. } => write!(f, "Syncing Finalized Chain"),
SyncState::SyncingHead { .. } => write!(f, "Syncing Head Chain"),
SyncState::Synced { .. } => write!(f, "Synced"),
SyncState::Stalled { .. } => write!(f, "Stalled"),
SyncState::Synced => write!(f, "Synced"),
SyncState::Stalled => write!(f, "Stalled"),
SyncState::SyncTransition => write!(f, "Evaluating known peers"),
SyncState::BackFillSyncing { .. } => write!(f, "Syncing Historical Blocks"),
}

View File

@@ -18,7 +18,9 @@ use std::fmt::{self, Display};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use test_random_derive::TestRandom;
use types::beacon_block_body::KzgCommitments;
use types::test_utils::TestRandom;
pub use types::*;
#[cfg(feature = "lighthouse")]
@@ -802,13 +804,13 @@ pub struct LightClientUpdatesQuery {
}
#[derive(Encode, Decode)]
pub struct LightClientUpdateSszResponse {
pub response_chunk_len: Vec<u8>,
pub response_chunk: Vec<u8>,
pub struct LightClientUpdateResponseChunk {
pub response_chunk_len: u64,
pub response_chunk: LightClientUpdateResponseChunkInner,
}
#[derive(Encode, Decode)]
pub struct LightClientUpdateResponseChunk {
pub struct LightClientUpdateResponseChunkInner {
pub context: [u8; 4],
pub payload: Vec<u8>,
}
@@ -2015,11 +2017,11 @@ impl<E: EthSpec> ForkVersionDeserialize for FullPayloadContents<E> {
fork_name: ForkName,
) -> Result<Self, D::Error> {
if fork_name.deneb_enabled() {
serde_json::from_value(value)
ExecutionPayloadAndBlobs::deserialize_by_fork::<'de, D>(value, fork_name)
.map(Self::PayloadAndBlobs)
.map_err(serde::de::Error::custom)
} else if fork_name.bellatrix_enabled() {
serde_json::from_value(value)
ExecutionPayload::deserialize_by_fork::<'de, D>(value, fork_name)
.map(Self::Payload)
.map_err(serde::de::Error::custom)
} else {
@@ -2037,6 +2039,28 @@ pub struct ExecutionPayloadAndBlobs<E: EthSpec> {
pub blobs_bundle: BlobsBundle<E>,
}
impl<E: EthSpec> ForkVersionDeserialize for ExecutionPayloadAndBlobs<E> {
// Fork-aware JSON deserialization. The schema of `execution_payload`
// depends on `fork_name`, so it is first captured as a raw
// `serde_json::Value` and then decoded via the payload's own
// fork-aware deserializer instead of plain `serde_json::from_value`.
fn deserialize_by_fork<'de, D: Deserializer<'de>>(
value: Value,
fork_name: ForkName,
) -> Result<Self, D::Error> {
// Intermediate shape: `execution_payload` stays opaque (fork-dependent),
// while `blobs_bundle` is decoded directly since its schema is the same
// for all forks that carry blobs.
#[derive(Deserialize)]
#[serde(bound = "E: EthSpec")]
struct Helper<E: EthSpec> {
execution_payload: serde_json::Value,
blobs_bundle: BlobsBundle<E>,
}
// Map the serde_json error into `D::Error` so this works for any
// deserializer type, not just serde_json's.
let helper: Helper<E> = serde_json::from_value(value).map_err(serde::de::Error::custom)?;
Ok(Self {
execution_payload: ExecutionPayload::deserialize_by_fork::<'de, D>(
helper.execution_payload,
fork_name,
)?,
blobs_bundle: helper.blobs_bundle,
})
}
}
impl<E: EthSpec> ForkVersionDecode for ExecutionPayloadAndBlobs<E> {
fn from_ssz_bytes_by_fork(bytes: &[u8], fork_name: ForkName) -> Result<Self, DecodeError> {
let mut builder = ssz::SszDecoderBuilder::new(bytes);
@@ -2067,7 +2091,7 @@ pub enum ContentType {
Ssz,
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, Encode, Decode)]
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, Encode, Decode, TestRandom)]
#[serde(bound = "E: EthSpec")]
pub struct BlobsBundle<E: EthSpec> {
pub commitments: KzgCommitments<E>,
@@ -2162,6 +2186,10 @@ pub struct StandardAttestationRewards {
#[cfg(test)]
mod test {
use std::fmt::Debug;
use types::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use super::*;
#[test]
@@ -2175,4 +2203,107 @@ mod test {
let y: ValidatorId = serde_json::from_str(pubkey_str).unwrap();
assert_eq!(serde_json::to_string(&y).unwrap(), pubkey_str);
}
#[test]
// Round-trips one `ExecutionPayload` variant per post-merge fork through
// JSON and back via `deserialize_by_fork`, checking the fork-aware path
// reproduces the original value for every known fork.
fn test_execution_payload_execution_payload_deserialize_by_fork() {
// Fixed seed keeps the randomly generated payloads deterministic.
let rng = &mut XorShiftRng::from_seed([42; 16]);
// NOTE: the order of `random_for_test(rng)` calls matters — each call
// advances the shared RNG state, so do not reorder these entries.
let payloads = [
ExecutionPayload::Bellatrix(
ExecutionPayloadBellatrix::<MainnetEthSpec>::random_for_test(rng),
),
ExecutionPayload::Capella(ExecutionPayloadCapella::<MainnetEthSpec>::random_for_test(
rng,
)),
ExecutionPayload::Deneb(ExecutionPayloadDeneb::<MainnetEthSpec>::random_for_test(
rng,
)),
ExecutionPayload::Electra(ExecutionPayloadElectra::<MainnetEthSpec>::random_for_test(
rng,
)),
ExecutionPayload::Fulu(ExecutionPayloadFulu::<MainnetEthSpec>::random_for_test(rng)),
];
// Skip the first two forks, which have no execution payload.
// NOTE(review): assumes `ForkName::list_all()` is ordered oldest-first
// with exactly two pre-merge forks — confirm against its definition.
let merged_forks = &ForkName::list_all()[2..];
// Guard against a new fork being added without a payload entry above:
// the zip below would otherwise silently drop the extra fork.
assert_eq!(
payloads.len(),
merged_forks.len(),
"we should test every known fork; add new fork variant to payloads above"
);
for (payload, &fork_name) in payloads.into_iter().zip(merged_forks) {
// Sanity-check the pairing of payload variant to fork name.
assert_eq!(payload.fork_name(), fork_name);
let payload_str = serde_json::to_string(&payload).unwrap();
let mut de = serde_json::Deserializer::from_str(&payload_str);
generic_deserialize_by_fork(&mut de, payload, fork_name);
}
}
#[test]
// Round-trips `ExecutionPayloadAndBlobs` through JSON and back via its
// fork-aware deserializer for every fork that supports blobs.
fn test_execution_payload_and_blobs_deserialize_by_fork() {
// Fixed seed keeps the randomly generated payloads deterministic.
let rng = &mut XorShiftRng::from_seed([42; 16]);
// NOTE: the order of `random_for_test(rng)` calls matters — each call
// advances the shared RNG state, so do not reorder these entries.
let payloads = [
{
let execution_payload =
ExecutionPayload::Deneb(
ExecutionPayloadDeneb::<MainnetEthSpec>::random_for_test(rng),
);
let blobs_bundle = BlobsBundle::random_for_test(rng);
ExecutionPayloadAndBlobs {
execution_payload,
blobs_bundle,
}
},
{
let execution_payload =
ExecutionPayload::Electra(
ExecutionPayloadElectra::<MainnetEthSpec>::random_for_test(rng),
);
let blobs_bundle = BlobsBundle::random_for_test(rng);
ExecutionPayloadAndBlobs {
execution_payload,
blobs_bundle,
}
},
{
let execution_payload =
ExecutionPayload::Fulu(
ExecutionPayloadFulu::<MainnetEthSpec>::random_for_test(rng),
);
let blobs_bundle = BlobsBundle::random_for_test(rng);
ExecutionPayloadAndBlobs {
execution_payload,
blobs_bundle,
}
},
];
// Skip forks without blobs.
// NOTE(review): assumes `ForkName::list_all()` is ordered oldest-first
// with exactly four pre-blob forks — confirm against its definition.
let blob_forks = &ForkName::list_all()[4..];
// Guard against a new blob-carrying fork being added without an entry
// above: the zip below would otherwise silently drop the extra fork.
assert_eq!(
payloads.len(),
blob_forks.len(),
"we should test every known fork; add new fork variant to payloads above"
);
for (payload, &fork_name) in payloads.into_iter().zip(blob_forks) {
// Sanity-check the pairing of payload variant to fork name.
assert_eq!(payload.execution_payload.fork_name(), fork_name);
let payload_str = serde_json::to_string(&payload).unwrap();
let mut de = serde_json::Deserializer::from_str(&payload_str);
generic_deserialize_by_fork(&mut de, payload, fork_name);
}
}
// Shared round-trip helper for the tests above: drains `deserializer` into
// a raw JSON `Value`, re-decodes it through the fork-aware
// `ForkVersionDeserialize` path, and asserts the result equals `original`.
fn generic_deserialize_by_fork<
'de,
D: Deserializer<'de>,
O: ForkVersionDeserialize + PartialEq + Debug,
>(
deserializer: D,
original: O,
fork_name: ForkName,
) {
// `unwrap` is fine here: a decode failure IS the test failure.
let val = Value::deserialize(deserializer).unwrap();
let roundtrip = O::deserialize_by_fork::<'de, D>(val, fork_name).unwrap();
assert_eq!(original, roundtrip);
}
}