Cell Dissemination (Partial messages) (#8314)

- https://github.com/ethereum/consensus-specs/pull/4558
- https://eips.ethereum.org/EIPS/eip-8136


  


Co-Authored-By: Daniel Knopik <daniel@dknopik.de>

Co-Authored-By: Pawan Dhananjay <pawandhananjay@gmail.com>

Co-Authored-By: Jimmy Chen <jchen.tc@gmail.com>
This commit is contained in:
Daniel Knopik
2026-04-23 20:52:28 +02:00
committed by GitHub
parent e086628efe
commit 8a384ff445
54 changed files with 4797 additions and 630 deletions

View File

@@ -21,6 +21,8 @@ ethereum_ssz_derive = { workspace = true }
fixed_bytes = { workspace = true }
fnv = { workspace = true }
futures = { workspace = true }
# Enable partial messages feature
gossipsub = { package = "libp2p-gossipsub", git = "https://github.com/libp2p/rust-libp2p.git", features = ["partial_messages"] }
hex = { workspace = true }
if-addrs = "0.14"
itertools = { workspace = true }

View File

@@ -140,6 +140,9 @@ pub struct Config {
/// Flag for advertising a fake CGC to peers for testing ONLY.
pub advertise_false_custody_group_count: Option<u64>,
/// Whether to enable partial data column support.
pub enable_partial_columns: bool,
}
impl Config {
@@ -364,6 +367,7 @@ impl Default for Config {
inbound_rate_limiter_config: None,
idontwant_message_size_threshold: DEFAULT_IDONTWANT_MESSAGE_SIZE_THRESHOLD,
advertise_false_custody_group_count: None,
enable_partial_columns: false,
}
}
}

View File

@@ -99,7 +99,7 @@ impl std::fmt::Display for ClearDialError<'_> {
pub use crate::types::{
Enr, EnrSyncCommitteeBitfield, GossipTopic, NetworkGlobals, PubsubMessage, Subnet,
SubnetDiscovery,
SubnetDiscovery, decode_partial,
};
pub use prometheus_client;

View File

@@ -83,6 +83,14 @@ pub static FAILED_PUBLISHES_PER_MAIN_TOPIC: LazyLock<Result<IntGaugeVec>> = Lazy
&["topic_hash"],
)
});
/// Gauge counting failed gossipsub partial-message publishes, labelled by topic hash.
pub static FAILED_PARTIAL_PUBLISHES_PER_MAIN_TOPIC: LazyLock<Result<IntGaugeVec>> =
    LazyLock::new(|| {
        try_create_int_gauge_vec(
            "gossipsub_failed_partial_publishes_per_main_topic",
            "Failed gossip partial message publishes",
            &["topic_hash"],
        )
    });
pub static TOTAL_RPC_ERRORS_PER_CLIENT: LazyLock<Result<IntCounterVec>> = LazyLock::new(|| {
try_create_int_counter_vec(
"libp2p_rpc_errors_per_client",

View File

@@ -14,17 +14,19 @@ use crate::rpc::{
GoodbyeReason, HandlerErr, InboundRequestId, Protocol, RPC, RPCError, RPCMessage, RPCReceived,
RequestType, ResponseTermination, RpcResponse, RpcSuccessResponse,
};
use crate::service::partial_column_header_tracker::PartialColumnHeaderTracker;
use crate::types::{
GossipEncoding, GossipKind, GossipTopic, SnappyTransform, Subnet, SubnetDiscovery,
all_topics_at_fork, core_topics_to_subscribe, is_fork_non_core_topic, subnet_from_topic_hash,
GossipEncoding, GossipKind, GossipTopic, OutgoingPartialColumn, SnappyTransform, Subnet,
SubnetDiscovery, all_topics_at_fork, core_topics_to_subscribe, is_fork_non_core_topic,
subnet_from_topic_hash,
};
use crate::{Enr, NetworkGlobals, PubsubMessage, TopicHash, metrics};
use crate::{Enr, NetworkGlobals, PubsubMessage, TopicHash, decode_partial, metrics};
use api_types::{AppRequestId, Response};
use futures::stream::StreamExt;
use gossipsub_scoring_parameters::{PeerScoreSettings, lighthouse_gossip_thresholds};
use libp2p::gossipsub::{
self, IdentTopic as Topic, MessageAcceptance, MessageAuthenticity, MessageId, PublishError,
TopicScoreParams,
self, Event, IdentTopic as Topic, MessageAcceptance, MessageAuthenticity, MessageId,
PublishError, TopicScoreParams,
};
use libp2p::identity::Keypair;
use libp2p::multiaddr::{self, Multiaddr, Protocol as MProtocol};
@@ -40,16 +42,18 @@ use std::pin::Pin;
use std::sync::Arc;
use std::time::Duration;
use tracing::{debug, error, info, trace, warn};
use types::{ChainSpec, ForkName};
use types::{
EnrForkId, EthSpec, ForkContext, Slot, SubnetId, consts::altair::SYNC_COMMITTEE_SUBNET_COUNT,
ChainSpec, DataColumnSubnetId, EnrForkId, EthSpec, ForkContext, ForkName, PartialDataColumn,
PartialDataColumnHeader, Slot, SubnetId, consts::altair::SYNC_COMMITTEE_SUBNET_COUNT,
};
use utils::{Context as ServiceContext, build_transport, strip_peer_id};
pub mod api_types;
mod gossip_cache;
pub mod gossipsub_scoring_parameters;
mod partial_column_header_tracker;
pub mod utils;
/// The number of peers we target per subnet for discovery queries.
pub const TARGET_SUBNET_PEERS: usize = 3;
@@ -99,6 +103,15 @@ pub enum NetworkEvent<E: EthSpec> {
/// The message itself.
message: PubsubMessage<E>,
},
/// A partial data column sidecar received via gossipsub partial protocol.
PartialDataColumnSidecar {
/// The peer from which we received this message.
source: PeerId,
/// The partial column data.
column: Box<PartialDataColumn<E>>,
/// The topic that this message was sent on.
topic: GossipTopic,
},
/// Inform the network to send a Status to this peer.
StatusPeer(PeerId),
NewListenAddr(Multiaddr),
@@ -162,6 +175,7 @@ pub struct Network<E: EthSpec> {
/// The interval for updating gossipsub scores
update_gossipsub_scores: tokio::time::Interval,
gossip_cache: GossipCache,
partial_column_header_tracker: PartialColumnHeaderTracker,
/// This node's PeerId.
pub local_peer_id: PeerId,
}
@@ -505,6 +519,7 @@ impl<E: EthSpec> Network<E> {
score_settings,
update_gossipsub_scores,
gossip_cache,
partial_column_header_tracker: PartialColumnHeaderTracker::new(),
local_peer_id,
};
@@ -804,9 +819,18 @@ impl<E: EthSpec> Network<E> {
.write()
.insert(topic.clone());
let partial = topic
.kind()
.use_partial_messages(self.network_globals.config.as_ref());
let topic: Topic = topic.into();
match self.gossipsub_mut().subscribe(&topic) {
let subscribe_result = if partial {
self.gossipsub_mut().subscribe_partial(&topic, true)
} else {
self.gossipsub_mut().subscribe(&topic)
};
match subscribe_result {
Err(e) => {
warn!(%topic, error = ?e, "Failed to subscribe to topic");
false
@@ -849,6 +873,16 @@ impl<E: EthSpec> Network<E> {
"Attempted to publish duplicate message"
);
}
PublishError::NoPeersSubscribedToTopic
if topic
.kind()
.use_partial_messages(self.network_globals.config.as_ref()) =>
{
debug!(
kind = %topic.kind(),
"No peers supporting full messages"
);
}
ref e => {
warn!(
error = ?e,
@@ -886,6 +920,66 @@ impl<E: EthSpec> Network<E> {
}
}
/// Publishes partial data column sidecars to the gossipsub network.
///
/// This is a no-op unless partial columns are enabled in the network config.
/// Publish failures are logged and recorded in the failed-partial-publish metric.
pub fn publish_partial(
    &mut self,
    columns: Vec<Arc<PartialDataColumn<E>>>,
    header: Arc<PartialDataColumnHeader<E>>,
) {
    if !self.network_globals.config.enable_partial_columns {
        return;
    }
    debug!(
        count = columns.len(),
        "Sending partial data column sidecars"
    );
    for column in columns {
        let subnet_id =
            DataColumnSubnetId::from_column_index(column.index, &self.fork_context.spec);
        let topic = GossipTopic::new(
            GossipKind::DataColumnSidecar(subnet_id),
            GossipEncoding::default(),
            self.enr_fork_id.fork_digest,
        );
        // Shared per-block set of peers that have already been sent the header.
        let sent_set = self
            .partial_column_header_tracker
            .get_for_block(column.block_root);
        let outgoing = OutgoingPartialColumn::new(column, &header, sent_set);
        let libp2p_topic: Topic = topic.clone().into();
        // On success there is nothing further to do for this column.
        let Err(err) = self
            .gossipsub_mut()
            .publish_partial(libp2p_topic, outgoing)
        else {
            continue;
        };
        match &err {
            PublishError::NoPeersSubscribedToTopic => {
                debug!(
                    kind = %topic.kind(),
                    "No peers supporting partial messages"
                );
            }
            e => {
                warn!(
                    error = ?e,
                    kind = %topic.kind(),
                    "Could not publish partial message"
                );
            }
        }
        // Record the failed publish in metrics.
        if let Some(gauge) = metrics::get_int_gauge(
            &metrics::FAILED_PARTIAL_PUBLISHES_PER_MAIN_TOPIC,
            &[&format!("{:?}", topic.kind())],
        ) {
            gauge.inc()
        };
    }
}
/// Informs the gossipsub about the result of a message validation.
/// If the message is valid it will get propagated by gossipsub.
pub fn report_message_validation_result(
@@ -918,6 +1012,29 @@ impl<E: EthSpec> Network<E> {
);
}
/// Informs the gossipsub about the failure of a partial message validation.
pub fn report_partial_message_validation_failure(
    &mut self,
    propagation_source: PeerId,
    topic: GossipTopic,
) {
    // Attribute the rejection to the peer's client implementation in metrics.
    {
        let peers = self.network_globals.peers.read();
        if let Some(info) = peers.peer_info(&propagation_source) {
            let client: &str = info.client().kind.as_ref();
            metrics::inc_counter_vec(
                &metrics::GOSSIP_UNACCEPTED_MESSAGES_PER_CLIENT,
                &[client, "reject"],
            )
        }
    }
    // Penalize the peer at the gossipsub layer.
    let topic_hash = TopicHash::from(Topic::from(topic));
    self.gossipsub_mut()
        .report_invalid_partial(propagation_source, &topic_hash);
}
/// Updates the current gossipsub scoring parameters based on the validator count and current
/// slot.
pub fn update_gossipsub_parameters(
@@ -1290,6 +1407,56 @@ impl<E: EthSpec> Network<E> {
}
}
}
Event::Partial {
topic_hash,
peer_id,
group_id,
message,
..
} => {
let topic = GossipTopic::decode(topic_hash.as_str())
.inspect_err(|error| {
debug!(
topic = ?topic_hash,
error,
"Could not decode gossipsub partial message topic"
);
// punish the peer
self.gossipsub_mut()
.report_invalid_partial(peer_id, &topic_hash);
})
.ok()?;
if let Some(message) = message {
match decode_partial::<E>(&topic, &group_id, &message) {
Err(error) => {
debug!(
topic = ?topic_hash,
error,
"Could not decode gossipsub partial message"
);
//reject the message
self.gossipsub_mut()
.report_invalid_partial(peer_id, &topic_hash);
}
Ok(column) => {
debug!(
block_root = %column.block_root,
index = column.index,
%peer_id,
cells_present = %column.sidecar.cells_present_bitmap,
"Decoded partial message"
);
// Notify the network
return Some(NetworkEvent::PartialDataColumnSidecar {
source: peer_id,
column: Box::new(column),
topic,
});
}
}
}
}
gossipsub::Event::Subscribed { peer_id, topic } => {
if let Ok(topic) = GossipTopic::decode(topic.as_str()) {
if let Some(subnet_id) = topic.subnet_id() {

View File

@@ -0,0 +1,28 @@
use crate::types::HeaderSentSet;
use lru::LruCache;
use parking_lot::Mutex;
use std::collections::HashSet;
use std::num::NonZeroUsize;
use std::sync::Arc;
use types::core::Hash256;
/// Maximum number of distinct block roots tracked at once; least-recently-used
/// entries are evicted beyond this bound.
const MAX_BLOCKS: NonZeroUsize = NonZeroUsize::new(4).unwrap();

/// Tracks, per block root, the shared set of peers that have already been sent
/// the partial data column header, so the header is transmitted at most once
/// per peer.
pub struct PartialColumnHeaderTracker {
    /// LRU map from block root to the shared header-sent peer set.
    blocks: LruCache<Hash256, HeaderSentSet>,
}

impl PartialColumnHeaderTracker {
    /// Creates an empty tracker bounded to `MAX_BLOCKS` block roots.
    pub fn new() -> Self {
        PartialColumnHeaderTracker {
            blocks: LruCache::new(MAX_BLOCKS),
        }
    }

    /// Returns the shared peer set for `hash`, inserting an empty set (and
    /// possibly evicting the least-recently-used block) if absent.
    pub fn get_for_block(&mut self, hash: Hash256) -> HeaderSentSet {
        Arc::clone(
            self.blocks
                .get_or_insert(hash, || Arc::new(Mutex::new(HashSet::new()))),
        )
    }
}

// Clippy `new_without_default`: types with an argument-less `new` should also
// implement `Default`.
impl Default for PartialColumnHeaderTracker {
    fn default() -> Self {
        Self::new()
    }
}

View File

@@ -1,4 +1,5 @@
mod globals;
mod partial;
mod pubsub;
mod subnet;
mod topics;
@@ -13,7 +14,9 @@ pub type Enr = discv5::enr::Enr<discv5::enr::CombinedKey>;
pub use eth2::lighthouse::sync_state::{BackFillState, CustodyBackFillState, SyncState};
pub use globals::NetworkGlobals;
pub use pubsub::{PubsubMessage, SnappyTransform};
pub use partial::HeaderSentSet;
pub use partial::OutgoingPartialColumn;
pub use pubsub::{PubsubMessage, SnappyTransform, decode_partial};
pub use subnet::{Subnet, SubnetDiscovery};
pub use topics::{
GossipEncoding, GossipKind, GossipTopic, TopicConfig, all_topics_at_fork,

View File

@@ -0,0 +1,503 @@
use crate::PeerId;
use itertools::Itertools;
use libp2p::gossipsub::partial_messages::{Metadata, Partial, PartialAction, PartialError};
use parking_lot::Mutex;
use ssz::{Decode, Encode};
use std::collections::HashSet;
use std::fmt::Debug;
use std::sync::Arc;
use tracing::{debug, error};
use types::core::{EthSpec, Hash256};
use types::data::{
CellBitmap, PartialDataColumn, PartialDataColumnHeader, PartialDataColumnPartsMetadata,
PartialDataColumnSidecar, PartialDataColumnSidecarRef,
};
/// Version byte prepended to partial-column group ids (currently the only version).
const PARTIAL_COLUMNS_VERSION_BYTE: u8 = 0;

/// Shared, thread-safe set of peers that have already been sent the header message.
pub type HeaderSentSet = Arc<Mutex<HashSet<PeerId>>>;
/// A partial data column prepared for publishing via gossipsub partial messages.
#[derive(Debug, Clone)]
pub struct OutgoingPartialColumn<E: EthSpec> {
    /// The column data being disseminated.
    partial_column: Arc<PartialDataColumn<E>>,
    /// Our view of which cells are available/requested, cached with its SSZ encoding.
    metadata: MaybeKnownMetadata<E>,
    /// Pre-encoded header-only sidecar message (no cells, no proofs).
    header_message: Vec<u8>,
    /// Peers we have already sent the header message to (shared per block).
    header_sent_set: HeaderSentSet,
}
impl<E: EthSpec> OutgoingPartialColumn<E> {
    /// Builds an outgoing partial column from the cells we hold, the column
    /// header, and the shared per-block set of peers already sent the header.
    pub fn new(
        partial_column: Arc<PartialDataColumn<E>>,
        header: &PartialDataColumnHeader<E>,
        header_sent_set: HeaderSentSet,
    ) -> Self {
        // For now, always request all cells
        let mut requests = partial_column.sidecar.cells_present_bitmap.clone();
        for idx in 0..requests.len() {
            requests
                .set(idx, true)
                .expect("Bound asserted via `len` above");
        }
        // Our metadata: the cells we actually have, plus the all-set request bitmap.
        let metadata = PartialDataColumnPartsMetadata::<E> {
            available: partial_column.sidecar.cells_present_bitmap.clone(),
            requests,
        }
        .into();
        // Pre-encode a header-only sidecar (empty bitmap, no cells or proofs) used
        // to introduce the column to peers that have not yet seen the header.
        let header_message = PartialDataColumnSidecarRef {
            cells_present_bitmap: CellBitmap::<E>::with_capacity(
                partial_column.sidecar.cells_present_bitmap.len(),
            )
            .expect("Taking length from bitmap with same bound"),
            column: vec![],
            kzg_proofs: vec![],
            header: Some(header).into(),
        }
        .as_ssz_bytes();
        OutgoingPartialColumn {
            partial_column,
            metadata,
            header_message,
            header_sent_set,
        }
    }
}
/// Parts metadata for a partial column that may not be known yet.
#[derive(Debug, Clone, PartialEq, Eq)]
enum MaybeKnownMetadata<E: EthSpec> {
    /// No metadata received or derived so far.
    Unknown,
    /// Known metadata, cached alongside its SSZ encoding.
    Known {
        metadata: Box<PartialDataColumnPartsMetadata<E>>,
        encoded: Vec<u8>,
    },
}
impl<E: EthSpec> MaybeKnownMetadata<E> {
    /// Merges `incoming` into the current metadata. Returns `Ok(true)` when
    /// anything changed, `Ok(false)` when `incoming` added no new information.
    fn do_update(
        &mut self,
        incoming: PartialDataColumnPartsMetadata<E>,
    ) -> Result<bool, PartialError> {
        match self {
            // First metadata we see: adopt it wholesale.
            MaybeKnownMetadata::Unknown => {
                *self = MaybeKnownMetadata::Known {
                    encoded: incoming.as_ssz_bytes(),
                    metadata: Box::new(incoming),
                };
                Ok(true)
            }
            MaybeKnownMetadata::Known { metadata, encoded } => {
                // All four bitmaps must share a single length.
                let lengths = [
                    incoming.available.len(),
                    incoming.requests.len(),
                    metadata.available.len(),
                    metadata.requests.len(),
                ];
                if !lengths.into_iter().all_equal() {
                    return Err(PartialError::OutOfRange);
                }
                let merged_available = metadata.available.union(&incoming.available);
                let merged_requests = metadata.requests.union(&incoming.requests);
                let changed = metadata.available != merged_available
                    || metadata.requests != merged_requests;
                if changed {
                    metadata.available = merged_available;
                    metadata.requests = merged_requests;
                    // Keep the cached encoding in sync with the merged bitmaps.
                    *encoded = metadata.as_ssz_bytes();
                }
                Ok(changed)
            }
        }
    }
}
impl<E: EthSpec> Metadata for MaybeKnownMetadata<E> {
    /// The cached SSZ encoding, or an empty slice while nothing is known.
    fn as_slice(&self) -> &[u8] {
        if let MaybeKnownMetadata::Known { encoded, .. } = self {
            encoded
        } else {
            &[]
        }
    }

    /// Merges SSZ-encoded parts metadata received from a peer.
    fn update(&mut self, data: &[u8]) -> Result<bool, PartialError> {
        match PartialDataColumnPartsMetadata::from_ssz_bytes(data) {
            Ok(parsed) => self.do_update(parsed),
            Err(_) => Err(PartialError::InvalidFormat),
        }
    }

    /// Merges the metadata implied by a full partial-sidecar message; empty
    /// data is treated as a no-op.
    fn update_from_data(&mut self, data: &[u8]) -> Result<(), PartialError> {
        if data.is_empty() {
            return Ok(());
        }
        let sidecar = PartialDataColumnSidecar::<E>::from_ssz_bytes(data)
            .map_err(|_| PartialError::InvalidFormat)?;
        let present = sidecar.cells_present_bitmap;
        self.do_update(PartialDataColumnPartsMetadata {
            available: present.clone(),
            requests: present,
        })
        .map(|_| ())
    }
}
impl<E: EthSpec> From<PartialDataColumnPartsMetadata<E>> for MaybeKnownMetadata<E> {
fn from(metadata: PartialDataColumnPartsMetadata<E>) -> Self {
Self::Known {
encoded: metadata.as_ssz_bytes(),
metadata: Box::new(metadata),
}
}
}
impl<E: EthSpec> Partial for OutgoingPartialColumn<E> {
    /// Group id: one version byte followed by the 32-byte block root.
    fn group_id(&self) -> Vec<u8> {
        let mut group_id = Vec::with_capacity(Hash256::len_bytes() + 1);
        group_id.push(PARTIAL_COLUMNS_VERSION_BYTE);
        group_id.extend_from_slice(self.partial_column.block_root.as_slice());
        group_id
    }

    fn metadata(&self) -> Box<dyn Metadata> {
        Box::new(self.metadata.clone())
    }

    /// Decides what to send to `peer_id` given the metadata (if any) we hold
    /// for it, and whether we still need cells from that peer.
    fn partial_action_from_metadata(
        &self,
        peer_id: PeerId,
        metadata: Option<&[u8]>,
    ) -> Result<PartialAction, PartialError> {
        match metadata {
            None => {
                // send the header-only message to the peer if we have not yet
                let send = self.header_sent_set.lock().insert(peer_id).then(|| {
                    (
                        self.header_message.clone(),
                        Box::new(MaybeKnownMetadata::<E>::Unknown) as Box<dyn Metadata>,
                    )
                });
                debug!(
                    peer=%peer_id,
                    group_id=%self.partial_column.block_root,
                    column_index=self.partial_column.index,
                    sending_header=send.is_some(),
                    "Partial send: No metadata"
                );
                Ok(PartialAction { need: false, send })
            }
            // Empty metadata: nothing to exchange with this peer.
            Some([]) => Ok(PartialAction {
                need: false,
                send: None,
            }),
            Some(metadata) => {
                // The peer is apparently aware of the header, make sure we track that:
                self.header_sent_set.lock().insert(peer_id);
                let peer_metadata = PartialDataColumnPartsMetadata::<E>::from_ssz_bytes(metadata)
                    .map_err(|_| PartialError::InvalidFormat)?;
                // The peer's bitmaps must match our cell count.
                let expected_len = self.partial_column.sidecar.cells_present_bitmap.len();
                if peer_metadata.available.len() != expected_len
                    || peer_metadata.requests.len() != expected_len
                {
                    return Err(PartialError::InvalidFormat);
                }
                // We still need cells if the peer has any we don't.
                let need = !peer_metadata
                    .available
                    .is_subset(&self.partial_column.sidecar.cells_present_bitmap);
                // Cells the peer requested but does not yet have.
                let want = peer_metadata.requests.difference(&peer_metadata.available);
                let send = self
                    .partial_column
                    .sidecar
                    .filter(|idx| want.get(idx).expect("Bound checked above"))
                    .map_err(|err| {
                        error!(?err, "Unexpected error filtering sidecar");
                        PartialError::InvalidFormat
                    })?
                    .map(|sidecar| {
                        debug!(
                            peer=%peer_id,
                            group_id=%self.partial_column.block_root,
                            column_index=self.partial_column.index,
                            metadata=%peer_metadata,
                            sending=%sidecar.cells_present_bitmap,
                            "Partial send: Sending"
                        );
                        (
                            sidecar.as_ssz_bytes(),
                            Box::new(MaybeKnownMetadata::<E>::from(
                                PartialDataColumnPartsMetadata {
                                    available: peer_metadata
                                        .available
                                        .union(&sidecar.cells_present_bitmap),
                                    requests: peer_metadata
                                        .requests
                                        .union(&sidecar.cells_present_bitmap),
                                },
                            )) as Box<dyn Metadata + 'static>,
                        )
                    });
                if send.is_none() {
                    debug!(
                        peer=%peer_id,
                        group_id=%self.partial_column.block_root,
                        column_index=self.partial_column.index,
                        metadata=%peer_metadata,
                        "Partial send: Nothing to send"
                    );
                }
                Ok(PartialAction { need, send })
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use bls::Signature;
    use fixed_bytes::FixedBytesExtended;
    use libp2p::identity::Keypair;
    use ssz_types::FixedVector;
    use types::block::{BeaconBlockHeader, SignedBeaconBlockHeader};
    use types::core::{MinimalEthSpec, Slot};
    use types::data::PartialDataColumnHeader;

    type E = MinimalEthSpec;

    /// Builds a cell whose first byte is `marker`, so cells are distinguishable.
    fn make_cell(marker: u8) -> types::Cell<E> {
        let mut cell = types::Cell::<E>::default();
        cell[0] = marker;
        cell
    }

    /// Builds a header with `num_commitments` zeroed commitments and a dummy
    /// signed block header.
    fn make_header(num_commitments: usize) -> PartialDataColumnHeader<E> {
        PartialDataColumnHeader {
            kzg_commitments: vec![types::KzgCommitment([0u8; 48]); num_commitments]
                .try_into()
                .unwrap(),
            signed_block_header: SignedBeaconBlockHeader {
                message: BeaconBlockHeader {
                    slot: Slot::new(1),
                    proposer_index: 0,
                    parent_root: Hash256::zero(),
                    state_root: Hash256::zero(),
                    body_root: Hash256::zero(),
                },
                signature: Signature::empty(),
            },
            kzg_commitments_inclusion_proof: FixedVector::new(
                vec![Hash256::zero(); E::kzg_commitments_inclusion_proof_depth()],
            )
            .unwrap(),
        }
    }

    /// Builds a partial column for `block_root` with `total_blobs` cells, of
    /// which only `present_indices` are populated.
    fn make_partial_column(
        block_root: Hash256,
        total_blobs: usize,
        present_indices: &[usize],
    ) -> Arc<PartialDataColumn<E>> {
        let mut bitmap = CellBitmap::<E>::with_capacity(total_blobs).unwrap();
        for &idx in present_indices {
            bitmap.set(idx, true).unwrap();
        }
        Arc::new(PartialDataColumn {
            block_root,
            index: 0,
            sidecar: PartialDataColumnSidecar {
                cells_present_bitmap: bitmap,
                column: present_indices
                    .iter()
                    .map(|&idx| make_cell(idx as u8))
                    .collect::<Vec<_>>()
                    .try_into()
                    .unwrap(),
                kzg_proofs: present_indices
                    .iter()
                    .map(|_| types::KzgProof::empty())
                    .collect::<Vec<_>>()
                    .try_into()
                    .unwrap(),
                header: None.into(),
            },
        })
    }

    /// A fresh ed25519-derived peer id for each call.
    fn random_peer_id() -> PeerId {
        let keypair = Keypair::generate_ed25519();
        PeerId::from(keypair.public())
    }

    // -- MaybeKnownMetadata tests --

    /// Updating from `Unknown` adopts the received metadata and reports change.
    #[test]
    fn update_from_unknown_initializes() {
        let mut meta = MaybeKnownMetadata::<E>::Unknown;
        let mut bitmap = CellBitmap::<E>::with_capacity(4).unwrap();
        bitmap.set(0, true).unwrap();
        let received = PartialDataColumnPartsMetadata {
            available: bitmap.clone(),
            requests: bitmap,
        };
        let changed = meta.do_update(received).unwrap();
        assert!(changed);
        assert!(matches!(meta, MaybeKnownMetadata::Known { .. }));
    }

    /// Updates union the available/request bitmaps rather than replacing them.
    #[test]
    fn update_unions_bitmaps() {
        let mut bitmap1 = CellBitmap::<E>::with_capacity(4).unwrap();
        bitmap1.set(0, true).unwrap();
        let mut meta: MaybeKnownMetadata<E> = PartialDataColumnPartsMetadata {
            available: bitmap1.clone(),
            requests: bitmap1,
        }
        .into();
        let mut bitmap2 = CellBitmap::<E>::with_capacity(4).unwrap();
        bitmap2.set(1, true).unwrap();
        let changed = meta
            .do_update(PartialDataColumnPartsMetadata {
                available: bitmap2.clone(),
                requests: bitmap2,
            })
            .unwrap();
        assert!(changed);
        if let MaybeKnownMetadata::Known { metadata, .. } = &meta {
            assert!(metadata.available.get(0).unwrap());
            assert!(metadata.available.get(1).unwrap());
            assert!(!metadata.available.get(2).unwrap());
        } else {
            panic!("Expected Known metadata");
        }
    }

    /// A subset update adds no information and must report no change.
    #[test]
    fn update_returns_false_when_no_change() {
        let mut bitmap = CellBitmap::<E>::with_capacity(4).unwrap();
        bitmap.set(0, true).unwrap();
        bitmap.set(1, true).unwrap();
        let mut meta: MaybeKnownMetadata<E> = PartialDataColumnPartsMetadata {
            available: bitmap.clone(),
            requests: bitmap.clone(),
        }
        .into();
        // Update with a subset
        let mut subset = CellBitmap::<E>::with_capacity(4).unwrap();
        subset.set(0, true).unwrap();
        let changed = meta
            .do_update(PartialDataColumnPartsMetadata {
                available: subset.clone(),
                requests: subset,
            })
            .unwrap();
        assert!(!changed);
    }

    /// Bitmaps of different lengths are rejected.
    #[test]
    fn update_rejects_mismatched_lengths() {
        let mut bitmap4 = CellBitmap::<E>::with_capacity(4).unwrap();
        bitmap4.set(0, true).unwrap();
        let mut meta: MaybeKnownMetadata<E> = PartialDataColumnPartsMetadata {
            available: bitmap4.clone(),
            requests: bitmap4,
        }
        .into();
        let mut bitmap6 = CellBitmap::<E>::with_capacity(6).unwrap();
        bitmap6.set(0, true).unwrap();
        let result = meta.do_update(PartialDataColumnPartsMetadata {
            available: bitmap6.clone(),
            requests: bitmap6,
        });
        assert!(result.is_err());
    }

    // -- OutgoingPartialColumn::partial_action_from_metadata tests --

    /// The header-only message is sent at most once per peer.
    #[test]
    fn no_metadata_sends_header_once() {
        let root = Hash256::repeat_byte(1);
        let header = make_header(4);
        let partial = make_partial_column(root, 4, &[0, 1]);
        let header_sent_set: HeaderSentSet = Arc::new(Mutex::new(HashSet::new()));
        let outgoing = OutgoingPartialColumn::new(partial, &header, header_sent_set);
        let peer = random_peer_id();
        // First call with no metadata → sends header
        let action = outgoing.partial_action_from_metadata(peer, None).unwrap();
        assert!(action.send.is_some());
        // Second call for same peer → no send
        let action2 = outgoing.partial_action_from_metadata(peer, None).unwrap();
        assert!(action2.send.is_none());
    }

    /// Only cells the peer requested but lacks (and that we hold) are sent.
    #[test]
    fn metadata_filters_cells_to_send() {
        let root = Hash256::repeat_byte(1);
        let header = make_header(4);
        // We have cells [0, 2, 3]
        let partial = make_partial_column(root, 4, &[0, 2, 3]);
        let header_sent_set: HeaderSentSet = Arc::new(Mutex::new(HashSet::new()));
        let outgoing = OutgoingPartialColumn::new(partial, &header, header_sent_set);
        let peer = random_peer_id();
        // Peer has [0, 1], wants [0, 1, 2, 3]
        let mut peer_available = CellBitmap::<E>::with_capacity(4).unwrap();
        peer_available.set(0, true).unwrap();
        peer_available.set(1, true).unwrap();
        let mut peer_request = CellBitmap::<E>::with_capacity(4).unwrap();
        for i in 0..4 {
            peer_request.set(i, true).unwrap();
        }
        let peer_meta = PartialDataColumnPartsMetadata::<E> {
            available: peer_available,
            requests: peer_request,
        };
        let encoded = peer_meta.as_ssz_bytes();
        let action = outgoing
            .partial_action_from_metadata(peer, Some(&encoded))
            .unwrap();
        // We should send cells [2, 3] (want = request - available = [2,3], and we have [0,2,3])
        assert!(action.send.is_some());
    }

    /// `need` is set when the peer advertises cells we do not hold.
    #[test]
    fn metadata_sets_need_when_peer_has_unknown_cells() {
        let root = Hash256::repeat_byte(1);
        let header = make_header(4);
        // We have cells [0]
        let partial = make_partial_column(root, 4, &[0]);
        let header_sent_set: HeaderSentSet = Arc::new(Mutex::new(HashSet::new()));
        let outgoing = OutgoingPartialColumn::new(partial, &header, header_sent_set);
        let peer = random_peer_id();
        // Peer has [0, 1, 2] — cells [1, 2] are unknown to us
        let mut peer_available = CellBitmap::<E>::with_capacity(4).unwrap();
        peer_available.set(0, true).unwrap();
        peer_available.set(1, true).unwrap();
        peer_available.set(2, true).unwrap();
        let peer_meta = PartialDataColumnPartsMetadata::<E> {
            available: peer_available.clone(),
            requests: peer_available,
        };
        let encoded = peer_meta.as_ssz_bytes();
        let action = outgoing
            .partial_action_from_metadata(peer, Some(&encoded))
            .unwrap();
        assert!(action.need);
    }
}

View File

@@ -1,23 +1,23 @@
//! Handles the encoding and decoding of pubsub messages.
use crate::TopicHash;
use crate::types::{GossipEncoding, GossipKind, GossipTopic};
use libp2p::gossipsub;
use gossipsub::TopicHash;
use snap::raw::{Decoder, Encoder, decompress_len};
use ssz::{Decode, Encode};
use std::io::{Error, ErrorKind};
use std::sync::Arc;
use types::{
AttesterSlashing, AttesterSlashingBase, AttesterSlashingElectra, BlobSidecar,
DataColumnSidecar, DataColumnSubnetId, EthSpec, ForkContext, ForkName,
LightClientFinalityUpdate, LightClientOptimisticUpdate, PayloadAttestationMessage,
ProposerSlashing, SignedAggregateAndProof, SignedAggregateAndProofBase,
SignedAggregateAndProofElectra, SignedBeaconBlock, SignedBeaconBlockAltair,
SignedBeaconBlockBase, SignedBeaconBlockBellatrix, SignedBeaconBlockCapella,
SignedBeaconBlockDeneb, SignedBeaconBlockElectra, SignedBeaconBlockFulu,
SignedBeaconBlockGloas, SignedBlsToExecutionChange, SignedContributionAndProof,
SignedExecutionPayloadBid, SignedExecutionPayloadEnvelope, SignedProposerPreferences,
SignedVoluntaryExit, SingleAttestation, SubnetId, SyncCommitteeMessage, SyncSubnetId,
DataColumnSidecar, DataColumnSubnetId, EthSpec, ForkContext, ForkName, Hash256,
LightClientFinalityUpdate, LightClientOptimisticUpdate, PartialDataColumn,
PartialDataColumnSidecar, PayloadAttestationMessage, ProposerSlashing, SignedAggregateAndProof,
SignedAggregateAndProofBase, SignedAggregateAndProofElectra, SignedBeaconBlock,
SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix,
SignedBeaconBlockCapella, SignedBeaconBlockDeneb, SignedBeaconBlockElectra,
SignedBeaconBlockFulu, SignedBeaconBlockGloas, SignedBlsToExecutionChange,
SignedContributionAndProof, SignedExecutionPayloadBid, SignedExecutionPayloadEnvelope,
SignedProposerPreferences, SignedVoluntaryExit, SingleAttestation, SubnetId,
SyncCommitteeMessage, SyncSubnetId,
};
#[derive(Debug, Clone, PartialEq)]
@@ -464,6 +464,35 @@ impl<E: EthSpec> PubsubMessage<E> {
}
}
/// Decodes incoming partial data column sidecar from gossipsub partial protocol.
/// Note: Currently, data columns are the only supported partial messages. In future this could
/// return an enum.
pub fn decode_partial<E: EthSpec>(
    topic: &GossipTopic,
    group: &[u8],
    data: &[u8],
) -> Result<PartialDataColumn<E>, String> {
    // Only the data-column topic kind carries partial messages today.
    let kind = topic.kind();
    let GossipKind::DataColumnSidecar(id) = kind else {
        return Err(format!("Partial message unsupported for topic: {}", kind));
    };
    // The group id must begin with the (currently only) version byte 0.
    if group.first() != Some(&0) {
        return Err(format!("Unknown data column format: {:?}", group.first()));
    }
    // The remainder of the group id is the block root.
    let block_root = Hash256::from_ssz_bytes(&group[1..])
        .map_err(|e| format!("Error decoding group: {:?}", e))?;
    let sidecar = PartialDataColumnSidecar::from_ssz_bytes(data)
        .map_err(|e| format!("Error decoding sidecar: {:?}", e))?;
    Ok(PartialDataColumn {
        block_root,
        // Partial messages are spec'd under the assumption that there is one column per subnet.
        index: **id,
        sidecar,
    })
}
impl<E: EthSpec> std::fmt::Display for PubsubMessage<E> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {

View File

@@ -11,7 +11,7 @@ use types::{
sync_committee::SyncSubnetId,
};
use crate::Subnet;
use crate::{NetworkConfig, Subnet};
/// The gossipsub topic names.
// These constants form a topic name of the form /TOPIC_PREFIX/TOPIC/ENCODING_POSTFIX
@@ -200,6 +200,15 @@ pub enum GossipKind {
LightClientOptimisticUpdate,
}
impl GossipKind {
    /// Whether this topic kind should use the gossipsub partial-message
    /// protocol. Only data column sidecar topics qualify, and only when
    /// partial columns are enabled in the network config.
    pub fn use_partial_messages(&self, config: &NetworkConfig) -> bool {
        matches!(self, GossipKind::DataColumnSidecar(_)) && config.enable_partial_columns
    }
}
impl std::fmt::Display for GossipKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {